1 /* Top-level LTO routines.
2 Copyright (C) 2009-2014 Free Software Foundation, Inc.
3 Contributed by CodeSourcery, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "opts.h"
25 #include "toplev.h"
26 #include "tree.h"
27 #include "stor-layout.h"
28 #include "diagnostic-core.h"
29 #include "tm.h"
30 #include "cgraph.h"
31 #include "tree-ssa-operands.h"
32 #include "tree-pass.h"
33 #include "langhooks.h"
34 #include "bitmap.h"
35 #include "ipa-prop.h"
36 #include "common.h"
37 #include "debug.h"
38 #include "tree-ssa-alias.h"
39 #include "internal-fn.h"
40 #include "gimple-expr.h"
41 #include "is-a.h"
42 #include "gimple.h"
43 #include "lto.h"
44 #include "lto-tree.h"
45 #include "lto-streamer.h"
46 #include "lto-section-names.h"
47 #include "tree-streamer.h"
48 #include "splay-tree.h"
49 #include "lto-partition.h"
50 #include "data-streamer.h"
51 #include "context.h"
52 #include "pass_manager.h"
53 #include "ipa-inline.h"
54 #include "params.h"
55
56
57 /* Number of parallel tasks to run, -1 if we want to use GNU Make jobserver. */
58 static int lto_parallelism;
59
60 static GTY(()) tree first_personality_decl;
61
62 /* Returns a hash code for P. */
63
64 static hashval_t
65 hash_name (const void *p)
66 {
67 const struct lto_section_slot *ds = (const struct lto_section_slot *) p;
68 return (hashval_t) htab_hash_string (ds->name);
69 }
70
71
72 /* Returns nonzero if P1 and P2 are equal. */
73
74 static int
75 eq_name (const void *p1, const void *p2)
76 {
77 const struct lto_section_slot *s1 =
78 (const struct lto_section_slot *) p1;
79 const struct lto_section_slot *s2 =
80 (const struct lto_section_slot *) p2;
81
82 return strcmp (s1->name, s2->name) == 0;
83 }
84
86 /* Free an lto_section_slot. */
86
87 static void
88 free_with_string (void *arg)
89 {
90 struct lto_section_slot *s = (struct lto_section_slot *)arg;
91
92 free (CONST_CAST (char *, s->name));
93 free (arg);
94 }
95
96 /* Create the section hash table. */
97
98 htab_t
99 lto_obj_create_section_hash_table (void)
100 {
101 return htab_create (37, hash_name, eq_name, free_with_string);
102 }
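/* A minimal usage sketch (illustrative only, not code from this file):
   callers populate the table with htab_find_slot and dispose of it with
   htab_delete, which runs free_with_string on every remaining entry:

     htab_t sections = lto_obj_create_section_hash_table ();
     ... add lto_section_slot entries via htab_find_slot ...
     htab_delete (sections);
*/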
103
104 /* Delete an allocated integer KEY in the splay tree. */
105
106 static void
107 lto_splay_tree_delete_id (splay_tree_key key)
108 {
109 free ((void *) key);
110 }
111
112 /* Compare splay tree node ids A and B. */
113
114 static int
115 lto_splay_tree_compare_ids (splay_tree_key a, splay_tree_key b)
116 {
117 unsigned HOST_WIDE_INT ai;
118 unsigned HOST_WIDE_INT bi;
119
120 ai = *(unsigned HOST_WIDE_INT *) a;
121 bi = *(unsigned HOST_WIDE_INT *) b;
122
123 if (ai < bi)
124 return -1;
125 else if (ai > bi)
126 return 1;
127 return 0;
128 }
129
130 /* Look up splay tree node by ID in splay tree T. */
131
132 static splay_tree_node
133 lto_splay_tree_lookup (splay_tree t, unsigned HOST_WIDE_INT id)
134 {
135 return splay_tree_lookup (t, (splay_tree_key) &id);
136 }
137
138 /* Check if KEY has ID. */
139
140 static bool
141 lto_splay_tree_id_equal_p (splay_tree_key key, unsigned HOST_WIDE_INT id)
142 {
143 return *(unsigned HOST_WIDE_INT *) key == id;
144 }
145
146 /* Insert a splay tree node into tree T with ID as key and FILE_DATA as value.
147 The ID is allocated separately because we need HOST_WIDE_INTs which may
148 be wider than a splay_tree_key. */
149
150 static void
151 lto_splay_tree_insert (splay_tree t, unsigned HOST_WIDE_INT id,
152 struct lto_file_decl_data *file_data)
153 {
154 unsigned HOST_WIDE_INT *idp = XCNEW (unsigned HOST_WIDE_INT);
155 *idp = id;
156 splay_tree_insert (t, (splay_tree_key) idp, (splay_tree_value) file_data);
157 }
158
159 /* Create a splay tree. */
160
161 static splay_tree
162 lto_splay_tree_new (void)
163 {
164 return splay_tree_new (lto_splay_tree_compare_ids,
165 lto_splay_tree_delete_id,
166 NULL);
167 }
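/* Illustrative sketch of how the splay-tree helpers above fit together;
   ID and FILE_DATA are placeholders for values obtained elsewhere:

     splay_tree file_ids = lto_splay_tree_new ();
     lto_splay_tree_insert (file_ids, id, file_data);
     splay_tree_node n = lto_splay_tree_lookup (file_ids, id);
     if (n)
       file_data = (struct lto_file_decl_data *) n->value;

   Passing the address of a local ID to the lookup is safe because the
   comparison callback only dereferences the key during the call. */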
168
169 /* Return true when NODE has a clone that is analyzed (i.e. we need
170 to load its body even if the node itself is not needed). */
171
172 static bool
173 has_analyzed_clone_p (struct cgraph_node *node)
174 {
175 struct cgraph_node *orig = node;
176 node = node->clones;
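  /* Walk the clone tree in pre-order: descend into nested clones first,
     otherwise advance to the next sibling clone, and back up through the
     clone_of links once a subtree is exhausted, stopping when we return
     to the original node.  */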
177 if (node)
178 while (node != orig)
179 {
180 if (node->analyzed)
181 return true;
182 if (node->clones)
183 node = node->clones;
184 else if (node->next_sibling_clone)
185 node = node->next_sibling_clone;
186 else
187 {
188 while (node != orig && !node->next_sibling_clone)
189 node = node->clone_of;
190 if (node != orig)
191 node = node->next_sibling_clone;
192 }
193 }
194 return false;
195 }
196
197 /* Read the function body for the function associated with NODE. */
198
199 static void
200 lto_materialize_function (struct cgraph_node *node)
201 {
202 tree decl;
203
204 decl = node->decl;
205 /* Read in functions with bodies (analyzed nodes)
206 and also functions that are needed to produce virtual clones. */
207 if ((cgraph_function_with_gimple_body_p (node) && node->analyzed)
208 || node->used_as_abstract_origin
209 || has_analyzed_clone_p (node))
210 {
211 /* Clones don't need to be read. */
212 if (node->clone_of)
213 return;
214 if (DECL_FUNCTION_PERSONALITY (decl) && !first_personality_decl)
215 first_personality_decl = DECL_FUNCTION_PERSONALITY (decl);
216 }
217
218 /* Let the middle end know about the function. */
219 rest_of_decl_compilation (decl, 1, 0);
220 }
221
222
223 /* Decode the content of memory pointed to by DATA into the in-decl
224 state object STATE. DATA_IN points to a data_in structure used for
225 decoding. Return the address just past the decoded object in the
226 input. */
227
228 static const uint32_t *
229 lto_read_in_decl_state (struct data_in *data_in, const uint32_t *data,
230 struct lto_in_decl_state *state)
231 {
232 uint32_t ix;
233 tree decl;
234 uint32_t i, j;
235
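  /* The encoding read below is: first the cache index of the function
     decl this state belongs to (void_type_node stands for the global
     decl state), then for each of the LTO_N_DECL_STREAMS streams a
     length word followed by that many tree-cache indices.  */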
236 ix = *data++;
237 decl = streamer_tree_cache_get_tree (data_in->reader_cache, ix);
238 if (TREE_CODE (decl) != FUNCTION_DECL)
239 {
240 gcc_assert (decl == void_type_node);
241 decl = NULL_TREE;
242 }
243 state->fn_decl = decl;
244
245 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
246 {
247 uint32_t size = *data++;
248 tree *decls = ggc_vec_alloc<tree> (size);
249
250 for (j = 0; j < size; j++)
251 decls[j] = streamer_tree_cache_get_tree (data_in->reader_cache, data[j]);
252
253 state->streams[i].size = size;
254 state->streams[i].trees = decls;
255 data += size;
256 }
257
258 return data;
259 }
260
261
262 /* Global canonical type table. */
263 static htab_t gimple_canonical_types;
264 static pointer_map <hashval_t> *canonical_type_hash_cache;
265 static unsigned long num_canonical_type_hash_entries;
266 static unsigned long num_canonical_type_hash_queries;
267
268 static hashval_t iterative_hash_canonical_type (tree type, hashval_t val);
269 static hashval_t gimple_canonical_type_hash (const void *p);
270 static void gimple_register_canonical_type_1 (tree t, hashval_t hash);
271
272 /* Return a hash value for gimple type TYPE.
273
274 The hash value returned is equal for types considered compatible
275 by gimple_canonical_types_compatible_p. */
276
277 static hashval_t
278 hash_canonical_type (tree type)
279 {
280 hashval_t v;
281
282 /* Combine a few common features of types so that types are grouped into
283 smaller sets; when searching for existing matching types to merge,
284 only existing types having the same features as the new type will be
285 checked. */
286 v = iterative_hash_hashval_t (TREE_CODE (type), 0);
287 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
288
289 /* Incorporate common features of numerical types. */
290 if (INTEGRAL_TYPE_P (type)
291 || SCALAR_FLOAT_TYPE_P (type)
292 || FIXED_POINT_TYPE_P (type)
293 || TREE_CODE (type) == OFFSET_TYPE
294 || POINTER_TYPE_P (type))
295 {
296 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
297 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
298 }
299
300 if (VECTOR_TYPE_P (type))
301 {
302 v = iterative_hash_hashval_t (TYPE_VECTOR_SUBPARTS (type), v);
303 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
304 }
305
306 if (TREE_CODE (type) == COMPLEX_TYPE)
307 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
308
309 /* For pointer and reference types, fold in information about the type
310 pointed to but do not recurse to the pointed-to type. */
311 if (POINTER_TYPE_P (type))
312 {
313 v = iterative_hash_hashval_t (TYPE_ADDR_SPACE (TREE_TYPE (type)), v);
314 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
315 }
316
317 /* For integer types hash only the string flag. */
318 if (TREE_CODE (type) == INTEGER_TYPE)
319 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
320
321 /* For array types hash the domain bounds and the string flag. */
322 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
323 {
324 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
325 /* OMP lowering can introduce error_mark_node in place of
326 random local decls in types. */
327 if (TYPE_MIN_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
328 v = iterative_hash_expr (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), v);
329 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
330 v = iterative_hash_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), v);
331 }
332
333 /* Recurse for aggregates with a single element type. */
334 if (TREE_CODE (type) == ARRAY_TYPE
335 || TREE_CODE (type) == COMPLEX_TYPE
336 || TREE_CODE (type) == VECTOR_TYPE)
337 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
338
339 /* Incorporate function return and argument types. */
340 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
341 {
342 unsigned na;
343 tree p;
344
345 /* For method types also incorporate their parent class. */
346 if (TREE_CODE (type) == METHOD_TYPE)
347 v = iterative_hash_canonical_type (TYPE_METHOD_BASETYPE (type), v);
348
349 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
350
351 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
352 {
353 v = iterative_hash_canonical_type (TREE_VALUE (p), v);
354 na++;
355 }
356
357 v = iterative_hash_hashval_t (na, v);
358 }
359
360 if (RECORD_OR_UNION_TYPE_P (type))
361 {
362 unsigned nf;
363 tree f;
364
365 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
366 if (TREE_CODE (f) == FIELD_DECL)
367 {
368 v = iterative_hash_canonical_type (TREE_TYPE (f), v);
369 nf++;
370 }
371
372 v = iterative_hash_hashval_t (nf, v);
373 }
374
375 return v;
376 }
377
378 /* Return a hash value for gimple type TYPE combined with VAL. */
379
380 static hashval_t
381 iterative_hash_canonical_type (tree type, hashval_t val)
382 {
383 hashval_t v;
384 /* An already processed type. */
385 if (TYPE_CANONICAL (type))
386 {
387 type = TYPE_CANONICAL (type);
388 v = gimple_canonical_type_hash (type);
389 }
390 else
391 {
392 /* Canonical types should not be able to form SCCs by design; this
393 recursion is just because we do not register canonical types in
394 optimal order. To avoid quadratic behavior also register the
395 type here. */
396 v = hash_canonical_type (type);
397 gimple_register_canonical_type_1 (type, v);
398 }
399 return iterative_hash_hashval_t (v, val);
400 }
401
402 /* Returns the hash for a canonical type P. */
403
404 static hashval_t
405 gimple_canonical_type_hash (const void *p)
406 {
407 num_canonical_type_hash_queries++;
408 hashval_t *slot
409 = canonical_type_hash_cache->contains (CONST_CAST_TREE ((const_tree) p));
410 gcc_assert (slot != NULL);
411 return *slot;
412 }
413
414
415 /* The TYPE_CANONICAL merging machinery. It should closely resemble
416 the middle-end types_compatible_p function. It needs to avoid
417 claiming types are different for types that should be treated
418 the same with respect to TBAA. Canonical types are also used
419 for IL consistency checks via the useless_type_conversion_p
420 predicate which does not handle all type kinds itself but falls
421 back to pointer-comparison of TYPE_CANONICAL for aggregates
422 for example. */
423
424 /* Return true iff T1 and T2 are structurally identical as far as
425 TBAA is concerned. */
426
427 static bool
428 gimple_canonical_types_compatible_p (tree t1, tree t2)
429 {
430 /* Before starting to set up the SCC machinery, handle the simple cases. */
431
432 /* Check first for the obvious case of pointer identity. */
433 if (t1 == t2)
434 return true;
435
436 /* Check that we have two types to compare. */
437 if (t1 == NULL_TREE || t2 == NULL_TREE)
438 return false;
439
440 /* If the types have been previously registered and found equal
441 they still are. */
442 if (TYPE_CANONICAL (t1)
443 && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
444 return true;
445
446 /* Can't be the same type if the types don't have the same code. */
447 if (TREE_CODE (t1) != TREE_CODE (t2))
448 return false;
449
450 /* Qualifiers do not matter for canonical type comparison purposes. */
451
452 /* Void types and nullptr types are always the same. */
453 if (TREE_CODE (t1) == VOID_TYPE
454 || TREE_CODE (t1) == NULLPTR_TYPE)
455 return true;
456
457 /* Can't be the same type if they have different modes. */
458 if (TYPE_MODE (t1) != TYPE_MODE (t2))
459 return false;
460
461 /* Non-aggregate types can be handled cheaply. */
462 if (INTEGRAL_TYPE_P (t1)
463 || SCALAR_FLOAT_TYPE_P (t1)
464 || FIXED_POINT_TYPE_P (t1)
465 || TREE_CODE (t1) == VECTOR_TYPE
466 || TREE_CODE (t1) == COMPLEX_TYPE
467 || TREE_CODE (t1) == OFFSET_TYPE
468 || POINTER_TYPE_P (t1))
469 {
470 /* Can't be the same type if they have different sign or precision. */
471 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
472 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
473 return false;
474
475 if (TREE_CODE (t1) == INTEGER_TYPE
476 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
477 return false;
478
479 /* For canonical type comparisons we do not want to build SCCs
480 so we cannot compare pointed-to types. But we can, for now,
481 require the same pointed-to type kind and match what
482 useless_type_conversion_p would do. */
483 if (POINTER_TYPE_P (t1))
484 {
485 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
486 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
487 return false;
488
489 if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
490 return false;
491 }
492
493 /* Tail-recurse to components. */
494 if (TREE_CODE (t1) == VECTOR_TYPE
495 || TREE_CODE (t1) == COMPLEX_TYPE)
496 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
497 TREE_TYPE (t2));
498
499 return true;
500 }
501
502 /* Do type-specific comparisons. */
503 switch (TREE_CODE (t1))
504 {
505 case ARRAY_TYPE:
506 /* Array types are the same if the element types are the same and
507 the number of elements is the same. */
508 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
509 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
510 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
511 return false;
512 else
513 {
514 tree i1 = TYPE_DOMAIN (t1);
515 tree i2 = TYPE_DOMAIN (t2);
516
517 /* For an incomplete external array, the type domain can be
518 NULL_TREE. Check this condition also. */
519 if (i1 == NULL_TREE && i2 == NULL_TREE)
520 return true;
521 else if (i1 == NULL_TREE || i2 == NULL_TREE)
522 return false;
523 else
524 {
525 tree min1 = TYPE_MIN_VALUE (i1);
526 tree min2 = TYPE_MIN_VALUE (i2);
527 tree max1 = TYPE_MAX_VALUE (i1);
528 tree max2 = TYPE_MAX_VALUE (i2);
529
530 /* The minimum/maximum values have to be the same. */
531 if ((min1 == min2
532 || (min1 && min2
533 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
534 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
535 || operand_equal_p (min1, min2, 0))))
536 && (max1 == max2
537 || (max1 && max2
538 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
539 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
540 || operand_equal_p (max1, max2, 0)))))
541 return true;
542 else
543 return false;
544 }
545 }
546
547 case METHOD_TYPE:
548 case FUNCTION_TYPE:
549 /* Function types are the same if the return type and argument types
550 are the same. */
551 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
552 return false;
553
554 if (!comp_type_attributes (t1, t2))
555 return false;
556
557 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
558 return true;
559 else
560 {
561 tree parms1, parms2;
562
563 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
564 parms1 && parms2;
565 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
566 {
567 if (!gimple_canonical_types_compatible_p
568 (TREE_VALUE (parms1), TREE_VALUE (parms2)))
569 return false;
570 }
571
572 if (parms1 || parms2)
573 return false;
574
575 return true;
576 }
577
578 case RECORD_TYPE:
579 case UNION_TYPE:
580 case QUAL_UNION_TYPE:
581 {
582 tree f1, f2;
583
584 /* For aggregate types, all the fields must be the same. */
585 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
586 f1 || f2;
587 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
588 {
589 /* Skip non-fields. */
590 while (f1 && TREE_CODE (f1) != FIELD_DECL)
591 f1 = TREE_CHAIN (f1);
592 while (f2 && TREE_CODE (f2) != FIELD_DECL)
593 f2 = TREE_CHAIN (f2);
594 if (!f1 || !f2)
595 break;
596 /* The fields must have the same name, offset and type. */
597 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
598 || !gimple_compare_field_offset (f1, f2)
599 || !gimple_canonical_types_compatible_p
600 (TREE_TYPE (f1), TREE_TYPE (f2)))
601 return false;
602 }
603
604 /* If one aggregate has more fields than the other, they
605 are not the same. */
606 if (f1 || f2)
607 return false;
608
609 return true;
610 }
611
612 default:
613 gcc_unreachable ();
614 }
615 }
616
617
618 /* Returns nonzero if P1 and P2 are equal. */
619
620 static int
621 gimple_canonical_type_eq (const void *p1, const void *p2)
622 {
623 const_tree t1 = (const_tree) p1;
624 const_tree t2 = (const_tree) p2;
625 return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
626 CONST_CAST_TREE (t2));
627 }
628
629 /* Main worker for gimple_register_canonical_type. */
630
631 static void
632 gimple_register_canonical_type_1 (tree t, hashval_t hash)
633 {
634 void **slot;
635
636 gcc_checking_assert (TYPE_P (t) && !TYPE_CANONICAL (t));
637
638 slot = htab_find_slot_with_hash (gimple_canonical_types, t, hash, INSERT);
639 if (*slot)
640 {
641 tree new_type = (tree)(*slot);
642 gcc_checking_assert (new_type != t);
643 TYPE_CANONICAL (t) = new_type;
644 }
645 else
646 {
647 TYPE_CANONICAL (t) = t;
648 *slot = (void *) t;
649 /* Cache the just computed hash value. */
650 num_canonical_type_hash_entries++;
651 bool existed_p;
652 hashval_t *hslot = canonical_type_hash_cache->insert (t, &existed_p);
653 gcc_assert (!existed_p);
654 *hslot = hash;
655 }
656 }
657
658 /* Register type T in the global type table gimple_types and set
659 TYPE_CANONICAL of T accordingly.
660 This is used by LTO to merge structurally equivalent types for
661 type-based aliasing purposes across different TUs and languages.
662
663 ??? This merging does not exactly match how the tree.c middle-end
664 functions will assign TYPE_CANONICAL when new types are created
665 during optimization (which at least happens for pointer and array
666 types). */
667
668 static void
669 gimple_register_canonical_type (tree t)
670 {
671 if (TYPE_CANONICAL (t))
672 return;
673
674 gimple_register_canonical_type_1 (t, hash_canonical_type (t));
675 }
676
677 /* Re-compute TYPE_CANONICAL for NODE and related types. */
678
679 static void
680 lto_register_canonical_types (tree node, bool first_p)
681 {
682 if (!node
683 || !TYPE_P (node))
684 return;
685
686 if (first_p)
687 TYPE_CANONICAL (node) = NULL_TREE;
688
689 if (POINTER_TYPE_P (node)
690 || TREE_CODE (node) == COMPLEX_TYPE
691 || TREE_CODE (node) == ARRAY_TYPE)
692 lto_register_canonical_types (TREE_TYPE (node), first_p);
693
694 if (!first_p)
695 gimple_register_canonical_type (node);
696 }
697
698
699 /* Remember trees that contain references to declarations. */
700 static GTY(()) vec <tree, va_gc> *tree_with_vars;
701
702 #define CHECK_VAR(tt) \
703 do \
704 { \
705 if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \
706 && (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \
707 return true; \
708 } while (0)
709
710 #define CHECK_NO_VAR(tt) \
711 gcc_checking_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
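/* CHECK_VAR makes the enclosing mentions_vars_p_* routine return true as
   soon as it sees a public or external variable or function decl (such
   decls may be replaced by a prevailing decl and therefore need fixup),
   while CHECK_NO_VAR merely asserts that the given field can never point
   to such a decl.  */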
712
713 /* Check presence of pointers to decls in fields of a tree_typed T. */
714
715 static inline bool
716 mentions_vars_p_typed (tree t)
717 {
718 CHECK_NO_VAR (TREE_TYPE (t));
719 return false;
720 }
721
722 /* Check presence of pointers to decls in fields of a tree_common T. */
723
724 static inline bool
725 mentions_vars_p_common (tree t)
726 {
727 if (mentions_vars_p_typed (t))
728 return true;
729 CHECK_NO_VAR (TREE_CHAIN (t));
730 return false;
731 }
732
733 /* Check presence of pointers to decls in fields of a decl_minimal T. */
734
735 static inline bool
736 mentions_vars_p_decl_minimal (tree t)
737 {
738 if (mentions_vars_p_common (t))
739 return true;
740 CHECK_NO_VAR (DECL_NAME (t));
741 CHECK_VAR (DECL_CONTEXT (t));
742 return false;
743 }
744
745 /* Check presence of pointers to decls in fields of a decl_common T. */
746
747 static inline bool
748 mentions_vars_p_decl_common (tree t)
749 {
750 if (mentions_vars_p_decl_minimal (t))
751 return true;
752 CHECK_VAR (DECL_SIZE (t));
753 CHECK_VAR (DECL_SIZE_UNIT (t));
754 CHECK_VAR (DECL_INITIAL (t));
755 CHECK_NO_VAR (DECL_ATTRIBUTES (t));
756 CHECK_VAR (DECL_ABSTRACT_ORIGIN (t));
757 return false;
758 }
759
760 /* Check presence of pointers to decls in fields of a decl_with_vis T. */
761
762 static inline bool
763 mentions_vars_p_decl_with_vis (tree t)
764 {
765 if (mentions_vars_p_decl_common (t))
766 return true;
767
768 /* The accessor macro has side effects, so use the field name here. */
769 CHECK_NO_VAR (t->decl_with_vis.assembler_name);
770 return false;
771 }
772
773 /* Check presence of pointers to decls in fields of a decl_non_common T. */
774
775 static inline bool
776 mentions_vars_p_decl_non_common (tree t)
777 {
778 if (mentions_vars_p_decl_with_vis (t))
779 return true;
780 CHECK_NO_VAR (DECL_ARGUMENT_FLD (t));
781 CHECK_NO_VAR (DECL_RESULT_FLD (t));
782 CHECK_NO_VAR (DECL_VINDEX (t));
783 return false;
784 }
785
786 /* Check presence of pointers to decls in fields of a FUNCTION_DECL T. */
787
788 static bool
789 mentions_vars_p_function (tree t)
790 {
791 if (mentions_vars_p_decl_non_common (t))
792 return true;
793 CHECK_VAR (DECL_FUNCTION_PERSONALITY (t));
794 return false;
795 }
796
797 /* Check presence of pointers to decls in fields of a field_decl T. */
798
799 static bool
800 mentions_vars_p_field_decl (tree t)
801 {
802 if (mentions_vars_p_decl_common (t))
803 return true;
804 CHECK_VAR (DECL_FIELD_OFFSET (t));
805 CHECK_NO_VAR (DECL_BIT_FIELD_TYPE (t));
806 CHECK_NO_VAR (DECL_QUALIFIER (t));
807 CHECK_NO_VAR (DECL_FIELD_BIT_OFFSET (t));
808 CHECK_NO_VAR (DECL_FCONTEXT (t));
809 return false;
810 }
811
812 /* Check presence of pointers to decls in fields of a type T. */
813
814 static bool
815 mentions_vars_p_type (tree t)
816 {
817 if (mentions_vars_p_common (t))
818 return true;
819 CHECK_NO_VAR (TYPE_CACHED_VALUES (t));
820 CHECK_VAR (TYPE_SIZE (t));
821 CHECK_VAR (TYPE_SIZE_UNIT (t));
822 CHECK_NO_VAR (TYPE_ATTRIBUTES (t));
823 CHECK_NO_VAR (TYPE_NAME (t));
824
825 CHECK_VAR (TYPE_MINVAL (t));
826 CHECK_VAR (TYPE_MAXVAL (t));
827
828 /* Accessor is for derived node types only. */
829 CHECK_NO_VAR (t->type_non_common.binfo);
830
831 CHECK_VAR (TYPE_CONTEXT (t));
832 CHECK_NO_VAR (TYPE_CANONICAL (t));
833 CHECK_NO_VAR (TYPE_MAIN_VARIANT (t));
834 CHECK_NO_VAR (TYPE_NEXT_VARIANT (t));
835 return false;
836 }
837
838 /* Check presence of pointers to decls in fields of a BINFO T. */
839
840 static bool
841 mentions_vars_p_binfo (tree t)
842 {
843 unsigned HOST_WIDE_INT i, n;
844
845 if (mentions_vars_p_common (t))
846 return true;
847 CHECK_VAR (BINFO_VTABLE (t));
848 CHECK_NO_VAR (BINFO_OFFSET (t));
849 CHECK_NO_VAR (BINFO_VIRTUALS (t));
850 CHECK_NO_VAR (BINFO_VPTR_FIELD (t));
851 n = vec_safe_length (BINFO_BASE_ACCESSES (t));
852 for (i = 0; i < n; i++)
853 CHECK_NO_VAR (BINFO_BASE_ACCESS (t, i));
854 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
855 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
856 n = BINFO_N_BASE_BINFOS (t);
857 for (i = 0; i < n; i++)
858 CHECK_NO_VAR (BINFO_BASE_BINFO (t, i));
859 return false;
860 }
861
862 /* Check presence of pointers to decls in fields of a CONSTRUCTOR T. */
863
864 static bool
865 mentions_vars_p_constructor (tree t)
866 {
867 unsigned HOST_WIDE_INT idx;
868 constructor_elt *ce;
869
870 if (mentions_vars_p_typed (t))
871 return true;
872
873 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
874 {
875 CHECK_NO_VAR (ce->index);
876 CHECK_VAR (ce->value);
877 }
878 return false;
879 }
880
881 /* Check presence of pointers to decls in fields of an expression tree T. */
882
883 static bool
884 mentions_vars_p_expr (tree t)
885 {
886 int i;
887 if (mentions_vars_p_typed (t))
888 return true;
889 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
890 CHECK_VAR (TREE_OPERAND (t, i));
891 return false;
892 }
893
894 /* Check presence of pointers to decls in fields of an OMP_CLAUSE T. */
895
896 static bool
897 mentions_vars_p_omp_clause (tree t)
898 {
899 int i;
900 if (mentions_vars_p_common (t))
901 return true;
902 for (i = omp_clause_num_ops[OMP_CLAUSE_CODE (t)] - 1; i >= 0; --i)
903 CHECK_VAR (OMP_CLAUSE_OPERAND (t, i));
904 return false;
905 }
906
907 /* Check presence of pointers to decls that need later fixup in T. */
908
909 static bool
910 mentions_vars_p (tree t)
911 {
912 switch (TREE_CODE (t))
913 {
914 case IDENTIFIER_NODE:
915 break;
916
917 case TREE_LIST:
918 CHECK_VAR (TREE_VALUE (t));
919 CHECK_VAR (TREE_PURPOSE (t));
920 CHECK_NO_VAR (TREE_CHAIN (t));
921 break;
922
923 case FIELD_DECL:
924 return mentions_vars_p_field_decl (t);
925
926 case LABEL_DECL:
927 case CONST_DECL:
928 case PARM_DECL:
929 case RESULT_DECL:
930 case IMPORTED_DECL:
931 case NAMESPACE_DECL:
932 case NAMELIST_DECL:
933 return mentions_vars_p_decl_common (t);
934
935 case VAR_DECL:
936 return mentions_vars_p_decl_with_vis (t);
937
938 case TYPE_DECL:
939 return mentions_vars_p_decl_non_common (t);
940
941 case FUNCTION_DECL:
942 return mentions_vars_p_function (t);
943
944 case TREE_BINFO:
945 return mentions_vars_p_binfo (t);
946
947 case PLACEHOLDER_EXPR:
948 return mentions_vars_p_common (t);
949
950 case BLOCK:
951 case TRANSLATION_UNIT_DECL:
952 case OPTIMIZATION_NODE:
953 case TARGET_OPTION_NODE:
954 break;
955
956 case CONSTRUCTOR:
957 return mentions_vars_p_constructor (t);
958
959 case OMP_CLAUSE:
960 return mentions_vars_p_omp_clause (t);
961
962 default:
963 if (TYPE_P (t))
964 {
965 if (mentions_vars_p_type (t))
966 return true;
967 }
968 else if (EXPR_P (t))
969 {
970 if (mentions_vars_p_expr (t))
971 return true;
972 }
973 else if (CONSTANT_CLASS_P (t))
974 CHECK_NO_VAR (TREE_TYPE (t));
975 else
976 gcc_unreachable ();
977 }
978 return false;
979 }
980
981
982 /* Return the resolution for the decl with index INDEX from DATA_IN. */
983
984 static enum ld_plugin_symbol_resolution
985 get_resolution (struct data_in *data_in, unsigned index)
986 {
987 if (data_in->globals_resolution.exists ())
988 {
989 ld_plugin_symbol_resolution_t ret;
990 /* We can have references to functions that are not emitted, in
991 DECL_FUNCTION_PERSONALITY at least. So we can, and indeed have
992 to, return LDPR_UNKNOWN in some cases. */
993 if (data_in->globals_resolution.length () <= index)
994 return LDPR_UNKNOWN;
995 ret = data_in->globals_resolution[index];
996 return ret;
997 }
998 else
999 /* Delay resolution finding until decl merging. */
1000 return LDPR_UNKNOWN;
1001 }
1002
1003 /* We need to record resolutions until the symbol table is read. */
1004 static void
1005 register_resolution (struct lto_file_decl_data *file_data, tree decl,
1006 enum ld_plugin_symbol_resolution resolution)
1007 {
1008 if (resolution == LDPR_UNKNOWN)
1009 return;
1010 if (!file_data->resolution_map)
1011 file_data->resolution_map = pointer_map_create ();
1012 *pointer_map_insert (file_data->resolution_map, decl) = (void *)(size_t)resolution;
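  /* The resolution is smuggled through the pointer map as a pointer-sized
     integer; it is looked up again once the symbol table has been read
     (see the comment above).  */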
1013 }
1014
1015 /* Register DECL with the global symbol table and change its
1016 name if necessary to avoid name clashes for static globals across
1017 different files. */
1018
1019 static void
1020 lto_register_var_decl_in_symtab (struct data_in *data_in, tree decl,
1021 unsigned ix)
1022 {
1023 tree context;
1024
1025 /* Variable has file scope, not local. */
1026 if (!TREE_PUBLIC (decl)
1027 && !((context = decl_function_context (decl))
1028 && auto_var_in_fn_p (decl, context)))
1029 rest_of_decl_compilation (decl, 1, 0);
1030
1031 /* If this variable has already been declared, queue the
1032 declaration for merging. */
1033 if (TREE_PUBLIC (decl))
1034 register_resolution (data_in->file_data,
1035 decl, get_resolution (data_in, ix));
1036 }
1037
1038
1039 /* Register DECL with the global symbol table and change its
1040 name if necessary to avoid name clashes for static globals across
1041 different files. DATA_IN contains descriptors and tables for the
1042 file being read. */
1043
1044 static void
1045 lto_register_function_decl_in_symtab (struct data_in *data_in, tree decl,
1046 unsigned ix)
1047 {
1048 /* If this function has already been declared, queue the
1049 declaration for merging. */
1050 if (TREE_PUBLIC (decl) && !DECL_ABSTRACT (decl))
1051 register_resolution (data_in->file_data,
1052 decl, get_resolution (data_in, ix));
1053 }
1054
1055
1056 /* Re-materialize the type T in the type variant list and in
1057 the pointer/reference-to chains. */
1058
1059 static void
1060 lto_fixup_prevailing_type (tree t)
1061 {
1062 /* The following re-creates proper variant lists while fixing up
1063 the variant leaders. We do not stream TYPE_NEXT_VARIANT so the
1064 variant list state before fixup is broken. */
1065
1066 /* If we are not our own variant leader, link us into our new leader's
1067 variant list. */
1068 if (TYPE_MAIN_VARIANT (t) != t)
1069 {
1070 tree mv = TYPE_MAIN_VARIANT (t);
1071 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
1072 TYPE_NEXT_VARIANT (mv) = t;
1073 }
1074
1075 /* The following reconstructs the pointer chains
1076 of the new pointed-to type if we are a main variant. We do
1077 not stream those so they are broken before fixup. */
1078 if (TREE_CODE (t) == POINTER_TYPE
1079 && TYPE_MAIN_VARIANT (t) == t)
1080 {
1081 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (TREE_TYPE (t));
1082 TYPE_POINTER_TO (TREE_TYPE (t)) = t;
1083 }
1084 else if (TREE_CODE (t) == REFERENCE_TYPE
1085 && TYPE_MAIN_VARIANT (t) == t)
1086 {
1087 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (TREE_TYPE (t));
1088 TYPE_REFERENCE_TO (TREE_TYPE (t)) = t;
1089 }
1090 }
1091
1092
1093 /* We keep prevailing tree SCCs in a hashtable with manual collision
1094 handling (in case all hashes compare the same) and keep the colliding
1095 entries in the tree_scc->next chain. */
1096
1097 struct tree_scc
1098 {
1099 tree_scc *next;
1100 /* Hash of the whole SCC. */
1101 hashval_t hash;
1102 /* Number of trees in the SCC. */
1103 unsigned len;
1104 /* Number of possible entries into the SCC (tree nodes [0..entry_len-1]
1105 which share the same individual tree hash). */
1106 unsigned entry_len;
1107 /* The members of the SCC.
1108 We only need to remember the first entry node candidate for prevailing
1109 SCCs (but of course have access to all entries for SCCs we are
1110 processing).
1111 ??? For prevailing SCCs we really only need hash and the first
1112 entry candidate, but that's too awkward to implement. */
1113 tree entries[1];
1114 };
1115
1116 struct tree_scc_hasher : typed_noop_remove <tree_scc>
1117 {
1118 typedef tree_scc value_type;
1119 typedef tree_scc compare_type;
1120 static inline hashval_t hash (const value_type *);
1121 static inline bool equal (const value_type *, const compare_type *);
1122 };
1123
1124 hashval_t
1125 tree_scc_hasher::hash (const value_type *scc)
1126 {
1127 return scc->hash;
1128 }
1129
1130 bool
1131 tree_scc_hasher::equal (const value_type *scc1, const compare_type *scc2)
1132 {
1133 if (scc1->hash != scc2->hash
1134 || scc1->len != scc2->len
1135 || scc1->entry_len != scc2->entry_len)
1136 return false;
1137 return true;
1138 }
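/* Note that hash-table equality deliberately checks only the hash, the
   SCC size and the number of entry candidates; full structural equality
   of colliding SCCs is established later by compare_tree_sccs, applied
   to each element of the tree_scc->next chain (see unify_scc).  */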
1139
1140 static hash_table <tree_scc_hasher> tree_scc_hash;
1141 static struct obstack tree_scc_hash_obstack;
1142
1143 static unsigned long num_merged_types;
1144 static unsigned long num_prevailing_types;
1145 static unsigned long num_type_scc_trees;
1146 static unsigned long total_scc_size;
1147 static unsigned long num_sccs_read;
1148 static unsigned long total_scc_size_merged;
1149 static unsigned long num_sccs_merged;
1150 static unsigned long num_scc_compares;
1151 static unsigned long num_scc_compare_collisions;
1152
1153
1154 /* Compare the two entries T1 and T2 of two SCCs that are possibly equal,
1155 recursing through in-SCC tree edges. Returns true if the SCCs entered
1156 through T1 and T2 are equal and fills in *MAP with the pairs of
1157 SCC entries we visited, starting with (*MAP)[0] = T1 and (*MAP)[1] = T2. */
1158
1159 static bool
1160 compare_tree_sccs_1 (tree t1, tree t2, tree **map)
1161 {
1162 enum tree_code code;
1163
1164 /* Mark already visited nodes. */
1165 TREE_ASM_WRITTEN (t2) = 1;
1166
1167 /* Push the pair onto map. */
1168 (*map)[0] = t1;
1169 (*map)[1] = t2;
1170 *map = *map + 2;
1171
1172 /* Compare value-fields. */
1173 #define compare_values(X) \
1174 do { \
1175 if (X(t1) != X(t2)) \
1176 return false; \
1177 } while (0)
1178
1179 compare_values (TREE_CODE);
1180 code = TREE_CODE (t1);
1181
1182 if (!TYPE_P (t1))
1183 {
1184 compare_values (TREE_SIDE_EFFECTS);
1185 compare_values (TREE_CONSTANT);
1186 compare_values (TREE_READONLY);
1187 compare_values (TREE_PUBLIC);
1188 }
1189 compare_values (TREE_ADDRESSABLE);
1190 compare_values (TREE_THIS_VOLATILE);
1191 if (DECL_P (t1))
1192 compare_values (DECL_UNSIGNED);
1193 else if (TYPE_P (t1))
1194 compare_values (TYPE_UNSIGNED);
1195 if (TYPE_P (t1))
1196 compare_values (TYPE_ARTIFICIAL);
1197 else
1198 compare_values (TREE_NO_WARNING);
1199 compare_values (TREE_NOTHROW);
1200 compare_values (TREE_STATIC);
1201 if (code != TREE_BINFO)
1202 compare_values (TREE_PRIVATE);
1203 compare_values (TREE_PROTECTED);
1204 compare_values (TREE_DEPRECATED);
1205 if (TYPE_P (t1))
1206 {
1207 compare_values (TYPE_SATURATING);
1208 compare_values (TYPE_ADDR_SPACE);
1209 }
1210 else if (code == SSA_NAME)
1211 compare_values (SSA_NAME_IS_DEFAULT_DEF);
1212
1213 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1214 {
1215 if (!wi::eq_p (t1, t2))
1216 return false;
1217 }
1218
1219 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1220 {
1221 /* ??? No suitable compare routine available. */
1222 REAL_VALUE_TYPE r1 = TREE_REAL_CST (t1);
1223 REAL_VALUE_TYPE r2 = TREE_REAL_CST (t2);
1224 if (r1.cl != r2.cl
1225 || r1.decimal != r2.decimal
1226 || r1.sign != r2.sign
1227 || r1.signalling != r2.signalling
1228 || r1.canonical != r2.canonical
1229 || r1.uexp != r2.uexp)
1230 return false;
1231 for (unsigned i = 0; i < SIGSZ; ++i)
1232 if (r1.sig[i] != r2.sig[i])
1233 return false;
1234 }
1235
1236 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1237 if (!fixed_compare (EQ_EXPR,
1238 TREE_FIXED_CST_PTR (t1), TREE_FIXED_CST_PTR (t2)))
1239 return false;
1240
1241
1242 /* We don't want to compare locations, so there is nothing to compare
1243 for TS_DECL_MINIMAL. */
1244
1245 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1246 {
1247 compare_values (DECL_MODE);
1248 compare_values (DECL_NONLOCAL);
1249 compare_values (DECL_VIRTUAL_P);
1250 compare_values (DECL_IGNORED_P);
1251 compare_values (DECL_ABSTRACT);
1252 compare_values (DECL_ARTIFICIAL);
1253 compare_values (DECL_USER_ALIGN);
1254 compare_values (DECL_PRESERVE_P);
1255 compare_values (DECL_EXTERNAL);
1256 compare_values (DECL_GIMPLE_REG_P);
1257 compare_values (DECL_ALIGN);
1258 if (code == LABEL_DECL)
1259 {
1260 compare_values (EH_LANDING_PAD_NR);
1261 compare_values (LABEL_DECL_UID);
1262 }
1263 else if (code == FIELD_DECL)
1264 {
1265 compare_values (DECL_PACKED);
1266 compare_values (DECL_NONADDRESSABLE_P);
1267 compare_values (DECL_OFFSET_ALIGN);
1268 }
1269 else if (code == VAR_DECL)
1270 {
1271 compare_values (DECL_HAS_DEBUG_EXPR_P);
1272 compare_values (DECL_NONLOCAL_FRAME);
1273 }
1274 if (code == RESULT_DECL
1275 || code == PARM_DECL
1276 || code == VAR_DECL)
1277 {
1278 compare_values (DECL_BY_REFERENCE);
1279 if (code == VAR_DECL
1280 || code == PARM_DECL)
1281 compare_values (DECL_HAS_VALUE_EXPR_P);
1282 }
1283 }
1284
1285 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1286 compare_values (DECL_REGISTER);
1287
1288 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1289 {
1290 compare_values (DECL_COMMON);
1291 compare_values (DECL_DLLIMPORT_P);
1292 compare_values (DECL_WEAK);
1293 compare_values (DECL_SEEN_IN_BIND_EXPR_P);
1294 compare_values (DECL_COMDAT);
1295 compare_values (DECL_VISIBILITY);
1296 compare_values (DECL_VISIBILITY_SPECIFIED);
1297 if (code == VAR_DECL)
1298 {
1299 compare_values (DECL_HARD_REGISTER);
1300 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1301 compare_values (DECL_IN_CONSTANT_POOL);
1302 compare_values (DECL_TLS_MODEL);
1303 }
1304 if (VAR_OR_FUNCTION_DECL_P (t1))
1305 compare_values (DECL_INIT_PRIORITY);
1306 }
1307
1308 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1309 {
1310 compare_values (DECL_BUILT_IN_CLASS);
1311 compare_values (DECL_STATIC_CONSTRUCTOR);
1312 compare_values (DECL_STATIC_DESTRUCTOR);
1313 compare_values (DECL_UNINLINABLE);
1314 compare_values (DECL_POSSIBLY_INLINED);
1315 compare_values (DECL_IS_NOVOPS);
1316 compare_values (DECL_IS_RETURNS_TWICE);
1317 compare_values (DECL_IS_MALLOC);
1318 compare_values (DECL_IS_OPERATOR_NEW);
1319 compare_values (DECL_DECLARED_INLINE_P);
1320 compare_values (DECL_STATIC_CHAIN);
1321 compare_values (DECL_NO_INLINE_WARNING_P);
1322 compare_values (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT);
1323 compare_values (DECL_NO_LIMIT_STACK);
1324 compare_values (DECL_DISREGARD_INLINE_LIMITS);
1325 compare_values (DECL_PURE_P);
1326 compare_values (DECL_LOOPING_CONST_OR_PURE_P);
1327 compare_values (DECL_FINAL_P);
1328 compare_values (DECL_CXX_CONSTRUCTOR_P);
1329 compare_values (DECL_CXX_DESTRUCTOR_P);
1330 if (DECL_BUILT_IN_CLASS (t1) != NOT_BUILT_IN)
1331 compare_values (DECL_FUNCTION_CODE);
1332 if (DECL_STATIC_DESTRUCTOR (t1))
1333 compare_values (DECL_FINI_PRIORITY);
1334 }
1335
1336 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1337 {
1338 compare_values (TYPE_MODE);
1339 compare_values (TYPE_STRING_FLAG);
1340 compare_values (TYPE_NO_FORCE_BLK);
1341 compare_values (TYPE_NEEDS_CONSTRUCTING);
1342 if (RECORD_OR_UNION_TYPE_P (t1))
1343 {
1344 compare_values (TYPE_TRANSPARENT_AGGR);
1345 compare_values (TYPE_FINAL_P);
1346 }
1347 else if (code == ARRAY_TYPE)
1348 compare_values (TYPE_NONALIASED_COMPONENT);
1349 compare_values (TYPE_PACKED);
1350 compare_values (TYPE_RESTRICT);
1351 compare_values (TYPE_USER_ALIGN);
1352 compare_values (TYPE_READONLY);
1353 compare_values (TYPE_PRECISION);
1354 compare_values (TYPE_ALIGN);
1355 compare_values (TYPE_ALIAS_SET);
1356 }
1357
1358 /* We don't want to compare locations, so there is nothing to compare
1359 for TS_EXP. */
1360
1361 /* BLOCKs are function local and we don't merge anything there, so
1362 simply refuse to merge. */
1363 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
1364 return false;
1365
1366 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1367 if (strcmp (TRANSLATION_UNIT_LANGUAGE (t1),
1368 TRANSLATION_UNIT_LANGUAGE (t2)) != 0)
1369 return false;
1370
1371 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
1372 gcc_unreachable ();
1373
1374 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1375 if (memcmp (TREE_OPTIMIZATION (t1), TREE_OPTIMIZATION (t2),
1376 sizeof (struct cl_optimization)) != 0)
1377 return false;
1378
1379 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1380 if (vec_safe_length (BINFO_BASE_ACCESSES (t1))
1381 != vec_safe_length (BINFO_BASE_ACCESSES (t2)))
1382 return false;
1383
1384 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1385 compare_values (CONSTRUCTOR_NELTS);
1386
1387 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1388 if (IDENTIFIER_LENGTH (t1) != IDENTIFIER_LENGTH (t2)
1389 || memcmp (IDENTIFIER_POINTER (t1), IDENTIFIER_POINTER (t2),
1390 IDENTIFIER_LENGTH (t1)) != 0)
1391 return false;
1392
1393 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1394 if (TREE_STRING_LENGTH (t1) != TREE_STRING_LENGTH (t2)
1395 || memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
1396 TREE_STRING_LENGTH (t1)) != 0)
1397 return false;
1398
1399 if (code == OMP_CLAUSE)
1400 {
1401 compare_values (OMP_CLAUSE_CODE);
1402 switch (OMP_CLAUSE_CODE (t1))
1403 {
1404 case OMP_CLAUSE_DEFAULT:
1405 compare_values (OMP_CLAUSE_DEFAULT_KIND);
1406 break;
1407 case OMP_CLAUSE_SCHEDULE:
1408 compare_values (OMP_CLAUSE_SCHEDULE_KIND);
1409 break;
1410 case OMP_CLAUSE_DEPEND:
1411 compare_values (OMP_CLAUSE_DEPEND_KIND);
1412 break;
1413 case OMP_CLAUSE_MAP:
1414 compare_values (OMP_CLAUSE_MAP_KIND);
1415 break;
1416 case OMP_CLAUSE_PROC_BIND:
1417 compare_values (OMP_CLAUSE_PROC_BIND_KIND);
1418 break;
1419 case OMP_CLAUSE_REDUCTION:
1420 compare_values (OMP_CLAUSE_REDUCTION_CODE);
1421 compare_values (OMP_CLAUSE_REDUCTION_GIMPLE_INIT);
1422 compare_values (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE);
1423 break;
1424 default:
1425 break;
1426 }
1427 }
1428
1429 #undef compare_values
1430
1431
1432 /* Compare pointer fields. */
1433
1434 /* Recurse. Search & Replaced from DFS_write_tree_body.
1435 Folding the early checks into the compare_tree_edges recursion
1436 macro makes debugging way quicker as you are able to break on
1437 compare_tree_sccs_1 and simply finish until a call returns false
1438 to spot the SCC members with the difference. */
1439 #define compare_tree_edges(E1, E2) \
1440 do { \
1441 tree t1_ = (E1), t2_ = (E2); \
1442 if (t1_ != t2_ \
1443 && (!t1_ || !t2_ \
1444 || !TREE_VISITED (t2_) \
1445 || (!TREE_ASM_WRITTEN (t2_) \
1446 && !compare_tree_sccs_1 (t1_, t2_, map)))) \
1447 return false; \
1448 /* Only non-NULL trees outside of the SCC may compare equal. */ \
1449 gcc_checking_assert (t1_ != t2_ || (!t2_ || !TREE_VISITED (t2_))); \
1450 } while (0)
1451
1452 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1453 {
1454 if (code != IDENTIFIER_NODE)
1455 compare_tree_edges (TREE_TYPE (t1), TREE_TYPE (t2));
1456 }
1457
1458 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1459 {
1460 unsigned i;
1461 /* Note that the number of elements for EXPR has already been emitted
1462 in EXPR's header (see streamer_write_tree_header). */
1463 for (i = 0; i < VECTOR_CST_NELTS (t1); ++i)
1464 compare_tree_edges (VECTOR_CST_ELT (t1, i), VECTOR_CST_ELT (t2, i));
1465 }
1466
1467 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1468 {
1469 compare_tree_edges (TREE_REALPART (t1), TREE_REALPART (t2));
1470 compare_tree_edges (TREE_IMAGPART (t1), TREE_IMAGPART (t2));
1471 }
1472
1473 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1474 {
1475 compare_tree_edges (DECL_NAME (t1), DECL_NAME (t2));
1476 /* ??? Global decls from different TUs have non-matching
1477 TRANSLATION_UNIT_DECLs. Only consider a small set of
1478 decls equivalent, we should not end up merging others. */
1479 if ((code == TYPE_DECL
1480 || code == NAMESPACE_DECL
1481 || code == IMPORTED_DECL
1482 || code == CONST_DECL
1483 || (VAR_OR_FUNCTION_DECL_P (t1)
1484 && (TREE_PUBLIC (t1) || DECL_EXTERNAL (t1))))
1485 && DECL_FILE_SCOPE_P (t1) && DECL_FILE_SCOPE_P (t2))
1486 ;
1487 else
1488 compare_tree_edges (DECL_CONTEXT (t1), DECL_CONTEXT (t2));
1489 }
1490
1491 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1492 {
1493 compare_tree_edges (DECL_SIZE (t1), DECL_SIZE (t2));
1494 compare_tree_edges (DECL_SIZE_UNIT (t1), DECL_SIZE_UNIT (t2));
1495 compare_tree_edges (DECL_ATTRIBUTES (t1), DECL_ATTRIBUTES (t2));
1496 if ((code == VAR_DECL
1497 || code == PARM_DECL)
1498 && DECL_HAS_VALUE_EXPR_P (t1))
1499 compare_tree_edges (DECL_VALUE_EXPR (t1), DECL_VALUE_EXPR (t2));
1500 if (code == VAR_DECL
1501 && DECL_HAS_DEBUG_EXPR_P (t1))
1502 compare_tree_edges (DECL_DEBUG_EXPR (t1), DECL_DEBUG_EXPR (t2));
1503 /* LTO specific edges. */
1504 if (code != FUNCTION_DECL
1505 && code != TRANSLATION_UNIT_DECL)
1506 compare_tree_edges (DECL_INITIAL (t1), DECL_INITIAL (t2));
1507 }
1508
1509 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1510 {
1511 if (code == FUNCTION_DECL)
1512 {
1513 tree a1, a2;
1514 for (a1 = DECL_ARGUMENTS (t1), a2 = DECL_ARGUMENTS (t2);
1515 a1 || a2;
1516 a1 = TREE_CHAIN (a1), a2 = TREE_CHAIN (a2))
1517 compare_tree_edges (a1, a2);
1518 compare_tree_edges (DECL_RESULT (t1), DECL_RESULT (t2));
1519 }
1520 else if (code == TYPE_DECL)
1521 compare_tree_edges (DECL_ORIGINAL_TYPE (t1), DECL_ORIGINAL_TYPE (t2));
1522 compare_tree_edges (DECL_VINDEX (t1), DECL_VINDEX (t2));
1523 }
1524
1525 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1526 {
1527 /* Make sure we don't inadvertently set the assembler name. */
1528 if (DECL_ASSEMBLER_NAME_SET_P (t1))
1529 compare_tree_edges (DECL_ASSEMBLER_NAME (t1),
1530 DECL_ASSEMBLER_NAME (t2));
1531 }
1532
1533 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1534 {
1535 compare_tree_edges (DECL_FIELD_OFFSET (t1), DECL_FIELD_OFFSET (t2));
1536 compare_tree_edges (DECL_BIT_FIELD_TYPE (t1), DECL_BIT_FIELD_TYPE (t2));
1537 compare_tree_edges (DECL_BIT_FIELD_REPRESENTATIVE (t1),
1538 DECL_BIT_FIELD_REPRESENTATIVE (t2));
1539 compare_tree_edges (DECL_FIELD_BIT_OFFSET (t1),
1540 DECL_FIELD_BIT_OFFSET (t2));
1541 compare_tree_edges (DECL_FCONTEXT (t1), DECL_FCONTEXT (t2));
1542 }
1543
1544 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1545 {
1546 compare_tree_edges (DECL_FUNCTION_PERSONALITY (t1),
1547 DECL_FUNCTION_PERSONALITY (t2));
1548 /* DECL_FUNCTION_SPECIFIC_TARGET is not yet created. We compare
1549 the attribute list instead. */
1550 compare_tree_edges (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t1),
1551 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t2));
1552 }
1553
1554 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1555 {
1556 compare_tree_edges (TYPE_SIZE (t1), TYPE_SIZE (t2));
1557 compare_tree_edges (TYPE_SIZE_UNIT (t1), TYPE_SIZE_UNIT (t2));
1558 compare_tree_edges (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2));
1559 compare_tree_edges (TYPE_NAME (t1), TYPE_NAME (t2));
1560 /* Do not compare TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
1561 reconstructed during fixup. */
1562 /* Do not compare TYPE_NEXT_VARIANT, we reconstruct the variant lists
1563 during fixup. */
1564 compare_tree_edges (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2));
1565 /* ??? Global types from different TUs have non-matching
1566 TRANSLATION_UNIT_DECLs. Still merge them if they are otherwise
1567 equal. */
1568 if (TYPE_FILE_SCOPE_P (t1) && TYPE_FILE_SCOPE_P (t2))
1569 ;
1570 else
1571 compare_tree_edges (TYPE_CONTEXT (t1), TYPE_CONTEXT (t2));
1572 /* TYPE_CANONICAL is re-computed during type merging, so do not
1573 compare it here. */
1574 compare_tree_edges (TYPE_STUB_DECL (t1), TYPE_STUB_DECL (t2));
1575 }
1576
1577 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1578 {
1579 if (code == ENUMERAL_TYPE)
1580 compare_tree_edges (TYPE_VALUES (t1), TYPE_VALUES (t2));
1581 else if (code == ARRAY_TYPE)
1582 compare_tree_edges (TYPE_DOMAIN (t1), TYPE_DOMAIN (t2));
1583 else if (RECORD_OR_UNION_TYPE_P (t1))
1584 {
1585 tree f1, f2;
1586 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
1587 f1 || f2;
1588 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1589 compare_tree_edges (f1, f2);
1590 compare_tree_edges (TYPE_BINFO (t1), TYPE_BINFO (t2));
1591 }
1592 else if (code == FUNCTION_TYPE
1593 || code == METHOD_TYPE)
1594 compare_tree_edges (TYPE_ARG_TYPES (t1), TYPE_ARG_TYPES (t2));
1595 if (!POINTER_TYPE_P (t1))
1596 compare_tree_edges (TYPE_MINVAL (t1), TYPE_MINVAL (t2));
1597 compare_tree_edges (TYPE_MAXVAL (t1), TYPE_MAXVAL (t2));
1598 }
1599
1600 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1601 {
1602 compare_tree_edges (TREE_PURPOSE (t1), TREE_PURPOSE (t2));
1603 compare_tree_edges (TREE_VALUE (t1), TREE_VALUE (t2));
1604 compare_tree_edges (TREE_CHAIN (t1), TREE_CHAIN (t2));
1605 }
1606
1607 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1608 for (int i = 0; i < TREE_VEC_LENGTH (t1); i++)
1609 compare_tree_edges (TREE_VEC_ELT (t1, i), TREE_VEC_ELT (t2, i));
1610
1611 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1612 {
1613 for (int i = 0; i < TREE_OPERAND_LENGTH (t1); i++)
1614 compare_tree_edges (TREE_OPERAND (t1, i),
1615 TREE_OPERAND (t2, i));
1616
1617 /* BLOCKs are function local and we don't merge anything there. */
1618 if (TREE_BLOCK (t1) || TREE_BLOCK (t2))
1619 return false;
1620 }
1621
1622 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1623 {
1624 unsigned i;
1625 tree t;
1626 /* Lengths have already been compared above. */
1627 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t1), i, t)
1628 compare_tree_edges (t, BINFO_BASE_BINFO (t2, i));
1629 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t1), i, t)
1630 compare_tree_edges (t, BINFO_BASE_ACCESS (t2, i));
1631 compare_tree_edges (BINFO_OFFSET (t1), BINFO_OFFSET (t2));
1632 compare_tree_edges (BINFO_VTABLE (t1), BINFO_VTABLE (t2));
1633 compare_tree_edges (BINFO_VPTR_FIELD (t1), BINFO_VPTR_FIELD (t2));
1634 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1635 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1636 }
1637
1638 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1639 {
1640 unsigned i;
1641 tree index, value;
1642 /* Lengths have already been compared above. */
1643 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t1), i, index, value)
1644 {
1645 compare_tree_edges (index, CONSTRUCTOR_ELT (t2, i)->index);
1646 compare_tree_edges (value, CONSTRUCTOR_ELT (t2, i)->value);
1647 }
1648 }
1649
1650 if (code == OMP_CLAUSE)
1651 {
1652 int i;
1653
1654 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t1)]; i++)
1655 compare_tree_edges (OMP_CLAUSE_OPERAND (t1, i),
1656 OMP_CLAUSE_OPERAND (t2, i));
1657 compare_tree_edges (OMP_CLAUSE_CHAIN (t1), OMP_CLAUSE_CHAIN (t2));
1658 }
1659
1660 #undef compare_tree_edges
1661
1662 return true;
1663 }
1664
1665 /* Compare the tree scc SCC to the prevailing candidate PSCC, filling
1666 out MAP if they are equal. */
1667
1668 static bool
1669 compare_tree_sccs (tree_scc *pscc, tree_scc *scc,
1670 tree *map)
1671 {
1672 /* Assume SCC entry hashes are sorted by their cardinality, which
1673 means we can simply take the first n-tuple of equal hashes
1674 (which is recorded as entry_len) and do n SCC entry candidate
1675 comparisons. */
1676 for (unsigned i = 0; i < pscc->entry_len; ++i)
1677 {
1678 tree *mapp = map;
1679 num_scc_compare_collisions++;
1680 if (compare_tree_sccs_1 (pscc->entries[0], scc->entries[i], &mapp))
1681 {
1682 /* Equal - no need to reset TREE_VISITED or TREE_ASM_WRITTEN
1683 on the scc as all trees will be freed. */
1684 return true;
1685 }
1686 /* Reset TREE_ASM_WRITTEN on scc for the next compare or in case
1687 the SCC prevails. */
1688 for (unsigned j = 0; j < scc->len; ++j)
1689 TREE_ASM_WRITTEN (scc->entries[j]) = 0;
1690 }
1691
1692 return false;
1693 }
1694
1695 /* qsort comparison function to sort a map of two pointers by the 2nd
1696 pointer. */
1697
1698 static int
1699 cmp_tree (const void *p1_, const void *p2_)
1700 {
1701 tree *p1 = (tree *)(const_cast<void *>(p1_));
1702 tree *p2 = (tree *)(const_cast<void *>(p2_));
1703 if (p1[1] == p2[1])
1704 return 0;
1705 return ((uintptr_t)p1[1] < (uintptr_t)p2[1]) ? -1 : 1;
1706 }
1707
1708 /* Try to unify the SCC with nodes FROM to FROM + LEN in CACHE and
1709 hash value SCC_HASH with an already recorded SCC. Return true if
1710 that was successful, otherwise return false. */
1711
1712 static bool
1713 unify_scc (struct streamer_tree_cache_d *cache, unsigned from,
1714 unsigned len, unsigned scc_entry_len, hashval_t scc_hash)
1715 {
1716 bool unified_p = false;
1717 tree_scc *scc
1718 = (tree_scc *) alloca (sizeof (tree_scc) + (len - 1) * sizeof (tree));
1719 scc->next = NULL;
1720 scc->hash = scc_hash;
1721 scc->len = len;
1722 scc->entry_len = scc_entry_len;
1723 for (unsigned i = 0; i < len; ++i)
1724 {
1725 tree t = streamer_tree_cache_get_tree (cache, from + i);
1726 scc->entries[i] = t;
1727 /* Do not merge SCCs with local entities inside them. Also do
1728 not merge TRANSLATION_UNIT_DECLs. */
1729 if (TREE_CODE (t) == TRANSLATION_UNIT_DECL
1730 || (VAR_OR_FUNCTION_DECL_P (t)
1731 && !(TREE_PUBLIC (t) || DECL_EXTERNAL (t)))
1732 || TREE_CODE (t) == LABEL_DECL)
1733 {
1734 /* Avoid doing any work for these cases and do not bother to
1735 record the SCCs for further merging. */
1736 return false;
1737 }
1738 }
1739
1740 /* Look for the list of candidate SCCs to compare against. */
1741 tree_scc **slot;
1742 slot = tree_scc_hash.find_slot_with_hash (scc, scc_hash, INSERT);
1743 if (*slot)
1744 {
1745 /* Try unifying against each candidate. */
1746 num_scc_compares++;
1747
1748 /* Set TREE_VISITED on the scc so we can easily identify tree nodes
1749 outside of the scc when following tree edges. Make sure
1750 that TREE_ASM_WRITTEN is unset so we can use it as 2nd bit
1751 to track whether we visited the SCC member during the compare.
1752 We cannot use TREE_VISITED on the pscc members as the extended
1753 scc and pscc can overlap. */
1754 for (unsigned i = 0; i < scc->len; ++i)
1755 {
1756 TREE_VISITED (scc->entries[i]) = 1;
1757 gcc_checking_assert (!TREE_ASM_WRITTEN (scc->entries[i]));
1758 }
1759
1760 tree *map = XALLOCAVEC (tree, 2 * len);
1761 for (tree_scc *pscc = *slot; pscc; pscc = pscc->next)
1762 {
1763 if (!compare_tree_sccs (pscc, scc, map))
1764 continue;
1765
1766 /* Found an equal SCC. */
1767 unified_p = true;
1768 num_scc_compare_collisions--;
1769 num_sccs_merged++;
1770 total_scc_size_merged += len;
1771
1772 #ifdef ENABLE_CHECKING
1773 for (unsigned i = 0; i < len; ++i)
1774 {
1775 tree t = map[2*i+1];
1776 enum tree_code code = TREE_CODE (t);
1777 /* IDENTIFIER_NODEs should be singletons and are merged by the
1778 streamer. The others should be singletons, too, and we
1779 should not merge them in any way. */
1780 gcc_assert (code != TRANSLATION_UNIT_DECL
1781 && code != IDENTIFIER_NODE
1782 && !streamer_handle_as_builtin_p (t));
1783 }
1784 #endif
1785
1786 /* Fixup the streamer cache with the prevailing nodes according
1787 to the tree node mapping computed by compare_tree_sccs. */
1788 if (len == 1)
1789 streamer_tree_cache_replace_tree (cache, pscc->entries[0], from);
1790 else
1791 {
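	      /* Match every streamer-cache slot with its prevailing tree:
		 build a second map from cache index (FROM + i) to the SCC
		 member stored there, sort both maps by the SCC member
		 pointer so that corresponding entries line up, and then
		 redirect each cache slot to the prevailing node recorded
		 in MAP.  */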
1792 tree *map2 = XALLOCAVEC (tree, 2 * len);
1793 for (unsigned i = 0; i < len; ++i)
1794 {
1795 map2[i*2] = (tree)(uintptr_t)(from + i);
1796 map2[i*2+1] = scc->entries[i];
1797 }
1798 qsort (map2, len, 2 * sizeof (tree), cmp_tree);
1799 qsort (map, len, 2 * sizeof (tree), cmp_tree);
1800 for (unsigned i = 0; i < len; ++i)
1801 streamer_tree_cache_replace_tree (cache, map[2*i],
1802 (uintptr_t)map2[2*i]);
1803 }
1804
1805 /* Free the tree nodes from the read SCC. */
1806 for (unsigned i = 0; i < len; ++i)
1807 {
1808 enum tree_code code;
1809 if (TYPE_P (scc->entries[i]))
1810 num_merged_types++;
1811 code = TREE_CODE (scc->entries[i]);
1812 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1813 vec_free (CONSTRUCTOR_ELTS (scc->entries[i]));
1814 ggc_free (scc->entries[i]);
1815 }
1816
1817 break;
1818 }
1819
1820 /* Reset TREE_VISITED if we didn't unify the SCC with another. */
1821 if (!unified_p)
1822 for (unsigned i = 0; i < scc->len; ++i)
1823 TREE_VISITED (scc->entries[i]) = 0;
1824 }
1825
1826 /* If we didn't unify it to any candidate, duplicate the relevant
1827 pieces to permanent storage and link it into the chain. */
1828 if (!unified_p)
1829 {
1830 tree_scc *pscc
1831 = XOBNEWVAR (&tree_scc_hash_obstack, tree_scc, sizeof (tree_scc));
1832 memcpy (pscc, scc, sizeof (tree_scc));
1833 pscc->next = (*slot);
1834 *slot = pscc;
1835 }
1836 return unified_p;
1837 }
1838
1839
1840 /* Read all the symbols from buffer DATA, using descriptors in DECL_DATA.
1841 RESOLUTIONS is the set of symbols picked by the linker (read from the
1842 resolution file when the linker plugin is being used). */
1843
1844 static void
1845 lto_read_decls (struct lto_file_decl_data *decl_data, const void *data,
1846 vec<ld_plugin_symbol_resolution_t> resolutions)
1847 {
1848 const struct lto_decl_header *header = (const struct lto_decl_header *) data;
1849 const int decl_offset = sizeof (struct lto_decl_header);
1850 const int main_offset = decl_offset + header->decl_state_size;
1851 const int string_offset = main_offset + header->main_size;
1852 struct lto_input_block ib_main;
1853 struct data_in *data_in;
1854 unsigned int i;
1855 const uint32_t *data_ptr, *data_end;
1856 uint32_t num_decl_states;
1857
1858 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
1859 header->main_size);
1860
1861 data_in = lto_data_in_create (decl_data, (const char *) data + string_offset,
1862 header->string_size, resolutions);
1863
1864 /* We do not uniquify the pre-loaded cache entries; those are middle-end
1865 internal types that should not be merged. */
1866
1867 /* Read the global declarations and types. */
1868 while (ib_main.p < ib_main.len)
1869 {
1870 tree t;
1871 unsigned from = data_in->reader_cache->nodes.length ();
1872 /* Read and uniquify SCCs as in the input stream. */
1873 enum LTO_tags tag = streamer_read_record_start (&ib_main);
1874 if (tag == LTO_tree_scc)
1875 {
1876 unsigned len_;
1877 unsigned scc_entry_len;
1878 hashval_t scc_hash = lto_input_scc (&ib_main, data_in, &len_,
1879 &scc_entry_len);
1880 unsigned len = data_in->reader_cache->nodes.length () - from;
1881 gcc_assert (len == len_);
1882
1883 total_scc_size += len;
1884 num_sccs_read++;
1885
1886 /* We have the special case of size-1 SCCs that are pre-merged
1887 by means of identifier and string sharing for example.
1888 ??? Maybe we should avoid streaming those as SCCs. */
1889 tree first = streamer_tree_cache_get_tree (data_in->reader_cache,
1890 from);
1891 if (len == 1
1892 && (TREE_CODE (first) == IDENTIFIER_NODE
1893 || TREE_CODE (first) == INTEGER_CST
1894 || TREE_CODE (first) == TRANSLATION_UNIT_DECL
1895 || streamer_handle_as_builtin_p (first)))
1896 continue;
1897
1898 /* Try to unify the SCC with already existing ones. */
1899 if (!flag_ltrans
1900 && unify_scc (data_in->reader_cache, from,
1901 len, scc_entry_len, scc_hash))
1902 continue;
1903
1904 /* Do remaining fixup tasks for prevailing nodes. */
1905 bool seen_type = false;
1906 for (unsigned i = 0; i < len; ++i)
1907 {
1908 tree t = streamer_tree_cache_get_tree (data_in->reader_cache,
1909 from + i);
1910 /* Reconstruct the type variant and pointer-to/reference-to
1911 chains. */
1912 if (TYPE_P (t))
1913 {
1914 seen_type = true;
1915 num_prevailing_types++;
1916 lto_fixup_prevailing_type (t);
1917 }
1918 /* Compute the canonical type of all types.
1919 ??? Should be able to assert that !TYPE_CANONICAL. */
1920 if (TYPE_P (t) && !TYPE_CANONICAL (t))
1921 gimple_register_canonical_type (t);
1922 /* Link shared INTEGER_CSTs into the TYPE_CACHED_VALUES of their
1923 type when that type is also a member of this SCC. */
1924 if (TREE_CODE (t) == INTEGER_CST
1925 && !TREE_OVERFLOW (t))
1926 cache_integer_cst (t);
1927 /* Re-build DECL_FUNCTION_SPECIFIC_TARGET; we need that
1928 for both the WPA and LTRANS stages. */
1929 if (TREE_CODE (t) == FUNCTION_DECL)
1930 {
1931 tree attr = lookup_attribute ("target", DECL_ATTRIBUTES (t));
1932 if (attr)
1933 targetm.target_option.valid_attribute_p
1934 (t, NULL_TREE, TREE_VALUE (attr), 0);
1935 }
1936 /* Register TYPE_DECLs with the debuginfo machinery. */
1937 if (!flag_wpa
1938 && TREE_CODE (t) == TYPE_DECL)
1939 debug_hooks->type_decl (t, !DECL_FILE_SCOPE_P (t));
1940 if (!flag_ltrans)
1941 {
1942 /* Register variables and functions with the
1943 symbol table. */
1944 if (TREE_CODE (t) == VAR_DECL)
1945 lto_register_var_decl_in_symtab (data_in, t, from + i);
1946 else if (TREE_CODE (t) == FUNCTION_DECL
1947 && !DECL_BUILT_IN (t))
1948 lto_register_function_decl_in_symtab (data_in, t, from + i);
1949 /* Scan the tree for references to global functions or
1950 variables and record those for later fixup. */
1951 if (mentions_vars_p (t))
1952 vec_safe_push (tree_with_vars, t);
1953 }
1954 }
1955 if (seen_type)
1956 num_type_scc_trees += len;
1957 }
1958 else
1959 {
1960 /* Pickle stray references. */
1961 t = lto_input_tree_1 (&ib_main, data_in, tag, 0);
1962 gcc_assert (t && data_in->reader_cache->nodes.length () == from);
1963 }
1964 }
1965
1966 /* Read in lto_in_decl_state objects. */
1967 data_ptr = (const uint32_t *) ((const char*) data + decl_offset);
1968 data_end =
1969 (const uint32_t *) ((const char*) data_ptr + header->decl_state_size);
1970 num_decl_states = *data_ptr++;
1971
1972 gcc_assert (num_decl_states > 0);
1973 decl_data->global_decl_state = lto_new_in_decl_state ();
1974 data_ptr = lto_read_in_decl_state (data_in, data_ptr,
1975 decl_data->global_decl_state);
1976
1977 /* Read in per-function decl states and enter them in hash table. */
1978 decl_data->function_decl_states =
1979 htab_create_ggc (37, lto_hash_in_decl_state, lto_eq_in_decl_state, NULL);
1980
1981 for (i = 1; i < num_decl_states; i++)
1982 {
1983 struct lto_in_decl_state *state = lto_new_in_decl_state ();
1984 void **slot;
1985
1986 data_ptr = lto_read_in_decl_state (data_in, data_ptr, state);
1987 slot = htab_find_slot (decl_data->function_decl_states, state, INSERT);
1988 gcc_assert (*slot == NULL);
1989 *slot = state;
1990 }
1991
1992 if (data_ptr != data_end)
1993 internal_error ("bytecode stream: garbage at the end of symbols section");
1994
1995 /* Set the current decl state to be the global state. */
1996 decl_data->current_decl_state = decl_data->global_decl_state;
1997
1998 lto_data_in_delete (data_in);
1999 }
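/* Illustrative sketch (not part of the original sources): the offsets
   computed at the top of lto_read_decls above imply the following layout
   of a decls section; the sizes used in the example are hypothetical.

     [0 .. sizeof (lto_decl_header))                  header
     [decl_offset .. decl_offset + decl_state_size)   in-decl-state records
     [main_offset .. main_offset + main_size)         tree/SCC stream (ib_main)
     [string_offset .. string_offset + string_size)   string table

   For example, with decl_state_size == 64 and main_size == 4096 the
   string table would start at sizeof (lto_decl_header) + 64 + 4096.  */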
2000
2001 /* Custom version of strtoll, which is not portable. */
2002
2003 static int64_t
2004 lto_parse_hex (const char *p)
2005 {
2006 int64_t ret = 0;
2007
2008 for (; *p != '\0'; ++p)
2009 {
2010 char c = *p;
2011 unsigned char part;
2012 ret <<= 4;
2013 if (c >= '0' && c <= '9')
2014 part = c - '0';
2015 else if (c >= 'a' && c <= 'f')
2016 part = c - 'a' + 10;
2017 else if (c >= 'A' && c <= 'F')
2018 part = c - 'A' + 10;
2019 else
2020 internal_error ("could not parse hex number");
2021 ret |= part;
2022 }
2023
2024 return ret;
2025 }
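/* A minimal usage sketch (illustrative only): the helper accepts bare
   hexadecimal digits without a leading "0x", as produced by the
   "@0x%16s" fscanf pattern in lto_resolution_read below.

     lto_parse_hex ("1a2B") == 0x1a2b
     lto_parse_hex ("0")    == 0

   Any other character is a hard error via internal_error.  */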
2026
2027 /* Read the linker resolution for FILE from RESOLUTION and record it in
2028 the per-subfile data found via FILE_IDS. */
2029
2030 static void
2031 lto_resolution_read (splay_tree file_ids, FILE *resolution, lto_file *file)
2032 {
2033 /* We require that objects in the resolution file are in the same
2034 order as the lto1 command line. */
2035 unsigned int name_len;
2036 char *obj_name;
2037 unsigned int num_symbols;
2038 unsigned int i;
2039 struct lto_file_decl_data *file_data;
2040 splay_tree_node nd = NULL;
2041
2042 if (!resolution)
2043 return;
2044
2045 name_len = strlen (file->filename);
2046 obj_name = XNEWVEC (char, name_len + 1);
2047 fscanf (resolution, " "); /* Read white space. */
2048
2049 fread (obj_name, sizeof (char), name_len, resolution);
2050 obj_name[name_len] = '\0';
2051 if (filename_cmp (obj_name, file->filename) != 0)
2052 internal_error ("unexpected file name %s in linker resolution file. "
2053 "Expected %s", obj_name, file->filename);
2054 if (file->offset != 0)
2055 {
2056 int t;
2057 char offset_p[17];
2058 int64_t offset;
2059 t = fscanf (resolution, "@0x%16s", offset_p);
2060 if (t != 1)
2061 internal_error ("could not parse file offset");
2062 offset = lto_parse_hex (offset_p);
2063 if (offset != file->offset)
2064 internal_error ("unexpected offset");
2065 }
2066
2067 free (obj_name);
2068
2069 fscanf (resolution, "%u", &num_symbols);
2070
2071 for (i = 0; i < num_symbols; i++)
2072 {
2073 int t;
2074 unsigned index;
2075 unsigned HOST_WIDE_INT id;
2076 char r_str[27];
2077 enum ld_plugin_symbol_resolution r = (enum ld_plugin_symbol_resolution) 0;
2078 unsigned int j;
2079 unsigned int lto_resolution_str_len =
2080 sizeof (lto_resolution_str) / sizeof (char *);
2081 res_pair rp;
2082
2083 t = fscanf (resolution, "%u " HOST_WIDE_INT_PRINT_HEX_PURE " %26s %*[^\n]\n",
2084 &index, &id, r_str);
2085 if (t != 3)
2086 internal_error ("invalid line in the resolution file");
2087
2088 for (j = 0; j < lto_resolution_str_len; j++)
2089 {
2090 if (strcmp (lto_resolution_str[j], r_str) == 0)
2091 {
2092 r = (enum ld_plugin_symbol_resolution) j;
2093 break;
2094 }
2095 }
2096 if (j == lto_resolution_str_len)
2097 internal_error ("invalid resolution in the resolution file");
2098
2099 if (!(nd && lto_splay_tree_id_equal_p (nd->key, id)))
2100 {
2101 nd = lto_splay_tree_lookup (file_ids, id);
2102 if (nd == NULL)
2103 internal_error ("resolution sub id %wx not in object file", id);
2104 }
2105
2106 file_data = (struct lto_file_decl_data *)nd->value;
2107 /* The indexes are very sparse. To save memory, save them in a compact
2108 format that is only unpacked later when the subfile is processed. */
2109 rp.res = r;
2110 rp.index = index;
2111 file_data->respairs.safe_push (rp);
2112 if (file_data->max_index < index)
2113 file_data->max_index = index;
2114 }
2115 }
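/* Illustrative sketch of the text this parser consumes (the object name,
   ids and counts below are hypothetical; the resolution strings come from
   lto_resolution_str):

     /tmp/ccABC123.o
     3
     5 1a2b PREVAILING_DEF
     9 1a2b RESOLVED_IR
     12 1a2b PREEMPTED_REG

   i.e. the object file name (immediately followed by "@0x<offset>" when
   the object is an offset archive member), the number of symbols, then one
   line per symbol giving its index, its sub-module id in hex and its
   resolution; anything after the resolution string on a line is ignored.  */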
2116
2117 /* List of file_decl_datas */
2118 struct file_data_list
2119 {
2120 struct lto_file_decl_data *first, *last;
2121 };
2122
2123 /* Is the name for an id'ed LTO section? */
2124
2125 static int
2126 lto_section_with_id (const char *name, unsigned HOST_WIDE_INT *id)
2127 {
2128 const char *s;
2129
2130 if (strncmp (name, LTO_SECTION_NAME_PREFIX, strlen (LTO_SECTION_NAME_PREFIX)))
2131 return 0;
2132 s = strrchr (name, '.');
2133 return s && sscanf (s, "." HOST_WIDE_INT_PRINT_HEX_PURE, id) == 1;
2134 }
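/* Illustrative sketch (hypothetical section name): sub-module sections are
   named LTO_SECTION_NAME_PREFIX followed by the section kind and a trailing
   ".<hex id>".  For a name ending in ".1a2b", lto_section_with_id stores
   0x1a2b in *ID and returns nonzero; names that do not start with the LTO
   prefix are rejected immediately.  */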
2135
2136 /* Create (or look up) the file_data for the sub-file id of section LS and
add the section to its hash table. */
2137
2138 static int
2139 create_subid_section_table (struct lto_section_slot *ls, splay_tree file_ids,
2140 struct file_data_list *list)
2141 {
2142 struct lto_section_slot s_slot, *new_slot;
2143 unsigned HOST_WIDE_INT id;
2144 splay_tree_node nd;
2145 void **hash_slot;
2146 char *new_name;
2147 struct lto_file_decl_data *file_data;
2148
2149 if (!lto_section_with_id (ls->name, &id))
2150 return 1;
2151
2152 /* Find hash table of sub module id */
2153 nd = lto_splay_tree_lookup (file_ids, id);
2154 if (nd != NULL)
2155 {
2156 file_data = (struct lto_file_decl_data *)nd->value;
2157 }
2158 else
2159 {
2160 file_data = ggc_alloc<lto_file_decl_data> ();
2161 memset(file_data, 0, sizeof (struct lto_file_decl_data));
2162 file_data->id = id;
2163 file_data->section_hash_table = lto_obj_create_section_hash_table ();
2164 lto_splay_tree_insert (file_ids, id, file_data);
2165
2166 /* Maintain list in linker order */
2167 if (!list->first)
2168 list->first = file_data;
2169 if (list->last)
2170 list->last->next = file_data;
2171 list->last = file_data;
2172 }
2173
2174 /* Copy section into sub module hash table */
2175 new_name = XDUPVEC (char, ls->name, strlen (ls->name) + 1);
2176 s_slot.name = new_name;
2177 hash_slot = htab_find_slot (file_data->section_hash_table, &s_slot, INSERT);
2178 gcc_assert (*hash_slot == NULL);
2179
2180 new_slot = XDUP (struct lto_section_slot, ls);
2181 new_slot->name = new_name;
2182 *hash_slot = new_slot;
2183 return 1;
2184 }
2185
2186 /* Read declarations and other initializations for a FILE_DATA. */
2187
2188 static void
2189 lto_file_finalize (struct lto_file_decl_data *file_data, lto_file *file)
2190 {
2191 const char *data;
2192 size_t len;
2193 vec<ld_plugin_symbol_resolution_t>
2194 resolutions = vNULL;
2195 int i;
2196 res_pair *rp;
2197
2198 /* Create vector for fast access of resolution. We do this lazily
2199 to save memory. */
2200 resolutions.safe_grow_cleared (file_data->max_index + 1);
2201 for (i = 0; file_data->respairs.iterate (i, &rp); i++)
2202 resolutions[rp->index] = rp->res;
2203 file_data->respairs.release ();
2204
2205 file_data->renaming_hash_table = lto_create_renaming_table ();
2206 file_data->file_name = file->filename;
2207 data = lto_get_section_data (file_data, LTO_section_decls, NULL, &len);
2208 if (data == NULL)
2209 {
2210 internal_error ("cannot read LTO decls from %s", file_data->file_name);
2211 return;
2212 }
2213 /* Frees resolutions */
2214 lto_read_decls (file_data, data, resolutions);
2215 lto_free_section_data (file_data, LTO_section_decls, NULL, data, len);
2216 }
2217
2218 /* Finalize FILE_DATA in FILE and increase COUNT. */
2219
2220 static int
2221 lto_create_files_from_ids (lto_file *file, struct lto_file_decl_data *file_data,
2222 int *count)
2223 {
2224 lto_file_finalize (file_data, file);
2225 if (cgraph_dump_file)
2226 fprintf (cgraph_dump_file, "Creating file %s with sub id " HOST_WIDE_INT_PRINT_HEX "\n",
2227 file_data->file_name, file_data->id);
2228 (*count)++;
2229 return 0;
2230 }
2231
2232 /* Generate a TREE representation for all types and external decl
2233 entities in FILE.
2234
2235 Read all of the globals out of the file. Then read the cgraph
2236 and process the .o index into the cgraph nodes so that it can open
2237 the .o file to load the functions and ipa information. */
2238
2239 static struct lto_file_decl_data *
2240 lto_file_read (lto_file *file, FILE *resolution_file, int *count)
2241 {
2242 struct lto_file_decl_data *file_data = NULL;
2243 splay_tree file_ids;
2244 htab_t section_hash_table;
2245 struct lto_section_slot *section;
2246 struct file_data_list file_list;
2247 struct lto_section_list section_list;
2248
2249 memset (&section_list, 0, sizeof (struct lto_section_list));
2250 section_hash_table = lto_obj_build_section_table (file, &section_list);
2251
2252 /* Find all sub modules in the object and put their sections into new hash
2253 tables in a splay tree. */
2254 file_ids = lto_splay_tree_new ();
2255 memset (&file_list, 0, sizeof (struct file_data_list));
2256 for (section = section_list.first; section != NULL; section = section->next)
2257 create_subid_section_table (section, file_ids, &file_list);
2258
2259 /* Add resolutions to file ids */
2260 lto_resolution_read (file_ids, resolution_file, file);
2261
2262 /* Finalize each lto file for each submodule in the merged object */
2263 for (file_data = file_list.first; file_data != NULL; file_data = file_data->next)
2264 lto_create_files_from_ids (file, file_data, count);
2265
2266 splay_tree_delete (file_ids);
2267 htab_delete (section_hash_table);
2268
2269 return file_list.first;
2270 }
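/* Illustrative note (hypothetical scenario): if FILE is the result of an
   incremental link (e.g. "ld -r" of two LTO objects), its sections carry
   two different sub-module ids, so the loop above builds two
   lto_file_decl_data entries, chains them via ->next in linker order and
   returns the head of that chain; a plain single-module object yields a
   chain of length one.  */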
2271
2272 #if HAVE_MMAP_FILE && HAVE_SYSCONF && defined _SC_PAGE_SIZE
2273 #define LTO_MMAP_IO 1
2274 #endif
2275
2276 #if LTO_MMAP_IO
2277 /* Page size of machine is used for mmap and munmap calls. */
2278 static size_t page_mask;
2279 #endif
2280
2281 /* Get the section data of length LEN from FILE_DATA's file starting at
2282 OFFSET. The data segment must be freed by the caller when the
2283 caller is finished. Returns NULL if all was not well. */
2284
2285 static char *
2286 lto_read_section_data (struct lto_file_decl_data *file_data,
2287 intptr_t offset, size_t len)
2288 {
2289 char *result;
2290 static int fd = -1;
2291 static char *fd_name;
2292 #if LTO_MMAP_IO
2293 intptr_t computed_len;
2294 intptr_t computed_offset;
2295 intptr_t diff;
2296 #endif
2297
2298 /* Keep a single-entry file-descriptor cache. The last file we
2299 touched will get closed at exit.
2300 ??? Eventually we want to add a more sophisticated larger cache
2301 or rather fix function body streaming to not stream them in
2302 practically random order. */
2303 if (fd != -1
2304 && filename_cmp (fd_name, file_data->file_name) != 0)
2305 {
2306 free (fd_name);
2307 close (fd);
2308 fd = -1;
2309 }
2310 if (fd == -1)
2311 {
2312 fd = open (file_data->file_name, O_RDONLY|O_BINARY);
2313 if (fd == -1)
2314 {
2315 fatal_error ("Cannot open %s", file_data->file_name);
2316 return NULL;
2317 }
2318 fd_name = xstrdup (file_data->file_name);
2319 }
2320
2321 #if LTO_MMAP_IO
2322 if (!page_mask)
2323 {
2324 size_t page_size = sysconf (_SC_PAGE_SIZE);
2325 page_mask = ~(page_size - 1);
2326 }
2327
2328 computed_offset = offset & page_mask;
2329 diff = offset - computed_offset;
2330 computed_len = len + diff;
2331
2332 result = (char *) mmap (NULL, computed_len, PROT_READ, MAP_PRIVATE,
2333 fd, computed_offset);
2334 if (result == MAP_FAILED)
2335 {
2336 fatal_error ("Cannot map %s", file_data->file_name);
2337 return NULL;
2338 }
2339
2340 return result + diff;
2341 #else
2342 result = (char *) xmalloc (len);
2343 if (lseek (fd, offset, SEEK_SET) != offset
2344 || read (fd, result, len) != (ssize_t) len)
2345 {
2346 free (result);
2347 fatal_error ("Cannot read %s", file_data->file_name);
2348 result = NULL;
2349 }
2350 #ifdef __MINGW32__
2351 /* Native Windows doesn't support delayed unlink of an opened file. So
2352 we close the file here again. This produces a higher I/O load, but at least
2353 it avoids dangling file handles that would prevent the unlink. */
2354 free (fd_name);
2355 fd_name = NULL;
2356 close (fd);
2357 fd = -1;
2358 #endif
2359 return result;
2360 #endif
2361 }
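/* Worked example for the mmap path above (illustrative numbers): with a
   4096-byte page, page_mask == ~0xfff.  Requesting offset == 10000 and
   len == 500 gives

     computed_offset = 10000 & ~0xfff = 8192
     diff            = 10000 - 8192   = 1808
     computed_len    = 500 + 1808     = 2308

   so the mapping starts at the page boundary 8192 and the caller gets back
   result + 1808, i.e. a pointer to the requested offset.  */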
2362
2363
2364 /* Get the section data from FILE_DATA of SECTION_TYPE with NAME.
2365 NAME will be NULL unless the section type is for a function
2366 body. */
2367
2368 static const char *
2369 get_section_data (struct lto_file_decl_data *file_data,
2370 enum lto_section_type section_type,
2371 const char *name,
2372 size_t *len)
2373 {
2374 htab_t section_hash_table = file_data->section_hash_table;
2375 struct lto_section_slot *f_slot;
2376 struct lto_section_slot s_slot;
2377 const char *section_name = lto_get_section_name (section_type, name, file_data);
2378 char *data = NULL;
2379
2380 *len = 0;
2381 s_slot.name = section_name;
2382 f_slot = (struct lto_section_slot *) htab_find (section_hash_table, &s_slot);
2383 if (f_slot)
2384 {
2385 data = lto_read_section_data (file_data, f_slot->start, f_slot->len);
2386 *len = f_slot->len;
2387 }
2388
2389 free (CONST_CAST (char *, section_name));
2390 return data;
2391 }
2392
2393
2394 /* Free the section data from FILE_DATA of SECTION_TYPE with NAME that
2395 starts at OFFSET and has LEN bytes. */
2396
2397 static void
2398 free_section_data (struct lto_file_decl_data *file_data ATTRIBUTE_UNUSED,
2399 enum lto_section_type section_type ATTRIBUTE_UNUSED,
2400 const char *name ATTRIBUTE_UNUSED,
2401 const char *offset, size_t len ATTRIBUTE_UNUSED)
2402 {
2403 #if LTO_MMAP_IO
2404 intptr_t computed_len;
2405 intptr_t computed_offset;
2406 intptr_t diff;
2407 #endif
2408
2409 #if LTO_MMAP_IO
2410 computed_offset = ((intptr_t) offset) & page_mask;
2411 diff = (intptr_t) offset - computed_offset;
2412 computed_len = len + diff;
2413
2414 munmap ((caddr_t) computed_offset, computed_len);
2415 #else
2416 free (CONST_CAST(char *, offset));
2417 #endif
2418 }
2419
2420 static lto_file *current_lto_file;
2421
2422 /* Helper for qsort; compare partitions by size (in instructions).
2423 We sort from greatest to smallest so a parallel build doesn't stall on the
2424 longest compilation being started too late. */
2425
2426 static int
2427 cmp_partitions_size (const void *a, const void *b)
2428 {
2429 const struct ltrans_partition_def *pa
2430 = *(struct ltrans_partition_def *const *)a;
2431 const struct ltrans_partition_def *pb
2432 = *(struct ltrans_partition_def *const *)b;
2433 return pb->insns - pa->insns;
2434 }
2435
2436 /* Helper for qsort; compare partitions by the order of their first encoded symbol. */
2437
2438 static int
2439 cmp_partitions_order (const void *a, const void *b)
2440 {
2441 const struct ltrans_partition_def *pa
2442 = *(struct ltrans_partition_def *const *)a;
2443 const struct ltrans_partition_def *pb
2444 = *(struct ltrans_partition_def *const *)b;
2445 int ordera = -1, orderb = -1;
2446
2447 if (lto_symtab_encoder_size (pa->encoder))
2448 ordera = lto_symtab_encoder_deref (pa->encoder, 0)->order;
2449 if (lto_symtab_encoder_size (pb->encoder))
2450 orderb = lto_symtab_encoder_deref (pb->encoder, 0)->order;
2451 return orderb - ordera;
2452 }
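/* Illustrative sketch (hypothetical insn counts): with partitions whose
   insns are {10, 50, 30}, qsort with cmp_partitions_size orders them
   {50, 30, 10}, i.e. the largest partition is streamed first so a parallel
   build can start compiling it early.  cmp_partitions_order instead
   compares the symbol order of each partition's first encoded symbol.  */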
2453
2454 /* Actually stream out ENCODER into TEMP_FILENAME. */
2455
2456 static void
2457 do_stream_out (char *temp_filename, lto_symtab_encoder_t encoder)
2458 {
2459 lto_file *file = lto_obj_file_open (temp_filename, true);
2460 if (!file)
2461 fatal_error ("lto_obj_file_open() failed");
2462 lto_set_current_out_file (file);
2463
2464 ipa_write_optimization_summaries (encoder);
2465
2466 lto_set_current_out_file (NULL);
2467 lto_obj_file_close (file);
2468 free (file);
2469 }
2470
2471 /* Wait for forked process and signal errors. */
2472 #ifdef HAVE_WORKING_FORK
2473 static void
2474 wait_for_child ()
2475 {
2476 int status;
2477 do
2478 {
2479 #ifndef WCONTINUED
2480 #define WCONTINUED 0
2481 #endif
2482 int w = waitpid (0, &status, WUNTRACED | WCONTINUED);
2483 if (w == -1)
2484 fatal_error ("waitpid failed");
2485
2486 if (WIFEXITED (status) && WEXITSTATUS (status))
2487 fatal_error ("streaming subprocess failed");
2488 else if (WIFSIGNALED (status))
2489 fatal_error ("streaming subprocess was killed by signal");
2490 }
2491 while (!WIFEXITED (status) && !WIFSIGNALED (status));
2492 }
2493 #endif
2494
2495 /* Stream out ENCODER into TEMP_FILENAME.
2496 Fork if that seems to help. */
2497
2498 static void
2499 stream_out (char *temp_filename, lto_symtab_encoder_t encoder, bool last)
2500 {
2501 #ifdef HAVE_WORKING_FORK
2502 static int nruns;
2503
2504 if (lto_parallelism <= 1)
2505 {
2506 do_stream_out (temp_filename, encoder);
2507 return;
2508 }
2509
2510 /* Do not run more than LTO_PARALLELISM streamings at once.
2511 FIXME: we ignore the limits imposed by the jobserver. */
2512 if (lto_parallelism > 0 && nruns >= lto_parallelism)
2513 {
2514 wait_for_child ();
2515 nruns --;
2516 }
2517 /* If this is not the last parallel partition, execute new
2518 streaming process. */
2519 if (!last)
2520 {
2521 pid_t cpid = fork ();
2522
2523 if (!cpid)
2524 {
2525 setproctitle ("lto1-wpa-streaming");
2526 do_stream_out (temp_filename, encoder);
2527 exit (0);
2528 }
2529 /* Fork failed; let's do the job ourselves. */
2530 else if (cpid == -1)
2531 do_stream_out (temp_filename, encoder);
2532 else
2533 nruns++;
2534 }
2535 /* Last partition; stream it and wait for all children to die. */
2536 else
2537 {
2538 int i;
2539 do_stream_out (temp_filename, encoder);
2540 for (i = 0; i < nruns; i++)
2541 wait_for_child ();
2542 }
2543 asm_nodes_output = true;
2544 #else
2545 do_stream_out (temp_filename, encoder);
2546 #endif
2547 }
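/* A minimal stand-alone sketch of the fork/wait throttling pattern used in
   stream_out above, assuming POSIX fork/waitpid; "emit_partition" is a
   hypothetical stand-in for do_stream_out.  Kept under #if 0 so it is not
   compiled, and without the error handling that wait_for_child performs.  */
#if 0
#include <sys/types.h>
#include <sys/wait.h>
#include <unistd.h>

static void emit_partition (int i);	/* Hypothetical worker.  */

static void
emit_all (int n_partitions, int max_jobs)
{
  int running = 0;
  for (int i = 0; i < n_partitions - 1; i++)
    {
      if (running >= max_jobs)
	{
	  waitpid (-1, NULL, 0);	/* Reap one finished child.  */
	  running--;
	}
      pid_t pid = fork ();
      if (pid == 0)
	{
	  emit_partition (i);		/* Child does the streaming.  */
	  _exit (0);
	}
      else if (pid > 0)
	running++;
      else
	emit_partition (i);		/* Fork failed: do it ourselves.  */
    }
  emit_partition (n_partitions - 1);	/* Last partition in the parent.  */
  while (running-- > 0)
    waitpid (-1, NULL, 0);		/* Wait for the remaining children.  */
}
#endif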
2548
2549 /* Write all output files in WPA mode and the file with the list of
2550 LTRANS units. */
2551
2552 static void
2553 lto_wpa_write_files (void)
2554 {
2555 unsigned i, n_sets;
2556 ltrans_partition part;
2557 FILE *ltrans_output_list_stream;
2558 char *temp_filename;
2559 vec <char *>temp_filenames = vNULL;
2560 size_t blen;
2561
2562 /* Open the LTRANS output list. */
2563 if (!ltrans_output_list)
2564 fatal_error ("no LTRANS output list filename provided");
2565
2566 timevar_push (TV_WHOPR_WPA);
2567
2568 FOR_EACH_VEC_ELT (ltrans_partitions, i, part)
2569 lto_stats.num_output_symtab_nodes += lto_symtab_encoder_size (part->encoder);
2570
2571 timevar_pop (TV_WHOPR_WPA);
2572
2573 timevar_push (TV_WHOPR_WPA_IO);
2574
2575 /* Generate a prefix for the LTRANS unit files. */
2576 blen = strlen (ltrans_output_list);
2577 temp_filename = (char *) xmalloc (blen + sizeof ("2147483648.o"));
2578 strcpy (temp_filename, ltrans_output_list);
2579 if (blen > sizeof (".out")
2580 && strcmp (temp_filename + blen - sizeof (".out") + 1,
2581 ".out") == 0)
2582 temp_filename[blen - sizeof (".out") + 1] = '\0';
2583 blen = strlen (temp_filename);
2584
2585 n_sets = ltrans_partitions.length ();
2586
2587 /* Sort partitions by size so small ones are compiled last.
2588 FIXME: Even when not reordering we may want to output one list for parallel make
2589 and another for the final link command. */
2590
2591 if (!flag_profile_reorder_functions || !flag_profile_use)
2592 ltrans_partitions.qsort (flag_toplevel_reorder
2593 ? cmp_partitions_size
2594 : cmp_partitions_order);
2595
2596 for (i = 0; i < n_sets; i++)
2597 {
2598 ltrans_partition part = ltrans_partitions[i];
2599
2600 /* Write all the nodes in SET. */
2601 sprintf (temp_filename + blen, "%u.o", i);
2602
2603 if (!quiet_flag)
2604 fprintf (stderr, " %s (%s %i insns)", temp_filename, part->name, part->insns);
2605 if (cgraph_dump_file)
2606 {
2607 lto_symtab_encoder_iterator lsei;
2608
2609 fprintf (cgraph_dump_file, "Writing partition %s to file %s, %i insns\n",
2610 part->name, temp_filename, part->insns);
2611 fprintf (cgraph_dump_file, " Symbols in partition: ");
2612 for (lsei = lsei_start_in_partition (part->encoder); !lsei_end_p (lsei);
2613 lsei_next_in_partition (&lsei))
2614 {
2615 symtab_node *node = lsei_node (lsei);
2616 fprintf (cgraph_dump_file, "%s ", node->asm_name ());
2617 }
2618 fprintf (cgraph_dump_file, "\n Symbols in boundary: ");
2619 for (lsei = lsei_start (part->encoder); !lsei_end_p (lsei);
2620 lsei_next (&lsei))
2621 {
2622 symtab_node *node = lsei_node (lsei);
2623 if (!lto_symtab_encoder_in_partition_p (part->encoder, node))
2624 {
2625 fprintf (cgraph_dump_file, "%s ", node->asm_name ());
2626 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
2627 if (cnode
2628 && lto_symtab_encoder_encode_body_p (part->encoder, cnode))
2629 fprintf (cgraph_dump_file, "(body included)");
2630 else
2631 {
2632 varpool_node *vnode = dyn_cast <varpool_node *> (node);
2633 if (vnode
2634 && lto_symtab_encoder_encode_initializer_p (part->encoder, vnode))
2635 fprintf (cgraph_dump_file, "(initializer included)");
2636 }
2637 }
2638 }
2639 fprintf (cgraph_dump_file, "\n");
2640 }
2641 gcc_checking_assert (lto_symtab_encoder_size (part->encoder) || !i);
2642
2643 stream_out (temp_filename, part->encoder, i == n_sets - 1);
2644
2645 part->encoder = NULL;
2646
2647 temp_filenames.safe_push (xstrdup (temp_filename));
2648 }
2649 ltrans_output_list_stream = fopen (ltrans_output_list, "w");
2650 if (ltrans_output_list_stream == NULL)
2651 fatal_error ("opening LTRANS output list %s: %m", ltrans_output_list);
2652 for (i = 0; i < n_sets; i++)
2653 {
2654 unsigned int len = strlen (temp_filenames[i]);
2655 if (fwrite (temp_filenames[i], 1, len, ltrans_output_list_stream) < len
2656 || fwrite ("\n", 1, 1, ltrans_output_list_stream) < 1)
2657 fatal_error ("writing to LTRANS output list %s: %m",
2658 ltrans_output_list);
2659 free (temp_filenames[i]);
2660 }
2661 temp_filenames.release();
2662
2663 lto_stats.num_output_files += n_sets;
2664
2665 /* Close the LTRANS output list. */
2666 if (fclose (ltrans_output_list_stream))
2667 fatal_error ("closing LTRANS output list %s: %m", ltrans_output_list);
2668
2669 free_ltrans_partitions();
2670 free (temp_filename);
2671
2672 timevar_pop (TV_WHOPR_WPA_IO);
2673 }
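/* Illustrative sketch (hypothetical file name): if ltrans_output_list were
   "prog.ltrans.out", the ".out" suffix is stripped above and the
   per-partition objects become

     prog.ltrans0.o
     prog.ltrans1.o
     ...

   and the list file prog.ltrans.out then contains exactly those names, one
   per line, for the driver to compile and link.  */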
2674
2675
2676 /* If TT is a variable or function decl replace it with its
2677 prevailing variant. */
2678 #define LTO_SET_PREVAIL(tt) \
2679 do {\
2680 if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \
2681 && (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \
2682 { \
2683 tt = lto_symtab_prevailing_decl (tt); \
2684 fixed = true; \
2685 } \
2686 } while (0)
2687
2688 /* Ensure that TT isn't a replaceable var or function decl. */
2689 #define LTO_NO_PREVAIL(tt) \
2690 gcc_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
2691
2692 /* Given a tree T replace all fields referring to variables or functions
2693 with their prevailing variant. */
2694 static void
2695 lto_fixup_prevailing_decls (tree t)
2696 {
2697 enum tree_code code = TREE_CODE (t);
2698 bool fixed = false;
2699
2700 gcc_checking_assert (code != TREE_BINFO);
2701 LTO_NO_PREVAIL (TREE_TYPE (t));
2702 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
2703 LTO_NO_PREVAIL (TREE_CHAIN (t));
2704 if (DECL_P (t))
2705 {
2706 LTO_NO_PREVAIL (DECL_NAME (t));
2707 LTO_SET_PREVAIL (DECL_CONTEXT (t));
2708 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
2709 {
2710 LTO_SET_PREVAIL (DECL_SIZE (t));
2711 LTO_SET_PREVAIL (DECL_SIZE_UNIT (t));
2712 LTO_SET_PREVAIL (DECL_INITIAL (t));
2713 LTO_NO_PREVAIL (DECL_ATTRIBUTES (t));
2714 LTO_SET_PREVAIL (DECL_ABSTRACT_ORIGIN (t));
2715 }
2716 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
2717 {
2718 LTO_NO_PREVAIL (t->decl_with_vis.assembler_name);
2719 }
2720 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
2721 {
2722 LTO_NO_PREVAIL (DECL_ARGUMENT_FLD (t));
2723 LTO_NO_PREVAIL (DECL_RESULT_FLD (t));
2724 LTO_NO_PREVAIL (DECL_VINDEX (t));
2725 }
2726 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
2727 LTO_SET_PREVAIL (DECL_FUNCTION_PERSONALITY (t));
2728 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
2729 {
2730 LTO_SET_PREVAIL (DECL_FIELD_OFFSET (t));
2731 LTO_NO_PREVAIL (DECL_BIT_FIELD_TYPE (t));
2732 LTO_NO_PREVAIL (DECL_QUALIFIER (t));
2733 LTO_NO_PREVAIL (DECL_FIELD_BIT_OFFSET (t));
2734 LTO_NO_PREVAIL (DECL_FCONTEXT (t));
2735 }
2736 }
2737 else if (TYPE_P (t))
2738 {
2739 LTO_NO_PREVAIL (TYPE_CACHED_VALUES (t));
2740 LTO_SET_PREVAIL (TYPE_SIZE (t));
2741 LTO_SET_PREVAIL (TYPE_SIZE_UNIT (t));
2742 LTO_NO_PREVAIL (TYPE_ATTRIBUTES (t));
2743 LTO_NO_PREVAIL (TYPE_NAME (t));
2744
2745 LTO_SET_PREVAIL (TYPE_MINVAL (t));
2746 LTO_SET_PREVAIL (TYPE_MAXVAL (t));
2747 LTO_NO_PREVAIL (t->type_non_common.binfo);
2748
2749 LTO_SET_PREVAIL (TYPE_CONTEXT (t));
2750
2751 LTO_NO_PREVAIL (TYPE_CANONICAL (t));
2752 LTO_NO_PREVAIL (TYPE_MAIN_VARIANT (t));
2753 LTO_NO_PREVAIL (TYPE_NEXT_VARIANT (t));
2754 }
2755 else if (EXPR_P (t))
2756 {
2757 int i;
2758 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
2759 LTO_SET_PREVAIL (TREE_OPERAND (t, i));
2760 }
2761 else if (TREE_CODE (t) == CONSTRUCTOR)
2762 {
2763 unsigned i;
2764 tree val;
2765 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
2766 LTO_SET_PREVAIL (val);
2767 }
2768 else
2769 {
2770 switch (code)
2771 {
2772 case TREE_LIST:
2773 LTO_SET_PREVAIL (TREE_VALUE (t));
2774 LTO_SET_PREVAIL (TREE_PURPOSE (t));
2775 LTO_NO_PREVAIL (TREE_PURPOSE (t));
2776 break;
2777 default:
2778 gcc_unreachable ();
2779 }
2780 }
2781 /* If we fixed nothing, then we missed something seen by
2782 mentions_vars_p. */
2783 gcc_checking_assert (fixed);
2784 }
2785 #undef LTO_SET_PREVAIL
2786 #undef LTO_NO_PREVAIL
2787
2788 /* Helper function of lto_fixup_decls. Walks the var and fn streams in STATE,
2789 replaces var and function decls with the corresponding prevailing def. */
2790
2791 static void
2792 lto_fixup_state (struct lto_in_decl_state *state)
2793 {
2794 unsigned i, si;
2795 struct lto_tree_ref_table *table;
2796
2797 /* Although we only want to replace FUNCTION_DECLs and VAR_DECLs,
2798 we still need to walk from all DECLs to find the reachable
2799 FUNCTION_DECLs and VAR_DECLs. */
2800 for (si = 0; si < LTO_N_DECL_STREAMS; si++)
2801 {
2802 table = &state->streams[si];
2803 for (i = 0; i < table->size; i++)
2804 {
2805 tree *tp = table->trees + i;
2806 if (VAR_OR_FUNCTION_DECL_P (*tp)
2807 && (TREE_PUBLIC (*tp) || DECL_EXTERNAL (*tp)))
2808 *tp = lto_symtab_prevailing_decl (*tp);
2809 }
2810 }
2811 }
2812
2813 /* A callback of htab_traverse. Just extracts a state from SLOT
2814 and calls lto_fixup_state. */
2815
2816 static int
2817 lto_fixup_state_aux (void **slot, void *aux ATTRIBUTE_UNUSED)
2818 {
2819 struct lto_in_decl_state *state = (struct lto_in_decl_state *) *slot;
2820 lto_fixup_state (state);
2821 return 1;
2822 }
2823
2824 /* Fix the decls from all FILES. Replaces each decl with the corresponding
2825 prevailing one. */
2826
2827 static void
2828 lto_fixup_decls (struct lto_file_decl_data **files)
2829 {
2830 unsigned int i;
2831 tree t;
2832
2833 if (tree_with_vars)
2834 FOR_EACH_VEC_ELT ((*tree_with_vars), i, t)
2835 lto_fixup_prevailing_decls (t);
2836
2837 for (i = 0; files[i]; i++)
2838 {
2839 struct lto_file_decl_data *file = files[i];
2840 struct lto_in_decl_state *state = file->global_decl_state;
2841 lto_fixup_state (state);
2842
2843 htab_traverse (file->function_decl_states, lto_fixup_state_aux, NULL);
2844 }
2845 }
2846
2847 static GTY((length ("lto_stats.num_input_files + 1"))) struct lto_file_decl_data **all_file_decl_data;
2848
2849 /* Turn the file data for sub-files into a single array, so that they look
2850 like separate files to further passes. */
2851
2852 static void
2853 lto_flatten_files (struct lto_file_decl_data **orig, int count, int last_file_ix)
2854 {
2855 struct lto_file_decl_data *n, *next;
2856 int i, k;
2857
2858 lto_stats.num_input_files = count;
2859 all_file_decl_data
2860 = ggc_cleared_vec_alloc<lto_file_decl_data_ptr> (count + 1);
2861 /* Set the hooks so that all of the ipa passes can read in their data. */
2862 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
2863 for (i = 0, k = 0; i < last_file_ix; i++)
2864 {
2865 for (n = orig[i]; n != NULL; n = next)
2866 {
2867 all_file_decl_data[k++] = n;
2868 next = n->next;
2869 n->next = NULL;
2870 }
2871 }
2872 all_file_decl_data[k] = NULL;
2873 gcc_assert (k == count);
2874 }
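/* Illustrative sketch (hypothetical shapes): with two input objects where
   the first contains sub-files A and B (chained via ->next) and the second
   contains only C, lto_flatten_files turns

     orig = { A -> B, C }        (last_file_ix == 2, count == 3)

   into the flat, NULL-terminated array

     all_file_decl_data = { A, B, C, NULL }

   with every ->next pointer cleared, so later passes see three independent
   files.  */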
2875
2876 /* Input file data before flattening (i.e. splitting them into subfiles to support
2877 incremental linking). */
2878 static int real_file_count;
2879 static GTY((length ("real_file_count + 1"))) struct lto_file_decl_data **real_file_decl_data;
2880
2881 static void print_lto_report_1 (void);
2882
2883 /* Read all the symbols from the input files FNAMES. NFILES is the
2884 number of files requested in the command line. Instantiate a
2885 global call graph by aggregating all the sub-graphs found in each
2886 file. */
2887
2888 static void
2889 read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
2890 {
2891 unsigned int i, last_file_ix;
2892 FILE *resolution;
2893 int count = 0;
2894 struct lto_file_decl_data **decl_data;
2895 void **res;
2896 symtab_node *snode;
2897
2898 init_cgraph ();
2899
2900 timevar_push (TV_IPA_LTO_DECL_IN);
2901
2902 real_file_decl_data
2903 = decl_data = ggc_cleared_vec_alloc<lto_file_decl_data_ptr> (nfiles + 1);
2904 real_file_count = nfiles;
2905
2906 /* Read the resolution file. */
2907 resolution = NULL;
2908 if (resolution_file_name)
2909 {
2910 int t;
2911 unsigned num_objects;
2912
2913 resolution = fopen (resolution_file_name, "r");
2914 if (resolution == NULL)
2915 fatal_error ("could not open symbol resolution file: %m");
2916
2917 t = fscanf (resolution, "%u", &num_objects);
2918 gcc_assert (t == 1);
2919
2920 /* True, since the plugin splits the archives. */
2921 gcc_assert (num_objects == nfiles);
2922 }
2923 cgraph_state = CGRAPH_LTO_STREAMING;
2924
2925 canonical_type_hash_cache = new pointer_map <hashval_t>;
2926 gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
2927 gimple_canonical_type_eq, 0);
2928 gcc_obstack_init (&tree_scc_hash_obstack);
2929 tree_scc_hash.create (4096);
2930
2931 /* Register the common node types with the canonical type machinery so
2932 we properly share alias-sets across languages and TUs. Do not
2933 expose the common nodes as type merge targets - those that should be
2934 are already exposed that way by pre-loading the LTO streamer caches.
2935 Do two passes - first clear TYPE_CANONICAL and then re-compute it. */
2936 for (i = 0; i < itk_none; ++i)
2937 lto_register_canonical_types (integer_types[i], true);
2938 for (i = 0; i < stk_type_kind_last; ++i)
2939 lto_register_canonical_types (sizetype_tab[i], true);
2940 for (i = 0; i < TI_MAX; ++i)
2941 lto_register_canonical_types (global_trees[i], true);
2942 for (i = 0; i < itk_none; ++i)
2943 lto_register_canonical_types (integer_types[i], false);
2944 for (i = 0; i < stk_type_kind_last; ++i)
2945 lto_register_canonical_types (sizetype_tab[i], false);
2946 for (i = 0; i < TI_MAX; ++i)
2947 lto_register_canonical_types (global_trees[i], false);
2948
2949 if (!quiet_flag)
2950 fprintf (stderr, "Reading object files:");
2951
2952 /* Read all of the object files specified on the command line. */
2953 for (i = 0, last_file_ix = 0; i < nfiles; ++i)
2954 {
2955 struct lto_file_decl_data *file_data = NULL;
2956 if (!quiet_flag)
2957 {
2958 fprintf (stderr, " %s", fnames[i]);
2959 fflush (stderr);
2960 }
2961
2962 current_lto_file = lto_obj_file_open (fnames[i], false);
2963 if (!current_lto_file)
2964 break;
2965
2966 file_data = lto_file_read (current_lto_file, resolution, &count);
2967 if (!file_data)
2968 {
2969 lto_obj_file_close (current_lto_file);
2970 free (current_lto_file);
2971 current_lto_file = NULL;
2972 break;
2973 }
2974
2975 decl_data[last_file_ix++] = file_data;
2976
2977 lto_obj_file_close (current_lto_file);
2978 free (current_lto_file);
2979 current_lto_file = NULL;
2980 }
2981
2982 lto_flatten_files (decl_data, count, last_file_ix);
2983 lto_stats.num_input_files = count;
2984 ggc_free(decl_data);
2985 real_file_decl_data = NULL;
2986
2987 if (resolution_file_name)
2988 fclose (resolution);
2989
2990 /* Show the LTO report before launching LTRANS. */
2991 if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
2992 print_lto_report_1 ();
2993
2994 /* Free gimple type merging datastructures. */
2995 tree_scc_hash.dispose ();
2996 obstack_free (&tree_scc_hash_obstack, NULL);
2997 htab_delete (gimple_canonical_types);
2998 gimple_canonical_types = NULL;
2999 delete canonical_type_hash_cache;
3000 canonical_type_hash_cache = NULL;
3001
3002 /* At this stage we know that the majority of GGC memory is reachable.
3003 Growing the limits prevents unnecessary invocations of GGC. */
3004 ggc_grow ();
3005 ggc_collect ();
3006
3007 /* Set the hooks so that all of the ipa passes can read in their data. */
3008 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
3009
3010 timevar_pop (TV_IPA_LTO_DECL_IN);
3011
3012 if (!quiet_flag)
3013 fprintf (stderr, "\nReading the callgraph\n");
3014
3015 timevar_push (TV_IPA_LTO_CGRAPH_IO);
3016 /* Read the symtab. */
3017 input_symtab ();
3018
3019 /* Store resolutions into the symbol table. */
3020
3021 FOR_EACH_SYMBOL (snode)
3022 if (symtab_real_symbol_p (snode)
3023 && snode->lto_file_data
3024 && snode->lto_file_data->resolution_map
3025 && (res = pointer_map_contains (snode->lto_file_data->resolution_map,
3026 snode->decl)))
3027 snode->resolution
3028 = (enum ld_plugin_symbol_resolution)(size_t)*res;
3029 for (i = 0; all_file_decl_data[i]; i++)
3030 if (all_file_decl_data[i]->resolution_map)
3031 {
3032 pointer_map_destroy (all_file_decl_data[i]->resolution_map);
3033 all_file_decl_data[i]->resolution_map = NULL;
3034 }
3035
3036 timevar_pop (TV_IPA_LTO_CGRAPH_IO);
3037
3038 if (!quiet_flag)
3039 fprintf (stderr, "Merging declarations\n");
3040
3041 timevar_push (TV_IPA_LTO_DECL_MERGE);
3042 /* Merge global decls. In ltrans mode we read the merged cgraph, so we do
3043 not need to care about resolving symbols again; we only need to replace
3044 duplicated declarations read from the callgraph and from function
3045 sections. */
3046 if (!flag_ltrans)
3047 {
3048 lto_symtab_merge_decls ();
3049
3050 /* If there were errors during symbol merging bail out, we have no
3051 good way to recover here. */
3052 if (seen_error ())
3053 fatal_error ("errors during merging of translation units");
3054
3055 /* Fixup all decls. */
3056 lto_fixup_decls (all_file_decl_data);
3057 }
3058 if (tree_with_vars)
3059 ggc_free (tree_with_vars);
3060 tree_with_vars = NULL;
3061 ggc_collect ();
3062
3063 timevar_pop (TV_IPA_LTO_DECL_MERGE);
3064 /* Each pass will set the appropriate timer. */
3065
3066 if (!quiet_flag)
3067 fprintf (stderr, "Reading summaries\n");
3068
3069 /* Read the IPA summary data. */
3070 if (flag_ltrans)
3071 ipa_read_optimization_summaries ();
3072 else
3073 ipa_read_summaries ();
3074
3075 for (i = 0; all_file_decl_data[i]; i++)
3076 {
3077 gcc_assert (all_file_decl_data[i]->symtab_node_encoder);
3078 lto_symtab_encoder_delete (all_file_decl_data[i]->symtab_node_encoder);
3079 all_file_decl_data[i]->symtab_node_encoder = NULL;
3080 lto_free_function_in_decl_state (all_file_decl_data[i]->global_decl_state);
3081 all_file_decl_data[i]->global_decl_state = NULL;
3082 all_file_decl_data[i]->current_decl_state = NULL;
3083 }
3084
3085 /* Finally merge the cgraph according to the decl merging decisions. */
3086 timevar_push (TV_IPA_LTO_CGRAPH_MERGE);
3087 if (cgraph_dump_file)
3088 {
3089 fprintf (cgraph_dump_file, "Before merging:\n");
3090 dump_symtab (cgraph_dump_file);
3091 }
3092 lto_symtab_merge_symbols ();
3093 ggc_collect ();
3094 cgraph_state = CGRAPH_STATE_IPA_SSA;
3095
3096 timevar_pop (TV_IPA_LTO_CGRAPH_MERGE);
3097
3098 timevar_push (TV_IPA_LTO_DECL_INIT_IO);
3099
3100 /* Indicate that the cgraph is built and ready. */
3101 cgraph_function_flags_ready = true;
3102
3103 timevar_pop (TV_IPA_LTO_DECL_INIT_IO);
3104 ggc_free (all_file_decl_data);
3105 all_file_decl_data = NULL;
3106 }
3107
3108
3109 /* Materialize all the bodies for all the nodes in the callgraph. */
3110
3111 static void
3112 materialize_cgraph (void)
3113 {
3114 struct cgraph_node *node;
3115 timevar_id_t lto_timer;
3116
3117 if (!quiet_flag)
3118 fprintf (stderr,
3119 flag_wpa ? "Materializing decls:" : "Reading function bodies:");
3120
3121 /* Now that we have input the cgraph, we need to clear all of the aux
3122 nodes and read the functions if we are not running in WPA mode. */
3123 timevar_push (TV_IPA_LTO_GIMPLE_IN);
3124
3125 FOR_EACH_FUNCTION (node)
3126 {
3127 if (node->lto_file_data)
3128 {
3129 lto_materialize_function (node);
3130 lto_stats.num_input_cgraph_nodes++;
3131 }
3132 }
3133
3134 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
3135
3136 /* Start the appropriate timer depending on the mode that we are
3137 operating in. */
3138 lto_timer = (flag_wpa) ? TV_WHOPR_WPA
3139 : (flag_ltrans) ? TV_WHOPR_LTRANS
3140 : TV_LTO;
3141 timevar_push (lto_timer);
3142
3143 current_function_decl = NULL;
3144 set_cfun (NULL);
3145
3146 if (!quiet_flag)
3147 fprintf (stderr, "\n");
3148
3149 timevar_pop (lto_timer);
3150 }
3151
3152
3153 /* Show various memory usage statistics related to LTO. */
3154 static void
3155 print_lto_report_1 (void)
3156 {
3157 const char *pfx = (flag_lto) ? "LTO" : (flag_wpa) ? "WPA" : "LTRANS";
3158 fprintf (stderr, "%s statistics\n", pfx);
3159
3160 fprintf (stderr, "[%s] read %lu SCCs of average size %f\n",
3161 pfx, num_sccs_read, total_scc_size / (double)num_sccs_read);
3162 fprintf (stderr, "[%s] %lu tree bodies read in total\n", pfx, total_scc_size);
3163 if (flag_wpa && tree_scc_hash.is_created ())
3164 {
3165 fprintf (stderr, "[%s] tree SCC table: size %ld, %ld elements, "
3166 "collision ratio: %f\n", pfx,
3167 (long) tree_scc_hash.size (),
3168 (long) tree_scc_hash.elements (),
3169 tree_scc_hash.collisions ());
3170 hash_table<tree_scc_hasher>::iterator hiter;
3171 tree_scc *scc, *max_scc = NULL;
3172 unsigned max_length = 0;
3173 FOR_EACH_HASH_TABLE_ELEMENT (tree_scc_hash, scc, x, hiter)
3174 {
3175 unsigned length = 0;
3176 tree_scc *s = scc;
3177 for (; s; s = s->next)
3178 length++;
3179 if (length > max_length)
3180 {
3181 max_length = length;
3182 max_scc = scc;
3183 }
3184 }
3185 fprintf (stderr, "[%s] tree SCC max chain length %u (size %u)\n",
3186 pfx, max_length, max_scc->len);
3187 fprintf (stderr, "[%s] Compared %lu SCCs, %lu collisions (%f)\n", pfx,
3188 num_scc_compares, num_scc_compare_collisions,
3189 num_scc_compare_collisions / (double) num_scc_compares);
3190 fprintf (stderr, "[%s] Merged %lu SCCs\n", pfx, num_sccs_merged);
3191 fprintf (stderr, "[%s] Merged %lu tree bodies\n", pfx,
3192 total_scc_size_merged);
3193 fprintf (stderr, "[%s] Merged %lu types\n", pfx, num_merged_types);
3194 fprintf (stderr, "[%s] %lu types prevailed (%lu associated trees)\n",
3195 pfx, num_prevailing_types, num_type_scc_trees);
3196 fprintf (stderr, "[%s] GIMPLE canonical type table: size %ld, "
3197 "%ld elements, %ld searches, %ld collisions (ratio: %f)\n", pfx,
3198 (long) htab_size (gimple_canonical_types),
3199 (long) htab_elements (gimple_canonical_types),
3200 (long) gimple_canonical_types->searches,
3201 (long) gimple_canonical_types->collisions,
3202 htab_collisions (gimple_canonical_types));
3203 fprintf (stderr, "[%s] GIMPLE canonical type pointer-map: "
3204 "%lu elements, %ld searches\n", pfx,
3205 num_canonical_type_hash_entries,
3206 num_canonical_type_hash_queries);
3207 }
3208
3209 print_lto_report (pfx);
3210 }
3211
3212 /* Perform whole program analysis (WPA) on the callgraph and write out the
3213 optimization plan. */
3214
3215 static void
3216 do_whole_program_analysis (void)
3217 {
3218 symtab_node *node;
3219
3220 lto_parallelism = 1;
3221
3222 /* TODO: jobserver communication is not supported yet. */
3223 if (!strcmp (flag_wpa, "jobserver"))
3224 lto_parallelism = -1;
3225 else
3226 {
3227 lto_parallelism = atoi (flag_wpa);
3228 if (lto_parallelism <= 0)
3229 lto_parallelism = 0;
3230 }
3231
3232 timevar_start (TV_PHASE_OPT_GEN);
3233
3234 /* Note that since we are in WPA mode, materialize_cgraph will not
3235 actually read in all the function bodies. It only materializes
3236 the decls and cgraph nodes so that analysis can be performed. */
3237 materialize_cgraph ();
3238
3239 /* Reading in the cgraph uses different timers, start timing WPA now. */
3240 timevar_push (TV_WHOPR_WPA);
3241
3242 if (pre_ipa_mem_report)
3243 {
3244 fprintf (stderr, "Memory consumption before IPA\n");
3245 dump_memory_report (false);
3246 }
3247
3248 cgraph_function_flags_ready = true;
3249
3250 if (cgraph_dump_file)
3251 dump_symtab (cgraph_dump_file);
3252 bitmap_obstack_initialize (NULL);
3253 cgraph_state = CGRAPH_STATE_IPA_SSA;
3254
3255 execute_ipa_pass_list (g->get_passes ()->all_regular_ipa_passes);
3256 symtab_remove_unreachable_nodes (false, dump_file);
3257
3258 if (cgraph_dump_file)
3259 {
3260 fprintf (cgraph_dump_file, "Optimized ");
3261 dump_symtab (cgraph_dump_file);
3262 }
3263 #ifdef ENABLE_CHECKING
3264 verify_cgraph ();
3265 #endif
3266 bitmap_obstack_release (NULL);
3267
3268 /* We are about to launch the final LTRANS phase, stop the WPA timer. */
3269 timevar_pop (TV_WHOPR_WPA);
3270
3271 timevar_push (TV_WHOPR_PARTITIONING);
3272 if (flag_lto_partition == LTO_PARTITION_1TO1)
3273 lto_1_to_1_map ();
3274 else if (flag_lto_partition == LTO_PARTITION_MAX)
3275 lto_max_map ();
3276 else if (flag_lto_partition == LTO_PARTITION_ONE)
3277 lto_balanced_map (1);
3278 else if (flag_lto_partition == LTO_PARTITION_BALANCED)
3279 lto_balanced_map (PARAM_VALUE (PARAM_LTO_PARTITIONS));
3280 else
3281 gcc_unreachable ();
3282
3283 /* Inline summaries are needed for balanced partitioning. Free them now so
3284 the memory can be used for streamer caches. */
3285 inline_free_summary ();
3286
3287 /* AUX pointers are used by the partitioning code to keep track of the number
3288 of partitions a symbol is in. This is no longer needed. */
3289 FOR_EACH_SYMBOL (node)
3290 node->aux = NULL;
3291
3292 lto_stats.num_cgraph_partitions += ltrans_partitions.length ();
3293
3294 /* Find out statics that need to be promoted
3295 to globals with hidden visibility because they are accessed from multiple
3296 partitions. */
3297 lto_promote_cross_file_statics ();
3298 timevar_pop (TV_WHOPR_PARTITIONING);
3299
3300 timevar_stop (TV_PHASE_OPT_GEN);
3301
3302 /* Collect one last time - in lto_wpa_write_files we may end up forking
3303 with the idea that this doesn't increase memory usage. So we
3304 absolutely do not want to collect after that. */
3305 ggc_collect ();
3306
3307 timevar_start (TV_PHASE_STREAM_OUT);
3308 if (!quiet_flag)
3309 {
3310 fprintf (stderr, "\nStreaming out");
3311 fflush (stderr);
3312 }
3313 lto_wpa_write_files ();
3314 if (!quiet_flag)
3315 fprintf (stderr, "\n");
3316 timevar_stop (TV_PHASE_STREAM_OUT);
3317
3318 if (post_ipa_mem_report)
3319 {
3320 fprintf (stderr, "Memory consumption after IPA\n");
3321 dump_memory_report (false);
3322 }
3323
3324 /* Show the LTO report before launching LTRANS. */
3325 if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
3326 print_lto_report_1 ();
3327 if (mem_report_wpa)
3328 dump_memory_report (true);
3329 }
3330
3331
3332 static GTY(()) tree lto_eh_personality_decl;
3333
3334 /* Return the LTO personality function decl. */
3335
3336 tree
3337 lto_eh_personality (void)
3338 {
3339 if (!lto_eh_personality_decl)
3340 {
3341 /* Use the first personality DECL for our personality if we don't
3342 support multiple ones. This ensures that we don't artificially
3343 create the need for them in a single-language program. */
3344 if (first_personality_decl && !dwarf2out_do_cfi_asm ())
3345 lto_eh_personality_decl = first_personality_decl;
3346 else
3347 lto_eh_personality_decl = lhd_gcc_personality ();
3348 }
3349
3350 return lto_eh_personality_decl;
3351 }
3352
3353 /* Set the process name based on the LTO mode. */
3354
3355 static void
3356 lto_process_name (void)
3357 {
3358 if (flag_lto)
3359 setproctitle ("lto1-lto");
3360 if (flag_wpa)
3361 setproctitle ("lto1-wpa");
3362 if (flag_ltrans)
3363 setproctitle ("lto1-ltrans");
3364 }
3365
3366
3367 /* Initialize the LTO front end. */
3368
3369 static void
3370 lto_init (void)
3371 {
3372 lto_process_name ();
3373 lto_streamer_hooks_init ();
3374 lto_reader_init ();
3375 lto_set_in_hooks (NULL, get_section_data, free_section_data);
3376 memset (&lto_stats, 0, sizeof (lto_stats));
3377 bitmap_obstack_initialize (NULL);
3378 gimple_register_cfg_hooks ();
3379 }
3380
3381
3382 /* Main entry point for the GIMPLE front end. This front end has
3383 three main personalities:
3384
3385 - LTO (-flto). All the object files on the command line are
3386 loaded in memory and processed as a single translation unit.
3387 This is the traditional link-time optimization behavior.
3388
3389 - WPA (-fwpa). Only the callgraph and summary information for
3390 files in the command file are loaded. A single callgraph
3391 (without function bodies) is instantiated for the whole set of
3392 files. IPA passes are only allowed to analyze the call graph
3393 and make transformation decisions. The callgraph is
3394 partitioned, each partition is written to a new object file
3395 together with the transformation decisions.
3396
3397 - LTRANS (-fltrans). Similar to -flto but it prevents the IPA
3398 analysis passes from running again. Since WPA computed summary
3399 information and decided what transformations to apply, LTRANS
3400 simply applies them. */
3401
3402 void
3403 lto_main (void)
3404 {
3405 /* LTO is called as a front end, even though it is not a front end.
3406 Because it is called as a front end, TV_PHASE_PARSING and
3407 TV_PARSE_GLOBAL are active, and we need to turn them off while
3408 doing LTO. Later we turn them back on so they are active up in
3409 toplev.c. */
3410 timevar_pop (TV_PARSE_GLOBAL);
3411 timevar_stop (TV_PHASE_PARSING);
3412
3413 timevar_start (TV_PHASE_SETUP);
3414
3415 /* Initialize the LTO front end. */
3416 lto_init ();
3417
3418 timevar_stop (TV_PHASE_SETUP);
3419 timevar_start (TV_PHASE_STREAM_IN);
3420
3421 /* Read all the symbols and call graph from all the files in the
3422 command line. */
3423 read_cgraph_and_symbols (num_in_fnames, in_fnames);
3424
3425 timevar_stop (TV_PHASE_STREAM_IN);
3426
3427 if (!seen_error ())
3428 {
3429 /* If WPA is enabled analyze the whole call graph and create an
3430 optimization plan. Otherwise, read in all the function
3431 bodies and continue with optimization. */
3432 if (flag_wpa)
3433 do_whole_program_analysis ();
3434 else
3435 {
3436 timevar_start (TV_PHASE_OPT_GEN);
3437
3438 materialize_cgraph ();
3439 if (!flag_ltrans)
3440 lto_promote_statics_nonwpa ();
3441
3442 /* Let the middle end know that we have read and merged all of
3443 the input files. */
3444 compile ();
3445
3446 timevar_stop (TV_PHASE_OPT_GEN);
3447
3448 /* FIXME lto, if the processes spawned by WPA fail, we miss
3449 the chance to print WPA's report, so WPA will call
3450 print_lto_report before launching LTRANS. If LTRANS was
3451 launched directly by the driver we would not need to do
3452 this. */
3453 if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
3454 print_lto_report_1 ();
3455 }
3456 }
3457
3458 /* Here we make LTO pretend to be a parser. */
3459 timevar_start (TV_PHASE_PARSING);
3460 timevar_push (TV_PARSE_GLOBAL);
3461 }
3462
3463 #include "gt-lto-lto.h"