1 /* Top-level LTO routines.
2 Copyright (C) 2009-2014 Free Software Foundation, Inc.
3 Contributed by CodeSourcery, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "opts.h"
25 #include "toplev.h"
26 #include "tree.h"
27 #include "stor-layout.h"
28 #include "diagnostic-core.h"
29 #include "tm.h"
30 #include "cgraph.h"
31 #include "tree-ssa-operands.h"
32 #include "tree-pass.h"
33 #include "langhooks.h"
34 #include "bitmap.h"
35 #include "ipa-prop.h"
36 #include "common.h"
37 #include "debug.h"
38 #include "tree-ssa-alias.h"
39 #include "internal-fn.h"
40 #include "gimple-expr.h"
41 #include "is-a.h"
42 #include "gimple.h"
43 #include "lto.h"
44 #include "lto-tree.h"
45 #include "lto-streamer.h"
46 #include "lto-section-names.h"
47 #include "tree-streamer.h"
48 #include "splay-tree.h"
49 #include "lto-partition.h"
50 #include "data-streamer.h"
51 #include "context.h"
52 #include "pass_manager.h"
53 #include "ipa-inline.h"
54 #include "params.h"
55
56
57 /* Number of parallel tasks to run, -1 if we want to use GNU Make jobserver. */
58 static int lto_parallelism;
59
60 static GTY(()) tree first_personality_decl;
61
62 /* Returns a hash code for P. */
63
64 static hashval_t
65 hash_name (const void *p)
66 {
67 const struct lto_section_slot *ds = (const struct lto_section_slot *) p;
68 return (hashval_t) htab_hash_string (ds->name);
69 }
70
71
72 /* Returns nonzero if P1 and P2 are equal. */
73
74 static int
75 eq_name (const void *p1, const void *p2)
76 {
77 const struct lto_section_slot *s1 =
78 (const struct lto_section_slot *) p1;
79 const struct lto_section_slot *s2 =
80 (const struct lto_section_slot *) p2;
81
82 return strcmp (s1->name, s2->name) == 0;
83 }
84
 85 /* Free an lto_section_slot, including its name string. */
86
87 static void
88 free_with_string (void *arg)
89 {
90 struct lto_section_slot *s = (struct lto_section_slot *)arg;
91
92 free (CONST_CAST (char *, s->name));
93 free (arg);
94 }
95
 96 /* Create the section hash table. */
97
98 htab_t
99 lto_obj_create_section_hash_table (void)
100 {
101 return htab_create (37, hash_name, eq_name, free_with_string);
102 }
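
/* Illustrative sketch (not part of the original sources): the object-file
   readers fill this table with heap-allocated lto_section_slot entries
   keyed by section name, roughly as in

     void **slot = htab_find_slot (section_hash_table, new_slot, INSERT);
     if (*slot == NULL)
       *slot = new_slot;

   free_with_string above then releases both the entry and its name
   string when the table is destroyed.  */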
103
104 /* Delete an allocated integer KEY in the splay tree. */
105
106 static void
107 lto_splay_tree_delete_id (splay_tree_key key)
108 {
109 free ((void *) key);
110 }
111
112 /* Compare splay tree node ids A and B. */
113
114 static int
115 lto_splay_tree_compare_ids (splay_tree_key a, splay_tree_key b)
116 {
117 unsigned HOST_WIDE_INT ai;
118 unsigned HOST_WIDE_INT bi;
119
120 ai = *(unsigned HOST_WIDE_INT *) a;
121 bi = *(unsigned HOST_WIDE_INT *) b;
122
123 if (ai < bi)
124 return -1;
125 else if (ai > bi)
126 return 1;
127 return 0;
128 }
129
130 /* Look up splay tree node by ID in splay tree T. */
131
132 static splay_tree_node
133 lto_splay_tree_lookup (splay_tree t, unsigned HOST_WIDE_INT id)
134 {
135 return splay_tree_lookup (t, (splay_tree_key) &id);
136 }
137
138 /* Check if KEY has ID. */
139
140 static bool
141 lto_splay_tree_id_equal_p (splay_tree_key key, unsigned HOST_WIDE_INT id)
142 {
143 return *(unsigned HOST_WIDE_INT *) key == id;
144 }
145
146 /* Insert a splay tree node into tree T with ID as key and FILE_DATA as value.
147 The ID is allocated separately because we need HOST_WIDE_INTs which may
148 be wider than a splay_tree_key. */
149
150 static void
151 lto_splay_tree_insert (splay_tree t, unsigned HOST_WIDE_INT id,
152 struct lto_file_decl_data *file_data)
153 {
154 unsigned HOST_WIDE_INT *idp = XCNEW (unsigned HOST_WIDE_INT);
155 *idp = id;
156 splay_tree_insert (t, (splay_tree_key) idp, (splay_tree_value) file_data);
157 }
158
159 /* Create a splay tree. */
160
161 static splay_tree
162 lto_splay_tree_new (void)
163 {
164 return splay_tree_new (lto_splay_tree_compare_ids,
165 lto_splay_tree_delete_id,
166 NULL);
167 }
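
/* Usage sketch for the splay-tree helpers above (illustrative only):

     splay_tree file_ids = lto_splay_tree_new ();
     lto_splay_tree_insert (file_ids, id, file_data);
     ...
     splay_tree_node n = lto_splay_tree_lookup (file_ids, id);
     if (n && lto_splay_tree_id_equal_p (n->key, id))
       file_data = (struct lto_file_decl_data *) n->value;

   The keys are heap-allocated HOST_WIDE_INTs (see lto_splay_tree_insert)
   because splay_tree_key itself may be too narrow to hold an ID.  */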
168
169 /* Return true when NODE has a clone that is analyzed (i.e. we need
170 to load its body even if the node itself is not needed). */
171
172 static bool
173 has_analyzed_clone_p (struct cgraph_node *node)
174 {
175 struct cgraph_node *orig = node;
176 node = node->clones;
177 if (node)
178 while (node != orig)
179 {
180 if (node->analyzed)
181 return true;
182 if (node->clones)
183 node = node->clones;
184 else if (node->next_sibling_clone)
185 node = node->next_sibling_clone;
186 else
187 {
188 while (node != orig && !node->next_sibling_clone)
189 node = node->clone_of;
190 if (node != orig)
191 node = node->next_sibling_clone;
192 }
193 }
194 return false;
195 }
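
/* The loop above walks the clone tree rooted at ORIG in preorder:
   descend into node->clones first, then move to next_sibling_clone,
   and climb back up via clone_of when a subtree is exhausted, stopping
   as soon as a clone with an analyzed body is found.  */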
196
197 /* Read the function body for the function associated with NODE. */
198
199 static void
200 lto_materialize_function (struct cgraph_node *node)
201 {
202 tree decl;
203
204 decl = node->decl;
205 /* Read in functions with body (analyzed nodes)
206 and also functions that are needed to produce virtual clones. */
207 if ((cgraph_function_with_gimple_body_p (node) && node->analyzed)
208 || node->used_as_abstract_origin
209 || has_analyzed_clone_p (node))
210 {
211 /* Clones don't need to be read. */
212 if (node->clone_of)
213 return;
214 if (DECL_FUNCTION_PERSONALITY (decl) && !first_personality_decl)
215 first_personality_decl = DECL_FUNCTION_PERSONALITY (decl);
216 }
217
218 /* Let the middle end know about the function. */
219 rest_of_decl_compilation (decl, 1, 0);
220 }
221
222
223 /* Decode the content of memory pointed to by DATA in the in decl
224 state object STATE. DATA_IN points to a data_in structure for
225 decoding. Return the address after the decoded object in the
226 input. */
227
228 static const uint32_t *
229 lto_read_in_decl_state (struct data_in *data_in, const uint32_t *data,
230 struct lto_in_decl_state *state)
231 {
232 uint32_t ix;
233 tree decl;
234 uint32_t i, j;
235
236 ix = *data++;
237 decl = streamer_tree_cache_get_tree (data_in->reader_cache, ix);
238 if (TREE_CODE (decl) != FUNCTION_DECL)
239 {
240 gcc_assert (decl == void_type_node);
241 decl = NULL_TREE;
242 }
243 state->fn_decl = decl;
244
245 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
246 {
247 uint32_t size = *data++;
248 tree *decls = ggc_vec_alloc<tree> (size);
249
250 for (j = 0; j < size; j++)
251 decls[j] = streamer_tree_cache_get_tree (data_in->reader_cache, data[j]);
252
253 state->streams[i].size = size;
254 state->streams[i].trees = decls;
255 data += size;
256 }
257
258 return data;
259 }
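
/* The uint32_t stream decoded above has the following layout (sketch):

     [cache index of the function decl, or of void_type_node]
     for each of the LTO_N_DECL_STREAMS decl streams:
       [size] [SIZE cache indices ...]

   Every index refers back into DATA_IN's streamer tree cache, so the
   referenced trees must already have been read at this point.  */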
260
261
262 /* Global canonical type table. */
263 static htab_t gimple_canonical_types;
264 static pointer_map <hashval_t> *canonical_type_hash_cache;
265 static unsigned long num_canonical_type_hash_entries;
266 static unsigned long num_canonical_type_hash_queries;
267
268 static hashval_t iterative_hash_canonical_type (tree type, hashval_t val);
269 static hashval_t gimple_canonical_type_hash (const void *p);
270 static void gimple_register_canonical_type_1 (tree t, hashval_t hash);
271
 272 /* Return a hash value for gimple type TYPE.
273
274 The hash value returned is equal for types considered compatible
275 by gimple_canonical_types_compatible_p. */
276
277 static hashval_t
278 hash_canonical_type (tree type)
279 {
280 hashval_t v;
281
282 /* Combine a few common features of types so that types are grouped into
283 smaller sets; when searching for existing matching types to merge,
284 only existing types having the same features as the new type will be
285 checked. */
286 v = iterative_hash_hashval_t (TREE_CODE (type), 0);
287 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
288
289 /* Incorporate common features of numerical types. */
290 if (INTEGRAL_TYPE_P (type)
291 || SCALAR_FLOAT_TYPE_P (type)
292 || FIXED_POINT_TYPE_P (type)
293 || TREE_CODE (type) == OFFSET_TYPE
294 || POINTER_TYPE_P (type))
295 {
296 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
297 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
298 }
299
300 if (VECTOR_TYPE_P (type))
301 {
302 v = iterative_hash_hashval_t (TYPE_VECTOR_SUBPARTS (type), v);
303 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
304 }
305
306 if (TREE_CODE (type) == COMPLEX_TYPE)
307 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
308
309 /* For pointer and reference types, fold in information about the type
310 pointed to but do not recurse to the pointed-to type. */
311 if (POINTER_TYPE_P (type))
312 {
313 v = iterative_hash_hashval_t (TYPE_ADDR_SPACE (TREE_TYPE (type)), v);
314 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
315 }
316
317 /* For integer types hash only the string flag. */
318 if (TREE_CODE (type) == INTEGER_TYPE)
319 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
320
321 /* For array types hash the domain bounds and the string flag. */
322 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
323 {
324 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
325 /* OMP lowering can introduce error_mark_node in place of
326 random local decls in types. */
327 if (TYPE_MIN_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
328 v = iterative_hash_expr (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), v);
329 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
330 v = iterative_hash_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), v);
331 }
332
333 /* Recurse for aggregates with a single element type. */
334 if (TREE_CODE (type) == ARRAY_TYPE
335 || TREE_CODE (type) == COMPLEX_TYPE
336 || TREE_CODE (type) == VECTOR_TYPE)
337 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
338
339 /* Incorporate function return and argument types. */
340 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
341 {
342 unsigned na;
343 tree p;
344
345 /* For method types also incorporate their parent class. */
346 if (TREE_CODE (type) == METHOD_TYPE)
347 v = iterative_hash_canonical_type (TYPE_METHOD_BASETYPE (type), v);
348
349 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
350
351 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
352 {
353 v = iterative_hash_canonical_type (TREE_VALUE (p), v);
354 na++;
355 }
356
357 v = iterative_hash_hashval_t (na, v);
358 }
359
360 if (RECORD_OR_UNION_TYPE_P (type))
361 {
362 unsigned nf;
363 tree f;
364
365 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
366 if (TREE_CODE (f) == FIELD_DECL)
367 {
368 v = iterative_hash_canonical_type (TREE_TYPE (f), v);
369 nf++;
370 }
371
372 v = iterative_hash_hashval_t (nf, v);
373 }
374
375 return v;
376 }
377
 378 /* Return a hash value for gimple type TYPE combined with VAL. */
379
380 static hashval_t
381 iterative_hash_canonical_type (tree type, hashval_t val)
382 {
383 hashval_t v;
384 /* An already processed type. */
385 if (TYPE_CANONICAL (type))
386 {
387 type = TYPE_CANONICAL (type);
388 v = gimple_canonical_type_hash (type);
389 }
390 else
391 {
 392 /* Canonical types should not be able to form SCCs by design; this
393 recursion is just because we do not register canonical types in
394 optimal order. To avoid quadratic behavior also register the
395 type here. */
396 v = hash_canonical_type (type);
397 gimple_register_canonical_type_1 (type, v);
398 }
399 return iterative_hash_hashval_t (v, val);
400 }
401
402 /* Returns the hash for a canonical type P. */
403
404 static hashval_t
405 gimple_canonical_type_hash (const void *p)
406 {
407 num_canonical_type_hash_queries++;
408 hashval_t *slot
409 = canonical_type_hash_cache->contains (CONST_CAST_TREE ((const_tree) p));
410 gcc_assert (slot != NULL);
411 return *slot;
412 }
413
414
415 /* The TYPE_CANONICAL merging machinery. It should closely resemble
416 the middle-end types_compatible_p function. It needs to avoid
417 claiming types are different for types that should be treated
418 the same with respect to TBAA. Canonical types are also used
419 for IL consistency checks via the useless_type_conversion_p
420 predicate which does not handle all type kinds itself but falls
421 back to pointer-comparison of TYPE_CANONICAL for aggregates
422 for example. */
423
424 /* Return true iff T1 and T2 are structurally identical for what
425 TBAA is concerned. */
426
427 static bool
428 gimple_canonical_types_compatible_p (tree t1, tree t2)
429 {
 430 /* Before starting to set up the SCC machinery, handle simple cases. */
431
432 /* Check first for the obvious case of pointer identity. */
433 if (t1 == t2)
434 return true;
435
436 /* Check that we have two types to compare. */
437 if (t1 == NULL_TREE || t2 == NULL_TREE)
438 return false;
439
440 /* If the types have been previously registered and found equal
441 they still are. */
442 if (TYPE_CANONICAL (t1)
443 && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
444 return true;
445
446 /* Can't be the same type if the types don't have the same code. */
447 if (TREE_CODE (t1) != TREE_CODE (t2))
448 return false;
449
450 /* Qualifiers do not matter for canonical type comparison purposes. */
451
452 /* Void types and nullptr types are always the same. */
453 if (TREE_CODE (t1) == VOID_TYPE
454 || TREE_CODE (t1) == NULLPTR_TYPE)
455 return true;
456
 457 /* Can't be the same type if they have different modes. */
458 if (TYPE_MODE (t1) != TYPE_MODE (t2))
459 return false;
460
461 /* Non-aggregate types can be handled cheaply. */
462 if (INTEGRAL_TYPE_P (t1)
463 || SCALAR_FLOAT_TYPE_P (t1)
464 || FIXED_POINT_TYPE_P (t1)
465 || TREE_CODE (t1) == VECTOR_TYPE
466 || TREE_CODE (t1) == COMPLEX_TYPE
467 || TREE_CODE (t1) == OFFSET_TYPE
468 || POINTER_TYPE_P (t1))
469 {
470 /* Can't be the same type if they have different sign or precision. */
471 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
472 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
473 return false;
474
475 if (TREE_CODE (t1) == INTEGER_TYPE
476 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
477 return false;
478
479 /* For canonical type comparisons we do not want to build SCCs
480 so we cannot compare pointed-to types. But we can, for now,
481 require the same pointed-to type kind and match what
482 useless_type_conversion_p would do. */
483 if (POINTER_TYPE_P (t1))
484 {
485 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
486 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
487 return false;
488
489 if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
490 return false;
491 }
492
493 /* Tail-recurse to components. */
494 if (TREE_CODE (t1) == VECTOR_TYPE
495 || TREE_CODE (t1) == COMPLEX_TYPE)
496 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
497 TREE_TYPE (t2));
498
499 return true;
500 }
501
502 /* Do type-specific comparisons. */
503 switch (TREE_CODE (t1))
504 {
505 case ARRAY_TYPE:
506 /* Array types are the same if the element types are the same and
 507 the number of elements is the same. */
508 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
509 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
510 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
511 return false;
512 else
513 {
514 tree i1 = TYPE_DOMAIN (t1);
515 tree i2 = TYPE_DOMAIN (t2);
516
517 /* For an incomplete external array, the type domain can be
518 NULL_TREE. Check this condition also. */
519 if (i1 == NULL_TREE && i2 == NULL_TREE)
520 return true;
521 else if (i1 == NULL_TREE || i2 == NULL_TREE)
522 return false;
523 else
524 {
525 tree min1 = TYPE_MIN_VALUE (i1);
526 tree min2 = TYPE_MIN_VALUE (i2);
527 tree max1 = TYPE_MAX_VALUE (i1);
528 tree max2 = TYPE_MAX_VALUE (i2);
529
530 /* The minimum/maximum values have to be the same. */
531 if ((min1 == min2
532 || (min1 && min2
533 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
534 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
535 || operand_equal_p (min1, min2, 0))))
536 && (max1 == max2
537 || (max1 && max2
538 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
539 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
540 || operand_equal_p (max1, max2, 0)))))
541 return true;
542 else
543 return false;
544 }
545 }
546
547 case METHOD_TYPE:
548 case FUNCTION_TYPE:
 549 /* Function types are the same if the return type and argument types
550 are the same. */
551 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
552 return false;
553
554 if (!comp_type_attributes (t1, t2))
555 return false;
556
557 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
558 return true;
559 else
560 {
561 tree parms1, parms2;
562
563 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
564 parms1 && parms2;
565 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
566 {
567 if (!gimple_canonical_types_compatible_p
568 (TREE_VALUE (parms1), TREE_VALUE (parms2)))
569 return false;
570 }
571
572 if (parms1 || parms2)
573 return false;
574
575 return true;
576 }
577
578 case RECORD_TYPE:
579 case UNION_TYPE:
580 case QUAL_UNION_TYPE:
581 {
582 tree f1, f2;
583
584 /* For aggregate types, all the fields must be the same. */
585 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
586 f1 || f2;
587 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
588 {
589 /* Skip non-fields. */
590 while (f1 && TREE_CODE (f1) != FIELD_DECL)
591 f1 = TREE_CHAIN (f1);
592 while (f2 && TREE_CODE (f2) != FIELD_DECL)
593 f2 = TREE_CHAIN (f2);
594 if (!f1 || !f2)
595 break;
596 /* The fields must have the same name, offset and type. */
597 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
598 || !gimple_compare_field_offset (f1, f2)
599 || !gimple_canonical_types_compatible_p
600 (TREE_TYPE (f1), TREE_TYPE (f2)))
601 return false;
602 }
603
604 /* If one aggregate has more fields than the other, they
605 are not the same. */
606 if (f1 || f2)
607 return false;
608
609 return true;
610 }
611
612 default:
613 gcc_unreachable ();
614 }
615 }
616
617
618 /* Returns nonzero if P1 and P2 are equal. */
619
620 static int
621 gimple_canonical_type_eq (const void *p1, const void *p2)
622 {
623 const_tree t1 = (const_tree) p1;
624 const_tree t2 = (const_tree) p2;
625 return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
626 CONST_CAST_TREE (t2));
627 }
628
629 /* Main worker for gimple_register_canonical_type. */
630
631 static void
632 gimple_register_canonical_type_1 (tree t, hashval_t hash)
633 {
634 void **slot;
635
636 gcc_checking_assert (TYPE_P (t) && !TYPE_CANONICAL (t));
637
638 slot = htab_find_slot_with_hash (gimple_canonical_types, t, hash, INSERT);
639 if (*slot)
640 {
641 tree new_type = (tree)(*slot);
642 gcc_checking_assert (new_type != t);
643 TYPE_CANONICAL (t) = new_type;
644 }
645 else
646 {
647 TYPE_CANONICAL (t) = t;
648 *slot = (void *) t;
649 /* Cache the just computed hash value. */
650 num_canonical_type_hash_entries++;
651 bool existed_p;
652 hashval_t *hslot = canonical_type_hash_cache->insert (t, &existed_p);
653 gcc_assert (!existed_p);
654 *hslot = hash;
655 }
656 }
657
658 /* Register type T in the global type table gimple_types and set
659 TYPE_CANONICAL of T accordingly.
660 This is used by LTO to merge structurally equivalent types for
661 type-based aliasing purposes across different TUs and languages.
662
663 ??? This merging does not exactly match how the tree.c middle-end
664 functions will assign TYPE_CANONICAL when new types are created
665 during optimization (which at least happens for pointer and array
666 types). */
667
668 static void
669 gimple_register_canonical_type (tree t)
670 {
671 if (TYPE_CANONICAL (t))
672 return;
673
674 gimple_register_canonical_type_1 (t, hash_canonical_type (t));
675 }
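
/* For example (illustrative): if two translation units both contain

     struct S { int i; float f; };

   the two RECORD_TYPEs hash identically in hash_canonical_type (same
   tree code, mode, field types and field count), the second
   registration finds the first in gimple_canonical_types via
   gimple_canonical_type_eq, and TYPE_CANONICAL of the later type is
   simply set to the earlier one, so TBAA treats them as one type.  */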
676
677 /* Re-compute TYPE_CANONICAL for NODE and related types. */
678
679 static void
680 lto_register_canonical_types (tree node, bool first_p)
681 {
682 if (!node
683 || !TYPE_P (node))
684 return;
685
686 if (first_p)
687 TYPE_CANONICAL (node) = NULL_TREE;
688
689 if (POINTER_TYPE_P (node)
690 || TREE_CODE (node) == COMPLEX_TYPE
691 || TREE_CODE (node) == ARRAY_TYPE)
692 lto_register_canonical_types (TREE_TYPE (node), first_p);
693
694 if (!first_p)
695 gimple_register_canonical_type (node);
696 }
697
698
 699 /* Remember trees that contain references to declarations. */
700 static GTY(()) vec <tree, va_gc> *tree_with_vars;
701
702 #define CHECK_VAR(tt) \
703 do \
704 { \
705 if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \
706 && (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \
707 return true; \
708 } while (0)
709
710 #define CHECK_NO_VAR(tt) \
711 gcc_checking_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
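
/* CHECK_VAR makes the enclosing mentions_vars_p_* routine return true as
   soon as a field points to a public or external VAR_DECL or
   FUNCTION_DECL, i.e. a tree that may be replaced by a prevailing decl
   and thus needs fixup later.  CHECK_NO_VAR asserts that a field never
   points to a variable or function decl at all.  */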
712
713 /* Check presence of pointers to decls in fields of a tree_typed T. */
714
715 static inline bool
716 mentions_vars_p_typed (tree t)
717 {
718 CHECK_NO_VAR (TREE_TYPE (t));
719 return false;
720 }
721
722 /* Check presence of pointers to decls in fields of a tree_common T. */
723
724 static inline bool
725 mentions_vars_p_common (tree t)
726 {
727 if (mentions_vars_p_typed (t))
728 return true;
729 CHECK_NO_VAR (TREE_CHAIN (t));
730 return false;
731 }
732
733 /* Check presence of pointers to decls in fields of a decl_minimal T. */
734
735 static inline bool
736 mentions_vars_p_decl_minimal (tree t)
737 {
738 if (mentions_vars_p_common (t))
739 return true;
740 CHECK_NO_VAR (DECL_NAME (t));
741 CHECK_VAR (DECL_CONTEXT (t));
742 return false;
743 }
744
745 /* Check presence of pointers to decls in fields of a decl_common T. */
746
747 static inline bool
748 mentions_vars_p_decl_common (tree t)
749 {
750 if (mentions_vars_p_decl_minimal (t))
751 return true;
752 CHECK_VAR (DECL_SIZE (t));
753 CHECK_VAR (DECL_SIZE_UNIT (t));
754 CHECK_VAR (DECL_INITIAL (t));
755 CHECK_NO_VAR (DECL_ATTRIBUTES (t));
756 CHECK_VAR (DECL_ABSTRACT_ORIGIN (t));
757 return false;
758 }
759
760 /* Check presence of pointers to decls in fields of a decl_with_vis T. */
761
762 static inline bool
763 mentions_vars_p_decl_with_vis (tree t)
764 {
765 if (mentions_vars_p_decl_common (t))
766 return true;
767
 768 /* The accessor macro has side effects; use the field name here. */
769 CHECK_NO_VAR (t->decl_with_vis.assembler_name);
770 CHECK_NO_VAR (DECL_SECTION_NAME (t));
771 return false;
772 }
773
774 /* Check presence of pointers to decls in fields of a decl_non_common T. */
775
776 static inline bool
777 mentions_vars_p_decl_non_common (tree t)
778 {
779 if (mentions_vars_p_decl_with_vis (t))
780 return true;
781 CHECK_NO_VAR (DECL_ARGUMENT_FLD (t));
782 CHECK_NO_VAR (DECL_RESULT_FLD (t));
783 CHECK_NO_VAR (DECL_VINDEX (t));
784 return false;
785 }
786
 787 /* Check presence of pointers to decls in fields of a FUNCTION_DECL T. */
788
789 static bool
790 mentions_vars_p_function (tree t)
791 {
792 if (mentions_vars_p_decl_non_common (t))
793 return true;
794 CHECK_VAR (DECL_FUNCTION_PERSONALITY (t));
795 return false;
796 }
797
798 /* Check presence of pointers to decls in fields of a field_decl T. */
799
800 static bool
801 mentions_vars_p_field_decl (tree t)
802 {
803 if (mentions_vars_p_decl_common (t))
804 return true;
805 CHECK_VAR (DECL_FIELD_OFFSET (t));
806 CHECK_NO_VAR (DECL_BIT_FIELD_TYPE (t));
807 CHECK_NO_VAR (DECL_QUALIFIER (t));
808 CHECK_NO_VAR (DECL_FIELD_BIT_OFFSET (t));
809 CHECK_NO_VAR (DECL_FCONTEXT (t));
810 return false;
811 }
812
813 /* Check presence of pointers to decls in fields of a type T. */
814
815 static bool
816 mentions_vars_p_type (tree t)
817 {
818 if (mentions_vars_p_common (t))
819 return true;
820 CHECK_NO_VAR (TYPE_CACHED_VALUES (t));
821 CHECK_VAR (TYPE_SIZE (t));
822 CHECK_VAR (TYPE_SIZE_UNIT (t));
823 CHECK_NO_VAR (TYPE_ATTRIBUTES (t));
824 CHECK_NO_VAR (TYPE_NAME (t));
825
826 CHECK_VAR (TYPE_MINVAL (t));
827 CHECK_VAR (TYPE_MAXVAL (t));
828
829 /* Accessor is for derived node types only. */
830 CHECK_NO_VAR (t->type_non_common.binfo);
831
832 CHECK_VAR (TYPE_CONTEXT (t));
833 CHECK_NO_VAR (TYPE_CANONICAL (t));
834 CHECK_NO_VAR (TYPE_MAIN_VARIANT (t));
835 CHECK_NO_VAR (TYPE_NEXT_VARIANT (t));
836 return false;
837 }
838
839 /* Check presence of pointers to decls in fields of a BINFO T. */
840
841 static bool
842 mentions_vars_p_binfo (tree t)
843 {
844 unsigned HOST_WIDE_INT i, n;
845
846 if (mentions_vars_p_common (t))
847 return true;
848 CHECK_VAR (BINFO_VTABLE (t));
849 CHECK_NO_VAR (BINFO_OFFSET (t));
850 CHECK_NO_VAR (BINFO_VIRTUALS (t));
851 CHECK_NO_VAR (BINFO_VPTR_FIELD (t));
852 n = vec_safe_length (BINFO_BASE_ACCESSES (t));
853 for (i = 0; i < n; i++)
854 CHECK_NO_VAR (BINFO_BASE_ACCESS (t, i));
855 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
856 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
857 n = BINFO_N_BASE_BINFOS (t);
858 for (i = 0; i < n; i++)
859 CHECK_NO_VAR (BINFO_BASE_BINFO (t, i));
860 return false;
861 }
862
863 /* Check presence of pointers to decls in fields of a CONSTRUCTOR T. */
864
865 static bool
866 mentions_vars_p_constructor (tree t)
867 {
868 unsigned HOST_WIDE_INT idx;
869 constructor_elt *ce;
870
871 if (mentions_vars_p_typed (t))
872 return true;
873
874 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
875 {
876 CHECK_NO_VAR (ce->index);
877 CHECK_VAR (ce->value);
878 }
879 return false;
880 }
881
882 /* Check presence of pointers to decls in fields of an expression tree T. */
883
884 static bool
885 mentions_vars_p_expr (tree t)
886 {
887 int i;
888 if (mentions_vars_p_typed (t))
889 return true;
890 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
891 CHECK_VAR (TREE_OPERAND (t, i));
892 return false;
893 }
894
895 /* Check presence of pointers to decls in fields of an OMP_CLAUSE T. */
896
897 static bool
898 mentions_vars_p_omp_clause (tree t)
899 {
900 int i;
901 if (mentions_vars_p_common (t))
902 return true;
903 for (i = omp_clause_num_ops[OMP_CLAUSE_CODE (t)] - 1; i >= 0; --i)
904 CHECK_VAR (OMP_CLAUSE_OPERAND (t, i));
905 return false;
906 }
907
 908 /* Check presence of pointers to decls that need later fixup in T. */
909
910 static bool
911 mentions_vars_p (tree t)
912 {
913 switch (TREE_CODE (t))
914 {
915 case IDENTIFIER_NODE:
916 break;
917
918 case TREE_LIST:
919 CHECK_VAR (TREE_VALUE (t));
920 CHECK_VAR (TREE_PURPOSE (t));
921 CHECK_NO_VAR (TREE_CHAIN (t));
922 break;
923
924 case FIELD_DECL:
925 return mentions_vars_p_field_decl (t);
926
927 case LABEL_DECL:
928 case CONST_DECL:
929 case PARM_DECL:
930 case RESULT_DECL:
931 case IMPORTED_DECL:
932 case NAMESPACE_DECL:
933 case NAMELIST_DECL:
934 return mentions_vars_p_decl_common (t);
935
936 case VAR_DECL:
937 return mentions_vars_p_decl_with_vis (t);
938
939 case TYPE_DECL:
940 return mentions_vars_p_decl_non_common (t);
941
942 case FUNCTION_DECL:
943 return mentions_vars_p_function (t);
944
945 case TREE_BINFO:
946 return mentions_vars_p_binfo (t);
947
948 case PLACEHOLDER_EXPR:
949 return mentions_vars_p_common (t);
950
951 case BLOCK:
952 case TRANSLATION_UNIT_DECL:
953 case OPTIMIZATION_NODE:
954 case TARGET_OPTION_NODE:
955 break;
956
957 case CONSTRUCTOR:
958 return mentions_vars_p_constructor (t);
959
960 case OMP_CLAUSE:
961 return mentions_vars_p_omp_clause (t);
962
963 default:
964 if (TYPE_P (t))
965 {
966 if (mentions_vars_p_type (t))
967 return true;
968 }
969 else if (EXPR_P (t))
970 {
971 if (mentions_vars_p_expr (t))
972 return true;
973 }
974 else if (CONSTANT_CLASS_P (t))
975 CHECK_NO_VAR (TREE_TYPE (t));
976 else
977 gcc_unreachable ();
978 }
979 return false;
980 }
981
982
983 /* Return the resolution for the decl with index INDEX from DATA_IN. */
984
985 static enum ld_plugin_symbol_resolution
986 get_resolution (struct data_in *data_in, unsigned index)
987 {
988 if (data_in->globals_resolution.exists ())
989 {
990 ld_plugin_symbol_resolution_t ret;
 991 /* We can have references to functions that are not emitted, in
 992 DECL_FUNCTION_PERSONALITY at least, so we can and indeed have
 993 to return LDPR_UNKNOWN in some cases. */
994 if (data_in->globals_resolution.length () <= index)
995 return LDPR_UNKNOWN;
996 ret = data_in->globals_resolution[index];
997 return ret;
998 }
999 else
1000 /* Delay resolution finding until decl merging. */
1001 return LDPR_UNKNOWN;
1002 }
1003
 1004 /* We need to record resolutions until the symbol table is read. */
1005 static void
1006 register_resolution (struct lto_file_decl_data *file_data, tree decl,
1007 enum ld_plugin_symbol_resolution resolution)
1008 {
1009 if (resolution == LDPR_UNKNOWN)
1010 return;
1011 if (!file_data->resolution_map)
1012 file_data->resolution_map = pointer_map_create ();
1013 *pointer_map_insert (file_data->resolution_map, decl) = (void *)(size_t)resolution;
1014 }
1015
1016 /* Register DECL with the global symbol table and change its
1017 name if necessary to avoid name clashes for static globals across
1018 different files. */
1019
1020 static void
1021 lto_register_var_decl_in_symtab (struct data_in *data_in, tree decl,
1022 unsigned ix)
1023 {
1024 tree context;
1025
1026 /* Variable has file scope, not local. */
1027 if (!TREE_PUBLIC (decl)
1028 && !((context = decl_function_context (decl))
1029 && auto_var_in_fn_p (decl, context)))
1030 rest_of_decl_compilation (decl, 1, 0);
1031
1032 /* If this variable has already been declared, queue the
1033 declaration for merging. */
1034 if (TREE_PUBLIC (decl))
1035 register_resolution (data_in->file_data,
1036 decl, get_resolution (data_in, ix));
1037 }
1038
1039
1040 /* Register DECL with the global symbol table and change its
1041 name if necessary to avoid name clashes for static globals across
1042 different files. DATA_IN contains descriptors and tables for the
1043 file being read. */
1044
1045 static void
1046 lto_register_function_decl_in_symtab (struct data_in *data_in, tree decl,
1047 unsigned ix)
1048 {
 1049 /* If this function has already been declared, queue the
 1050 declaration for merging. */
1051 if (TREE_PUBLIC (decl) && !DECL_ABSTRACT (decl))
1052 register_resolution (data_in->file_data,
1053 decl, get_resolution (data_in, ix));
1054 }
1055
1056
 1057 /* Re-materialize type T in its type variant list and in the
 1058 pointer-to/reference-to chains. */
1059
1060 static void
1061 lto_fixup_prevailing_type (tree t)
1062 {
1063 /* The following re-creates proper variant lists while fixing up
1064 the variant leaders. We do not stream TYPE_NEXT_VARIANT so the
1065 variant list state before fixup is broken. */
1066
 1067 /* If we are not our own variant leader, link us into our new leader's
 1068 variant list. */
1069 if (TYPE_MAIN_VARIANT (t) != t)
1070 {
1071 tree mv = TYPE_MAIN_VARIANT (t);
1072 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
1073 TYPE_NEXT_VARIANT (mv) = t;
1074 }
1075
1076 /* The following reconstructs the pointer chains
1077 of the new pointed-to type if we are a main variant. We do
1078 not stream those so they are broken before fixup. */
1079 if (TREE_CODE (t) == POINTER_TYPE
1080 && TYPE_MAIN_VARIANT (t) == t)
1081 {
1082 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (TREE_TYPE (t));
1083 TYPE_POINTER_TO (TREE_TYPE (t)) = t;
1084 }
1085 else if (TREE_CODE (t) == REFERENCE_TYPE
1086 && TYPE_MAIN_VARIANT (t) == t)
1087 {
1088 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (TREE_TYPE (t));
1089 TYPE_REFERENCE_TO (TREE_TYPE (t)) = t;
1090 }
1091 }
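
/* Concretely (illustrative example): a prevailing variant type such as
   "const int" gets relinked into the variant list of its main variant
   "int" via TYPE_NEXT_VARIANT, and a prevailing main-variant "int *" is
   pushed onto TYPE_POINTER_TO of "int", since neither chain is part of
   the streamed data.  */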
1092
1093
1094 /* We keep prevailing tree SCCs in a hashtable with manual collision
1095 handling (in case all hashes compare the same) and keep the colliding
1096 entries in the tree_scc->next chain. */
1097
1098 struct tree_scc
1099 {
1100 tree_scc *next;
1101 /* Hash of the whole SCC. */
1102 hashval_t hash;
1103 /* Number of trees in the SCC. */
1104 unsigned len;
1105 /* Number of possible entries into the SCC (tree nodes [0..entry_len-1]
1106 which share the same individual tree hash). */
1107 unsigned entry_len;
1108 /* The members of the SCC.
1109 We only need to remember the first entry node candidate for prevailing
1110 SCCs (but of course have access to all entries for SCCs we are
1111 processing).
1112 ??? For prevailing SCCs we really only need hash and the first
1113 entry candidate, but that's too awkward to implement. */
1114 tree entries[1];
1115 };
1116
1117 struct tree_scc_hasher : typed_noop_remove <tree_scc>
1118 {
1119 typedef tree_scc value_type;
1120 typedef tree_scc compare_type;
1121 static inline hashval_t hash (const value_type *);
1122 static inline bool equal (const value_type *, const compare_type *);
1123 };
1124
1125 hashval_t
1126 tree_scc_hasher::hash (const value_type *scc)
1127 {
1128 return scc->hash;
1129 }
1130
1131 bool
1132 tree_scc_hasher::equal (const value_type *scc1, const compare_type *scc2)
1133 {
1134 if (scc1->hash != scc2->hash
1135 || scc1->len != scc2->len
1136 || scc1->entry_len != scc2->entry_len)
1137 return false;
1138 return true;
1139 }
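
/* Note that tree_scc_hasher::equal deliberately compares only the hash,
   length and entry length: SCCs agreeing on those are chained through
   tree_scc::next, and the expensive structural comparison is left to
   compare_tree_sccs / unify_scc below.  */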
1140
1141 static hash_table <tree_scc_hasher> tree_scc_hash;
1142 static struct obstack tree_scc_hash_obstack;
1143
1144 static unsigned long num_merged_types;
1145 static unsigned long num_prevailing_types;
1146 static unsigned long num_type_scc_trees;
1147 static unsigned long total_scc_size;
1148 static unsigned long num_sccs_read;
1149 static unsigned long total_scc_size_merged;
1150 static unsigned long num_sccs_merged;
1151 static unsigned long num_scc_compares;
1152 static unsigned long num_scc_compare_collisions;
1153
1154
1155 /* Compare the two entries T1 and T2 of two SCCs that are possibly equal,
1156 recursing through in-SCC tree edges. Returns true if the SCCs entered
1157 through T1 and T2 are equal and fills in *MAP with the pairs of
1158 SCC entries we visited, starting with (*MAP)[0] = T1 and (*MAP)[1] = T2. */
1159
1160 static bool
1161 compare_tree_sccs_1 (tree t1, tree t2, tree **map)
1162 {
1163 enum tree_code code;
1164
1165 /* Mark already visited nodes. */
1166 TREE_ASM_WRITTEN (t2) = 1;
1167
1168 /* Push the pair onto map. */
1169 (*map)[0] = t1;
1170 (*map)[1] = t2;
1171 *map = *map + 2;
1172
1173 /* Compare value-fields. */
1174 #define compare_values(X) \
1175 do { \
1176 if (X(t1) != X(t2)) \
1177 return false; \
1178 } while (0)
1179
1180 compare_values (TREE_CODE);
1181 code = TREE_CODE (t1);
1182
1183 if (!TYPE_P (t1))
1184 {
1185 compare_values (TREE_SIDE_EFFECTS);
1186 compare_values (TREE_CONSTANT);
1187 compare_values (TREE_READONLY);
1188 compare_values (TREE_PUBLIC);
1189 }
1190 compare_values (TREE_ADDRESSABLE);
1191 compare_values (TREE_THIS_VOLATILE);
1192 if (DECL_P (t1))
1193 compare_values (DECL_UNSIGNED);
1194 else if (TYPE_P (t1))
1195 compare_values (TYPE_UNSIGNED);
1196 if (TYPE_P (t1))
1197 compare_values (TYPE_ARTIFICIAL);
1198 else
1199 compare_values (TREE_NO_WARNING);
1200 compare_values (TREE_NOTHROW);
1201 compare_values (TREE_STATIC);
1202 if (code != TREE_BINFO)
1203 compare_values (TREE_PRIVATE);
1204 compare_values (TREE_PROTECTED);
1205 compare_values (TREE_DEPRECATED);
1206 if (TYPE_P (t1))
1207 {
1208 compare_values (TYPE_SATURATING);
1209 compare_values (TYPE_ADDR_SPACE);
1210 }
1211 else if (code == SSA_NAME)
1212 compare_values (SSA_NAME_IS_DEFAULT_DEF);
1213
1214 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1215 {
1216 if (!wi::eq_p (t1, t2))
1217 return false;
1218 }
1219
1220 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1221 {
1222 /* ??? No suitable compare routine available. */
1223 REAL_VALUE_TYPE r1 = TREE_REAL_CST (t1);
1224 REAL_VALUE_TYPE r2 = TREE_REAL_CST (t2);
1225 if (r1.cl != r2.cl
1226 || r1.decimal != r2.decimal
1227 || r1.sign != r2.sign
1228 || r1.signalling != r2.signalling
1229 || r1.canonical != r2.canonical
1230 || r1.uexp != r2.uexp)
1231 return false;
1232 for (unsigned i = 0; i < SIGSZ; ++i)
1233 if (r1.sig[i] != r2.sig[i])
1234 return false;
1235 }
1236
1237 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1238 if (!fixed_compare (EQ_EXPR,
1239 TREE_FIXED_CST_PTR (t1), TREE_FIXED_CST_PTR (t2)))
1240 return false;
1241
1242
 1243 /* We don't want to compare locations, so there is nothing to compare
1244 for TS_DECL_MINIMAL. */
1245
1246 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1247 {
1248 compare_values (DECL_MODE);
1249 compare_values (DECL_NONLOCAL);
1250 compare_values (DECL_VIRTUAL_P);
1251 compare_values (DECL_IGNORED_P);
1252 compare_values (DECL_ABSTRACT);
1253 compare_values (DECL_ARTIFICIAL);
1254 compare_values (DECL_USER_ALIGN);
1255 compare_values (DECL_PRESERVE_P);
1256 compare_values (DECL_EXTERNAL);
1257 compare_values (DECL_GIMPLE_REG_P);
1258 compare_values (DECL_ALIGN);
1259 if (code == LABEL_DECL)
1260 {
1261 compare_values (EH_LANDING_PAD_NR);
1262 compare_values (LABEL_DECL_UID);
1263 }
1264 else if (code == FIELD_DECL)
1265 {
1266 compare_values (DECL_PACKED);
1267 compare_values (DECL_NONADDRESSABLE_P);
1268 compare_values (DECL_OFFSET_ALIGN);
1269 }
1270 else if (code == VAR_DECL)
1271 {
1272 compare_values (DECL_HAS_DEBUG_EXPR_P);
1273 compare_values (DECL_NONLOCAL_FRAME);
1274 }
1275 if (code == RESULT_DECL
1276 || code == PARM_DECL
1277 || code == VAR_DECL)
1278 {
1279 compare_values (DECL_BY_REFERENCE);
1280 if (code == VAR_DECL
1281 || code == PARM_DECL)
1282 compare_values (DECL_HAS_VALUE_EXPR_P);
1283 }
1284 }
1285
1286 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1287 compare_values (DECL_REGISTER);
1288
1289 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1290 {
1291 compare_values (DECL_COMMON);
1292 compare_values (DECL_DLLIMPORT_P);
1293 compare_values (DECL_WEAK);
1294 compare_values (DECL_SEEN_IN_BIND_EXPR_P);
1295 compare_values (DECL_COMDAT);
1296 compare_values (DECL_VISIBILITY);
1297 compare_values (DECL_VISIBILITY_SPECIFIED);
1298 if (code == VAR_DECL)
1299 {
1300 compare_values (DECL_HARD_REGISTER);
1301 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1302 compare_values (DECL_IN_CONSTANT_POOL);
1303 compare_values (DECL_TLS_MODEL);
1304 }
1305 if (VAR_OR_FUNCTION_DECL_P (t1))
1306 compare_values (DECL_INIT_PRIORITY);
1307 }
1308
1309 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1310 {
1311 compare_values (DECL_BUILT_IN_CLASS);
1312 compare_values (DECL_STATIC_CONSTRUCTOR);
1313 compare_values (DECL_STATIC_DESTRUCTOR);
1314 compare_values (DECL_UNINLINABLE);
1315 compare_values (DECL_POSSIBLY_INLINED);
1316 compare_values (DECL_IS_NOVOPS);
1317 compare_values (DECL_IS_RETURNS_TWICE);
1318 compare_values (DECL_IS_MALLOC);
1319 compare_values (DECL_IS_OPERATOR_NEW);
1320 compare_values (DECL_DECLARED_INLINE_P);
1321 compare_values (DECL_STATIC_CHAIN);
1322 compare_values (DECL_NO_INLINE_WARNING_P);
1323 compare_values (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT);
1324 compare_values (DECL_NO_LIMIT_STACK);
1325 compare_values (DECL_DISREGARD_INLINE_LIMITS);
1326 compare_values (DECL_PURE_P);
1327 compare_values (DECL_LOOPING_CONST_OR_PURE_P);
1328 compare_values (DECL_FINAL_P);
1329 compare_values (DECL_CXX_CONSTRUCTOR_P);
1330 compare_values (DECL_CXX_DESTRUCTOR_P);
1331 if (DECL_BUILT_IN_CLASS (t1) != NOT_BUILT_IN)
1332 compare_values (DECL_FUNCTION_CODE);
1333 if (DECL_STATIC_DESTRUCTOR (t1))
1334 compare_values (DECL_FINI_PRIORITY);
1335 }
1336
1337 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1338 {
1339 compare_values (TYPE_MODE);
1340 compare_values (TYPE_STRING_FLAG);
1341 compare_values (TYPE_NO_FORCE_BLK);
1342 compare_values (TYPE_NEEDS_CONSTRUCTING);
1343 if (RECORD_OR_UNION_TYPE_P (t1))
1344 {
1345 compare_values (TYPE_TRANSPARENT_AGGR);
1346 compare_values (TYPE_FINAL_P);
1347 }
1348 else if (code == ARRAY_TYPE)
1349 compare_values (TYPE_NONALIASED_COMPONENT);
1350 compare_values (TYPE_PACKED);
1351 compare_values (TYPE_RESTRICT);
1352 compare_values (TYPE_USER_ALIGN);
1353 compare_values (TYPE_READONLY);
1354 compare_values (TYPE_PRECISION);
1355 compare_values (TYPE_ALIGN);
1356 compare_values (TYPE_ALIAS_SET);
1357 }
1358
 1359 /* We don't want to compare locations, so there is nothing to compare
1360 for TS_EXP. */
1361
1362 /* BLOCKs are function local and we don't merge anything there, so
1363 simply refuse to merge. */
1364 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
1365 return false;
1366
1367 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1368 if (strcmp (TRANSLATION_UNIT_LANGUAGE (t1),
1369 TRANSLATION_UNIT_LANGUAGE (t2)) != 0)
1370 return false;
1371
1372 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
1373 gcc_unreachable ();
1374
1375 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1376 if (memcmp (TREE_OPTIMIZATION (t1), TREE_OPTIMIZATION (t2),
1377 sizeof (struct cl_optimization)) != 0)
1378 return false;
1379
1380 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1381 if (vec_safe_length (BINFO_BASE_ACCESSES (t1))
1382 != vec_safe_length (BINFO_BASE_ACCESSES (t2)))
1383 return false;
1384
1385 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1386 compare_values (CONSTRUCTOR_NELTS);
1387
1388 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1389 if (IDENTIFIER_LENGTH (t1) != IDENTIFIER_LENGTH (t2)
1390 || memcmp (IDENTIFIER_POINTER (t1), IDENTIFIER_POINTER (t2),
1391 IDENTIFIER_LENGTH (t1)) != 0)
1392 return false;
1393
1394 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1395 if (TREE_STRING_LENGTH (t1) != TREE_STRING_LENGTH (t2)
1396 || memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
1397 TREE_STRING_LENGTH (t1)) != 0)
1398 return false;
1399
1400 if (code == OMP_CLAUSE)
1401 {
1402 compare_values (OMP_CLAUSE_CODE);
1403 switch (OMP_CLAUSE_CODE (t1))
1404 {
1405 case OMP_CLAUSE_DEFAULT:
1406 compare_values (OMP_CLAUSE_DEFAULT_KIND);
1407 break;
1408 case OMP_CLAUSE_SCHEDULE:
1409 compare_values (OMP_CLAUSE_SCHEDULE_KIND);
1410 break;
1411 case OMP_CLAUSE_DEPEND:
1412 compare_values (OMP_CLAUSE_DEPEND_KIND);
1413 break;
1414 case OMP_CLAUSE_MAP:
1415 compare_values (OMP_CLAUSE_MAP_KIND);
1416 break;
1417 case OMP_CLAUSE_PROC_BIND:
1418 compare_values (OMP_CLAUSE_PROC_BIND_KIND);
1419 break;
1420 case OMP_CLAUSE_REDUCTION:
1421 compare_values (OMP_CLAUSE_REDUCTION_CODE);
1422 compare_values (OMP_CLAUSE_REDUCTION_GIMPLE_INIT);
1423 compare_values (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE);
1424 break;
1425 default:
1426 break;
1427 }
1428 }
1429
1430 #undef compare_values
1431
1432
1433 /* Compare pointer fields. */
1434
 1435 /* Recurse.  Adapted (by search & replace) from DFS_write_tree_body.
 1436 Folding the early checks into the compare_tree_edges recursion
 1437 macro makes debugging much quicker, as you can break on
 1438 compare_tree_sccs_1 and simply 'finish' until a call returns false
 1439 to spot the SCC members that differ. */
1440 #define compare_tree_edges(E1, E2) \
1441 do { \
1442 tree t1_ = (E1), t2_ = (E2); \
1443 if (t1_ != t2_ \
1444 && (!t1_ || !t2_ \
1445 || !TREE_VISITED (t2_) \
1446 || (!TREE_ASM_WRITTEN (t2_) \
1447 && !compare_tree_sccs_1 (t1_, t2_, map)))) \
1448 return false; \
1449 /* Only non-NULL trees outside of the SCC may compare equal. */ \
1450 gcc_checking_assert (t1_ != t2_ || (!t2_ || !TREE_VISITED (t2_))); \
1451 } while (0)
1452
1453 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1454 {
1455 if (code != IDENTIFIER_NODE)
1456 compare_tree_edges (TREE_TYPE (t1), TREE_TYPE (t2));
1457 }
1458
1459 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1460 {
1461 unsigned i;
1462 /* Note that the number of elements for EXPR has already been emitted
1463 in EXPR's header (see streamer_write_tree_header). */
1464 for (i = 0; i < VECTOR_CST_NELTS (t1); ++i)
1465 compare_tree_edges (VECTOR_CST_ELT (t1, i), VECTOR_CST_ELT (t2, i));
1466 }
1467
1468 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1469 {
1470 compare_tree_edges (TREE_REALPART (t1), TREE_REALPART (t2));
1471 compare_tree_edges (TREE_IMAGPART (t1), TREE_IMAGPART (t2));
1472 }
1473
1474 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1475 {
1476 compare_tree_edges (DECL_NAME (t1), DECL_NAME (t2));
1477 /* ??? Global decls from different TUs have non-matching
1478 TRANSLATION_UNIT_DECLs. Only consider a small set of
 1479 decls equivalent; we should not end up merging others. */
1480 if ((code == TYPE_DECL
1481 || code == NAMESPACE_DECL
1482 || code == IMPORTED_DECL
1483 || code == CONST_DECL
1484 || (VAR_OR_FUNCTION_DECL_P (t1)
1485 && (TREE_PUBLIC (t1) || DECL_EXTERNAL (t1))))
1486 && DECL_FILE_SCOPE_P (t1) && DECL_FILE_SCOPE_P (t2))
1487 ;
1488 else
1489 compare_tree_edges (DECL_CONTEXT (t1), DECL_CONTEXT (t2));
1490 }
1491
1492 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1493 {
1494 compare_tree_edges (DECL_SIZE (t1), DECL_SIZE (t2));
1495 compare_tree_edges (DECL_SIZE_UNIT (t1), DECL_SIZE_UNIT (t2));
1496 compare_tree_edges (DECL_ATTRIBUTES (t1), DECL_ATTRIBUTES (t2));
1497 if ((code == VAR_DECL
1498 || code == PARM_DECL)
1499 && DECL_HAS_VALUE_EXPR_P (t1))
1500 compare_tree_edges (DECL_VALUE_EXPR (t1), DECL_VALUE_EXPR (t2));
1501 if (code == VAR_DECL
1502 && DECL_HAS_DEBUG_EXPR_P (t1))
1503 compare_tree_edges (DECL_DEBUG_EXPR (t1), DECL_DEBUG_EXPR (t2));
1504 /* LTO specific edges. */
1505 if (code != FUNCTION_DECL
1506 && code != TRANSLATION_UNIT_DECL)
1507 compare_tree_edges (DECL_INITIAL (t1), DECL_INITIAL (t2));
1508 }
1509
1510 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1511 {
1512 if (code == FUNCTION_DECL)
1513 {
1514 tree a1, a2;
1515 for (a1 = DECL_ARGUMENTS (t1), a2 = DECL_ARGUMENTS (t2);
1516 a1 || a2;
1517 a1 = TREE_CHAIN (a1), a2 = TREE_CHAIN (a2))
1518 compare_tree_edges (a1, a2);
1519 compare_tree_edges (DECL_RESULT (t1), DECL_RESULT (t2));
1520 }
1521 else if (code == TYPE_DECL)
1522 compare_tree_edges (DECL_ORIGINAL_TYPE (t1), DECL_ORIGINAL_TYPE (t2));
1523 compare_tree_edges (DECL_VINDEX (t1), DECL_VINDEX (t2));
1524 }
1525
1526 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1527 {
1528 /* Make sure we don't inadvertently set the assembler name. */
1529 if (DECL_ASSEMBLER_NAME_SET_P (t1))
1530 compare_tree_edges (DECL_ASSEMBLER_NAME (t1),
1531 DECL_ASSEMBLER_NAME (t2));
1532 compare_tree_edges (DECL_SECTION_NAME (t1), DECL_SECTION_NAME (t2));
1533 compare_tree_edges (DECL_COMDAT_GROUP (t1), DECL_COMDAT_GROUP (t2));
1534 }
1535
1536 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1537 {
1538 compare_tree_edges (DECL_FIELD_OFFSET (t1), DECL_FIELD_OFFSET (t2));
1539 compare_tree_edges (DECL_BIT_FIELD_TYPE (t1), DECL_BIT_FIELD_TYPE (t2));
1540 compare_tree_edges (DECL_BIT_FIELD_REPRESENTATIVE (t1),
1541 DECL_BIT_FIELD_REPRESENTATIVE (t2));
1542 compare_tree_edges (DECL_FIELD_BIT_OFFSET (t1),
1543 DECL_FIELD_BIT_OFFSET (t2));
1544 compare_tree_edges (DECL_FCONTEXT (t1), DECL_FCONTEXT (t2));
1545 }
1546
1547 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1548 {
1549 compare_tree_edges (DECL_FUNCTION_PERSONALITY (t1),
1550 DECL_FUNCTION_PERSONALITY (t2));
1551 /* DECL_FUNCTION_SPECIFIC_TARGET is not yet created. We compare
1552 the attribute list instead. */
1553 compare_tree_edges (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t1),
1554 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t2));
1555 }
1556
1557 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1558 {
1559 compare_tree_edges (TYPE_SIZE (t1), TYPE_SIZE (t2));
1560 compare_tree_edges (TYPE_SIZE_UNIT (t1), TYPE_SIZE_UNIT (t2));
1561 compare_tree_edges (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2));
1562 compare_tree_edges (TYPE_NAME (t1), TYPE_NAME (t2));
1563 /* Do not compare TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
1564 reconstructed during fixup. */
1565 /* Do not compare TYPE_NEXT_VARIANT, we reconstruct the variant lists
1566 during fixup. */
1567 compare_tree_edges (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2));
1568 /* ??? Global types from different TUs have non-matching
1569 TRANSLATION_UNIT_DECLs. Still merge them if they are otherwise
1570 equal. */
1571 if (TYPE_FILE_SCOPE_P (t1) && TYPE_FILE_SCOPE_P (t2))
1572 ;
1573 else
1574 compare_tree_edges (TYPE_CONTEXT (t1), TYPE_CONTEXT (t2));
1575 /* TYPE_CANONICAL is re-computed during type merging, so do not
1576 compare it here. */
1577 compare_tree_edges (TYPE_STUB_DECL (t1), TYPE_STUB_DECL (t2));
1578 }
1579
1580 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1581 {
1582 if (code == ENUMERAL_TYPE)
1583 compare_tree_edges (TYPE_VALUES (t1), TYPE_VALUES (t2));
1584 else if (code == ARRAY_TYPE)
1585 compare_tree_edges (TYPE_DOMAIN (t1), TYPE_DOMAIN (t2));
1586 else if (RECORD_OR_UNION_TYPE_P (t1))
1587 {
1588 tree f1, f2;
1589 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
1590 f1 || f2;
1591 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1592 compare_tree_edges (f1, f2);
1593 compare_tree_edges (TYPE_BINFO (t1), TYPE_BINFO (t2));
1594 }
1595 else if (code == FUNCTION_TYPE
1596 || code == METHOD_TYPE)
1597 compare_tree_edges (TYPE_ARG_TYPES (t1), TYPE_ARG_TYPES (t2));
1598 if (!POINTER_TYPE_P (t1))
1599 compare_tree_edges (TYPE_MINVAL (t1), TYPE_MINVAL (t2));
1600 compare_tree_edges (TYPE_MAXVAL (t1), TYPE_MAXVAL (t2));
1601 }
1602
1603 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1604 {
1605 compare_tree_edges (TREE_PURPOSE (t1), TREE_PURPOSE (t2));
1606 compare_tree_edges (TREE_VALUE (t1), TREE_VALUE (t2));
1607 compare_tree_edges (TREE_CHAIN (t1), TREE_CHAIN (t2));
1608 }
1609
1610 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1611 for (int i = 0; i < TREE_VEC_LENGTH (t1); i++)
1612 compare_tree_edges (TREE_VEC_ELT (t1, i), TREE_VEC_ELT (t2, i));
1613
1614 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1615 {
1616 for (int i = 0; i < TREE_OPERAND_LENGTH (t1); i++)
1617 compare_tree_edges (TREE_OPERAND (t1, i),
1618 TREE_OPERAND (t2, i));
1619
1620 /* BLOCKs are function local and we don't merge anything there. */
1621 if (TREE_BLOCK (t1) || TREE_BLOCK (t2))
1622 return false;
1623 }
1624
1625 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1626 {
1627 unsigned i;
1628 tree t;
1629 /* Lengths have already been compared above. */
1630 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t1), i, t)
1631 compare_tree_edges (t, BINFO_BASE_BINFO (t2, i));
1632 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t1), i, t)
1633 compare_tree_edges (t, BINFO_BASE_ACCESS (t2, i));
1634 compare_tree_edges (BINFO_OFFSET (t1), BINFO_OFFSET (t2));
1635 compare_tree_edges (BINFO_VTABLE (t1), BINFO_VTABLE (t2));
1636 compare_tree_edges (BINFO_VPTR_FIELD (t1), BINFO_VPTR_FIELD (t2));
1637 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1638 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1639 }
1640
1641 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1642 {
1643 unsigned i;
1644 tree index, value;
1645 /* Lengths have already been compared above. */
1646 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t1), i, index, value)
1647 {
1648 compare_tree_edges (index, CONSTRUCTOR_ELT (t2, i)->index);
1649 compare_tree_edges (value, CONSTRUCTOR_ELT (t2, i)->value);
1650 }
1651 }
1652
1653 if (code == OMP_CLAUSE)
1654 {
1655 int i;
1656
1657 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t1)]; i++)
1658 compare_tree_edges (OMP_CLAUSE_OPERAND (t1, i),
1659 OMP_CLAUSE_OPERAND (t2, i));
1660 compare_tree_edges (OMP_CLAUSE_CHAIN (t1), OMP_CLAUSE_CHAIN (t2));
1661 }
1662
1663 #undef compare_tree_edges
1664
1665 return true;
1666 }
1667
1668 /* Compare the tree scc SCC to the prevailing candidate PSCC, filling
1669 out MAP if they are equal. */
1670
1671 static bool
1672 compare_tree_sccs (tree_scc *pscc, tree_scc *scc,
1673 tree *map)
1674 {
 1675 /* Assume SCC entry hashes are sorted by their cardinality, which
1676 means we can simply take the first n-tuple of equal hashes
1677 (which is recorded as entry_len) and do n SCC entry candidate
1678 comparisons. */
1679 for (unsigned i = 0; i < pscc->entry_len; ++i)
1680 {
1681 tree *mapp = map;
1682 num_scc_compare_collisions++;
1683 if (compare_tree_sccs_1 (pscc->entries[0], scc->entries[i], &mapp))
1684 {
1685 /* Equal - no need to reset TREE_VISITED or TREE_ASM_WRITTEN
1686 on the scc as all trees will be freed. */
1687 return true;
1688 }
1689 /* Reset TREE_ASM_WRITTEN on scc for the next compare or in case
1690 the SCC prevails. */
1691 for (unsigned j = 0; j < scc->len; ++j)
1692 TREE_ASM_WRITTEN (scc->entries[j]) = 0;
1693 }
1694
1695 return false;
1696 }
1697
 1698 /* qsort comparison function to sort an array of tree pairs by their
 1699 second pointer. */
1700
1701 static int
1702 cmp_tree (const void *p1_, const void *p2_)
1703 {
1704 tree *p1 = (tree *)(const_cast<void *>(p1_));
1705 tree *p2 = (tree *)(const_cast<void *>(p2_));
1706 if (p1[1] == p2[1])
1707 return 0;
1708 return ((uintptr_t)p1[1] < (uintptr_t)p2[1]) ? -1 : 1;
1709 }
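
/* cmp_tree is used by unify_scc below: both the (prevailing, read) pairs
   produced by compare_tree_sccs and the (cache-index, read) pairs built
   in unify_scc are sorted by their second element, the just-read tree,
   so that each streamer-cache slot of the read SCC can be replaced by
   its corresponding prevailing node.  */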
1710
1711 /* Try to unify the SCC with nodes FROM to FROM + LEN in CACHE and
1712 hash value SCC_HASH with an already recorded SCC. Return true if
1713 that was successful, otherwise return false. */
1714
1715 static bool
1716 unify_scc (struct streamer_tree_cache_d *cache, unsigned from,
1717 unsigned len, unsigned scc_entry_len, hashval_t scc_hash)
1718 {
1719 bool unified_p = false;
1720 tree_scc *scc
1721 = (tree_scc *) alloca (sizeof (tree_scc) + (len - 1) * sizeof (tree));
1722 scc->next = NULL;
1723 scc->hash = scc_hash;
1724 scc->len = len;
1725 scc->entry_len = scc_entry_len;
1726 for (unsigned i = 0; i < len; ++i)
1727 {
1728 tree t = streamer_tree_cache_get_tree (cache, from + i);
1729 scc->entries[i] = t;
1730 /* Do not merge SCCs with local entities inside them. Also do
1731 not merge TRANSLATION_UNIT_DECLs. */
1732 if (TREE_CODE (t) == TRANSLATION_UNIT_DECL
1733 || (VAR_OR_FUNCTION_DECL_P (t)
1734 && !(TREE_PUBLIC (t) || DECL_EXTERNAL (t)))
1735 || TREE_CODE (t) == LABEL_DECL)
1736 {
 1737 /* Avoid doing any work for these cases and do not bother to
1738 record the SCCs for further merging. */
1739 return false;
1740 }
1741 }
1742
1743 /* Look for the list of candidate SCCs to compare against. */
1744 tree_scc **slot;
1745 slot = tree_scc_hash.find_slot_with_hash (scc, scc_hash, INSERT);
1746 if (*slot)
1747 {
1748 /* Try unifying against each candidate. */
1749 num_scc_compares++;
1750
1751 /* Set TREE_VISITED on the scc so we can easily identify tree nodes
1752 outside of the scc when following tree edges. Make sure
1753 that TREE_ASM_WRITTEN is unset so we can use it as 2nd bit
1754 to track whether we visited the SCC member during the compare.
1755 We cannot use TREE_VISITED on the pscc members as the extended
1756 scc and pscc can overlap. */
1757 for (unsigned i = 0; i < scc->len; ++i)
1758 {
1759 TREE_VISITED (scc->entries[i]) = 1;
1760 gcc_checking_assert (!TREE_ASM_WRITTEN (scc->entries[i]));
1761 }
1762
1763 tree *map = XALLOCAVEC (tree, 2 * len);
1764 for (tree_scc *pscc = *slot; pscc; pscc = pscc->next)
1765 {
1766 if (!compare_tree_sccs (pscc, scc, map))
1767 continue;
1768
1769 /* Found an equal SCC. */
1770 unified_p = true;
1771 num_scc_compare_collisions--;
1772 num_sccs_merged++;
1773 total_scc_size_merged += len;
1774
1775 #ifdef ENABLE_CHECKING
1776 for (unsigned i = 0; i < len; ++i)
1777 {
1778 tree t = map[2*i+1];
1779 enum tree_code code = TREE_CODE (t);
1780 /* IDENTIFIER_NODEs should be singletons and are merged by the
1781 streamer. The others should be singletons, too, and we
1782 should not merge them in any way. */
1783 gcc_assert (code != TRANSLATION_UNIT_DECL
1784 && code != IDENTIFIER_NODE
1785 && !streamer_handle_as_builtin_p (t));
1786 }
1787 #endif
1788
1789 /* Fixup the streamer cache with the prevailing nodes according
1790 to the tree node mapping computed by compare_tree_sccs. */
1791 if (len == 1)
1792 streamer_tree_cache_replace_tree (cache, pscc->entries[0], from);
1793 else
1794 {
1795 tree *map2 = XALLOCAVEC (tree, 2 * len);
1796 for (unsigned i = 0; i < len; ++i)
1797 {
1798 map2[i*2] = (tree)(uintptr_t)(from + i);
1799 map2[i*2+1] = scc->entries[i];
1800 }
1801 qsort (map2, len, 2 * sizeof (tree), cmp_tree);
1802 qsort (map, len, 2 * sizeof (tree), cmp_tree);
1803 for (unsigned i = 0; i < len; ++i)
1804 streamer_tree_cache_replace_tree (cache, map[2*i],
1805 (uintptr_t)map2[2*i]);
1806 }
1807
1808 /* Free the tree nodes from the read SCC. */
1809 for (unsigned i = 0; i < len; ++i)
1810 {
1811 enum tree_code code;
1812 if (TYPE_P (scc->entries[i]))
1813 num_merged_types++;
1814 code = TREE_CODE (scc->entries[i]);
1815 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1816 vec_free (CONSTRUCTOR_ELTS (scc->entries[i]));
1817 ggc_free (scc->entries[i]);
1818 }
1819
1820 break;
1821 }
1822
1823 /* Reset TREE_VISITED if we didn't unify the SCC with another. */
1824 if (!unified_p)
1825 for (unsigned i = 0; i < scc->len; ++i)
1826 TREE_VISITED (scc->entries[i]) = 0;
1827 }
1828
 1829 /* If we didn't unify it with any candidate, duplicate the relevant
1830 pieces to permanent storage and link it into the chain. */
1831 if (!unified_p)
1832 {
1833 tree_scc *pscc
1834 = XOBNEWVAR (&tree_scc_hash_obstack, tree_scc, sizeof (tree_scc));
1835 memcpy (pscc, scc, sizeof (tree_scc));
1836 pscc->next = (*slot);
1837 *slot = pscc;
1838 }
1839 return unified_p;
1840 }
1841
1842
1843 /* Read all the symbols from buffer DATA, using descriptors in DECL_DATA.
1844 RESOLUTIONS is the set of symbols picked by the linker (read from the
1845 resolution file when the linker plugin is being used). */
1846
1847 static void
1848 lto_read_decls (struct lto_file_decl_data *decl_data, const void *data,
1849 vec<ld_plugin_symbol_resolution_t> resolutions)
1850 {
1851 const struct lto_decl_header *header = (const struct lto_decl_header *) data;
1852 const int decl_offset = sizeof (struct lto_decl_header);
1853 const int main_offset = decl_offset + header->decl_state_size;
1854 const int string_offset = main_offset + header->main_size;
1855 struct lto_input_block ib_main;
1856 struct data_in *data_in;
1857 unsigned int i;
1858 const uint32_t *data_ptr, *data_end;
1859 uint32_t num_decl_states;
1860
1861 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
1862 header->main_size);
1863
1864 data_in = lto_data_in_create (decl_data, (const char *) data + string_offset,
1865 header->string_size, resolutions);
1866
1867 /* We do not uniquify the pre-loaded cache entries; those are middle-end
1868 internal types that should not be merged. */
1869
1870 /* Read the global declarations and types. */
1871 while (ib_main.p < ib_main.len)
1872 {
1873 tree t;
1874 unsigned from = data_in->reader_cache->nodes.length ();
1875 /* Read and uniquify SCCs as in the input stream. */
1876 enum LTO_tags tag = streamer_read_record_start (&ib_main);
1877 if (tag == LTO_tree_scc)
1878 {
1879 unsigned len_;
1880 unsigned scc_entry_len;
1881 hashval_t scc_hash = lto_input_scc (&ib_main, data_in, &len_,
1882 &scc_entry_len);
1883 unsigned len = data_in->reader_cache->nodes.length () - from;
1884 gcc_assert (len == len_);
1885
1886 total_scc_size += len;
1887 num_sccs_read++;
1888
1889 /* We have the special case of size-1 SCCs that are pre-merged
1890 by means of identifier and string sharing for example.
1891 ??? Maybe we should avoid streaming those as SCCs. */
1892 tree first = streamer_tree_cache_get_tree (data_in->reader_cache,
1893 from);
1894 if (len == 1
1895 && (TREE_CODE (first) == IDENTIFIER_NODE
1896 || TREE_CODE (first) == INTEGER_CST
1897 || TREE_CODE (first) == TRANSLATION_UNIT_DECL
1898 || streamer_handle_as_builtin_p (first)))
1899 continue;
1900
1901 /* Try to unify the SCC with already existing ones. */
1902 if (!flag_ltrans
1903 && unify_scc (data_in->reader_cache, from,
1904 len, scc_entry_len, scc_hash))
1905 continue;
1906
1907 /* Do remaining fixup tasks for prevailing nodes. */
1908 bool seen_type = false;
1909 for (unsigned i = 0; i < len; ++i)
1910 {
1911 tree t = streamer_tree_cache_get_tree (data_in->reader_cache,
1912 from + i);
1913 /* Reconstruct the type variant and pointer-to/reference-to
1914 chains. */
1915 if (TYPE_P (t))
1916 {
1917 seen_type = true;
1918 num_prevailing_types++;
1919 lto_fixup_prevailing_type (t);
1920 }
1921 /* Compute the canonical type of all types.
1922 ??? Should be able to assert that !TYPE_CANONICAL. */
1923 if (TYPE_P (t) && !TYPE_CANONICAL (t))
1924 gimple_register_canonical_type (t);
1925 /* Link shared INTEGER_CSTs into the TYPE_CACHED_VALUEs of their
1926 type, which is also a member of this SCC. */
1927 if (TREE_CODE (t) == INTEGER_CST
1928 && !TREE_OVERFLOW (t))
1929 cache_integer_cst (t);
1930 /* Re-build DECL_FUNCTION_SPECIFIC_TARGET; we need that
1931 for both the WPA and LTRANS stages. */
1932 if (TREE_CODE (t) == FUNCTION_DECL)
1933 {
1934 tree attr = lookup_attribute ("target", DECL_ATTRIBUTES (t));
1935 if (attr)
1936 targetm.target_option.valid_attribute_p
1937 (t, NULL_TREE, TREE_VALUE (attr), 0);
1938 }
1939 /* Register TYPE_DECLs with the debuginfo machinery. */
1940 if (!flag_wpa
1941 && TREE_CODE (t) == TYPE_DECL)
1942 debug_hooks->type_decl (t, !DECL_FILE_SCOPE_P (t));
1943 if (!flag_ltrans)
1944 {
1945 /* Register variables and functions with the
1946 symbol table. */
1947 if (TREE_CODE (t) == VAR_DECL)
1948 lto_register_var_decl_in_symtab (data_in, t, from + i);
1949 else if (TREE_CODE (t) == FUNCTION_DECL
1950 && !DECL_BUILT_IN (t))
1951 lto_register_function_decl_in_symtab (data_in, t, from + i);
1952 /* Scan the tree for references to global functions or
1953 variables and record those for later fixup. */
1954 if (mentions_vars_p (t))
1955 vec_safe_push (tree_with_vars, t);
1956 }
1957 }
1958 if (seen_type)
1959 num_type_scc_trees += len;
1960 }
1961 else
1962 {
1963 /* Pickle stray references. */
1964 t = lto_input_tree_1 (&ib_main, data_in, tag, 0);
1965 gcc_assert (t && data_in->reader_cache->nodes.length () == from);
1966 }
1967 }
1968
1969 /* Read in lto_in_decl_state objects. */
1970 data_ptr = (const uint32_t *) ((const char*) data + decl_offset);
1971 data_end =
1972 (const uint32_t *) ((const char*) data_ptr + header->decl_state_size);
1973 num_decl_states = *data_ptr++;
1974
1975 gcc_assert (num_decl_states > 0);
1976 decl_data->global_decl_state = lto_new_in_decl_state ();
1977 data_ptr = lto_read_in_decl_state (data_in, data_ptr,
1978 decl_data->global_decl_state);
1979
1980 /* Read in per-function decl states and enter them in hash table. */
1981 decl_data->function_decl_states =
1982 htab_create_ggc (37, lto_hash_in_decl_state, lto_eq_in_decl_state, NULL);
1983
1984 for (i = 1; i < num_decl_states; i++)
1985 {
1986 struct lto_in_decl_state *state = lto_new_in_decl_state ();
1987 void **slot;
1988
1989 data_ptr = lto_read_in_decl_state (data_in, data_ptr, state);
1990 slot = htab_find_slot (decl_data->function_decl_states, state, INSERT);
1991 gcc_assert (*slot == NULL);
1992 *slot = state;
1993 }
1994
1995 if (data_ptr != data_end)
1996 internal_error ("bytecode stream: garbage at the end of symbols section");
1997
1998 /* Set the current decl state to be the global state. */
1999 decl_data->current_decl_state = decl_data->global_decl_state;
2000
2001 lto_data_in_delete (data_in);
2002 }
2003
2004 /* Custom hex parser, since strtoll is not portable. */
2005
2006 static HOST_WIDEST_INT
2007 lto_parse_hex (const char *p)
2008 {
2009 HOST_WIDEST_INT ret = 0;
2010
2011 for (; *p != '\0'; ++p)
2012 {
2013 char c = *p;
2014 unsigned char part;
2015 ret <<= 4;
2016 if (c >= '0' && c <= '9')
2017 part = c - '0';
2018 else if (c >= 'a' && c <= 'f')
2019 part = c - 'a' + 10;
2020 else if (c >= 'A' && c <= 'F')
2021 part = c - 'A' + 10;
2022 else
2023 internal_error ("could not parse hex number");
2024 ret |= part;
2025 }
2026
2027 return ret;
2028 }
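
/* Usage sketch (illustrative only; this call does not appear in the
   sources): lto_parse_hex ("1a2F") returns 0x1a2f.  Upper- and
   lower-case hex digits are accepted; any other character triggers
   internal_error. */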
2029
2030 /* Read the linker resolution entries for FILE from the stream RESOLUTION
2031 and record them on the sub file data registered in FILE_IDS. */
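/* Illustrative sketch of the fragment parsed below for one object file
   (names, ids and counts are made up; the resolution keywords are the
   ones emitted by the linker plugin, e.g. PREVAILING_DEF, RESOLVED_IR):

       /tmp/ccXYZ.o@0x1a40
       2
       12 9f PREVAILING_DEF foo
       15 9f RESOLVED_IR bar

   The "@0x..." suffix is only read when FILE->offset is nonzero, and
   everything after the resolution keyword on a symbol line is skipped. */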
2032
2033 static void
2034 lto_resolution_read (splay_tree file_ids, FILE *resolution, lto_file *file)
2035 {
2036 /* We require that objects in the resolution file are in the same
2037 order as the lto1 command line. */
2038 unsigned int name_len;
2039 char *obj_name;
2040 unsigned int num_symbols;
2041 unsigned int i;
2042 struct lto_file_decl_data *file_data;
2043 splay_tree_node nd = NULL;
2044
2045 if (!resolution)
2046 return;
2047
2048 name_len = strlen (file->filename);
2049 obj_name = XNEWVEC (char, name_len + 1);
2050 fscanf (resolution, " "); /* Read white space. */
2051
2052 fread (obj_name, sizeof (char), name_len, resolution);
2053 obj_name[name_len] = '\0';
2054 if (filename_cmp (obj_name, file->filename) != 0)
2055 internal_error ("unexpected file name %s in linker resolution file. "
2056 "Expected %s", obj_name, file->filename);
2057 if (file->offset != 0)
2058 {
2059 int t;
2060 char offset_p[17];
2061 HOST_WIDEST_INT offset;
2062 t = fscanf (resolution, "@0x%16s", offset_p);
2063 if (t != 1)
2064 internal_error ("could not parse file offset");
2065 offset = lto_parse_hex (offset_p);
2066 if (offset != file->offset)
2067 internal_error ("unexpected offset");
2068 }
2069
2070 free (obj_name);
2071
2072 fscanf (resolution, "%u", &num_symbols);
2073
2074 for (i = 0; i < num_symbols; i++)
2075 {
2076 int t;
2077 unsigned index;
2078 unsigned HOST_WIDE_INT id;
2079 char r_str[27];
2080 enum ld_plugin_symbol_resolution r = (enum ld_plugin_symbol_resolution) 0;
2081 unsigned int j;
2082 unsigned int lto_resolution_str_len =
2083 sizeof (lto_resolution_str) / sizeof (char *);
2084 res_pair rp;
2085
2086 t = fscanf (resolution, "%u " HOST_WIDE_INT_PRINT_HEX_PURE " %26s %*[^\n]\n",
2087 &index, &id, r_str);
2088 if (t != 3)
2089 internal_error ("invalid line in the resolution file");
2090
2091 for (j = 0; j < lto_resolution_str_len; j++)
2092 {
2093 if (strcmp (lto_resolution_str[j], r_str) == 0)
2094 {
2095 r = (enum ld_plugin_symbol_resolution) j;
2096 break;
2097 }
2098 }
2099 if (j == lto_resolution_str_len)
2100 internal_error ("invalid resolution in the resolution file");
2101
2102 if (!(nd && lto_splay_tree_id_equal_p (nd->key, id)))
2103 {
2104 nd = lto_splay_tree_lookup (file_ids, id);
2105 if (nd == NULL)
2106 internal_error ("resolution sub id %wx not in object file", id);
2107 }
2108
2109 file_data = (struct lto_file_decl_data *)nd->value;
2110 /* The indexes are very sparse. To save memory, save them in a compact
2111 format that is only unpacked later when the subfile is processed. */
2112 rp.res = r;
2113 rp.index = index;
2114 file_data->respairs.safe_push (rp);
2115 if (file_data->max_index < index)
2116 file_data->max_index = index;
2117 }
2118 }
2119
2120 /* List of file_decl_datas */
2121 struct file_data_list
2122 {
2123 struct lto_file_decl_data *first, *last;
2124 };
2125
2126 /* Is NAME the name of an id'ed LTO section? If so, store the id in *ID. */
2127
2128 static int
2129 lto_section_with_id (const char *name, unsigned HOST_WIDE_INT *id)
2130 {
2131 const char *s;
2132
2133 if (strncmp (name, LTO_SECTION_NAME_PREFIX, strlen (LTO_SECTION_NAME_PREFIX)))
2134 return 0;
2135 s = strrchr (name, '.');
2136 return s && sscanf (s, "." HOST_WIDE_INT_PRINT_HEX_PURE, id) == 1;
2137 }
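
/* For example, a section named something like ".gnu.lto_foo.123abc"
   (the exact prefix comes from LTO_SECTION_NAME_PREFIX in
   lto-section-names.h) yields *ID == 0x123abc, while a section whose
   name lacks the prefix or a trailing ".<hexid>" is rejected. */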
2138
2139 /* Create the file_data for each sub file id. */
2140
2141 static int
2142 create_subid_section_table (struct lto_section_slot *ls, splay_tree file_ids,
2143 struct file_data_list *list)
2144 {
2145 struct lto_section_slot s_slot, *new_slot;
2146 unsigned HOST_WIDE_INT id;
2147 splay_tree_node nd;
2148 void **hash_slot;
2149 char *new_name;
2150 struct lto_file_decl_data *file_data;
2151
2152 if (!lto_section_with_id (ls->name, &id))
2153 return 1;
2154
2155 /* Find hash table of sub module id */
2156 nd = lto_splay_tree_lookup (file_ids, id);
2157 if (nd != NULL)
2158 {
2159 file_data = (struct lto_file_decl_data *)nd->value;
2160 }
2161 else
2162 {
2163 file_data = ggc_alloc<lto_file_decl_data> ();
2164 memset(file_data, 0, sizeof (struct lto_file_decl_data));
2165 file_data->id = id;
2166 file_data->section_hash_table = lto_obj_create_section_hash_table ();
2167 lto_splay_tree_insert (file_ids, id, file_data);
2168
2169 /* Maintain list in linker order */
2170 if (!list->first)
2171 list->first = file_data;
2172 if (list->last)
2173 list->last->next = file_data;
2174 list->last = file_data;
2175 }
2176
2177 /* Copy section into sub module hash table */
2178 new_name = XDUPVEC (char, ls->name, strlen (ls->name) + 1);
2179 s_slot.name = new_name;
2180 hash_slot = htab_find_slot (file_data->section_hash_table, &s_slot, INSERT);
2181 gcc_assert (*hash_slot == NULL);
2182
2183 new_slot = XDUP (struct lto_section_slot, ls);
2184 new_slot->name = new_name;
2185 *hash_slot = new_slot;
2186 return 1;
2187 }
2188
2189 /* Read declarations and other initializations for a FILE_DATA. */
2190
2191 static void
2192 lto_file_finalize (struct lto_file_decl_data *file_data, lto_file *file)
2193 {
2194 const char *data;
2195 size_t len;
2196 vec<ld_plugin_symbol_resolution_t>
2197 resolutions = vNULL;
2198 int i;
2199 res_pair *rp;
2200
2201 /* Create vector for fast access of resolution. We do this lazily
2202 to save memory. */
2203 resolutions.safe_grow_cleared (file_data->max_index + 1);
2204 for (i = 0; file_data->respairs.iterate (i, &rp); i++)
2205 resolutions[rp->index] = rp->res;
2206 file_data->respairs.release ();
2207
2208 file_data->renaming_hash_table = lto_create_renaming_table ();
2209 file_data->file_name = file->filename;
2210 data = lto_get_section_data (file_data, LTO_section_decls, NULL, &len);
2211 if (data == NULL)
2212 {
2213 internal_error ("cannot read LTO decls from %s", file_data->file_name);
2214 return;
2215 }
2216 /* Frees resolutions */
2217 lto_read_decls (file_data, data, resolutions);
2218 lto_free_section_data (file_data, LTO_section_decls, NULL, data, len);
2219 }
2220
2221 /* Finalize FILE_DATA in FILE and increase COUNT. */
2222
2223 static int
2224 lto_create_files_from_ids (lto_file *file, struct lto_file_decl_data *file_data,
2225 int *count)
2226 {
2227 lto_file_finalize (file_data, file);
2228 if (cgraph_dump_file)
2229 fprintf (cgraph_dump_file, "Creating file %s with sub id " HOST_WIDE_INT_PRINT_HEX "\n",
2230 file_data->file_name, file_data->id);
2231 (*count)++;
2232 return 0;
2233 }
2234
2235 /* Generate a TREE representation for all types and external decl
2236 entities in FILE.
2237
2238 Read all of the globals out of the file. Then read the cgraph
2239 and process the .o index into the cgraph nodes so that it can open
2240 the .o file to load the functions and ipa information. */
2241
2242 static struct lto_file_decl_data *
2243 lto_file_read (lto_file *file, FILE *resolution_file, int *count)
2244 {
2245 struct lto_file_decl_data *file_data = NULL;
2246 splay_tree file_ids;
2247 htab_t section_hash_table;
2248 struct lto_section_slot *section;
2249 struct file_data_list file_list;
2250 struct lto_section_list section_list;
2251
2252 memset (&section_list, 0, sizeof (struct lto_section_list));
2253 section_hash_table = lto_obj_build_section_table (file, &section_list);
2254
2255 /* Find all sub modules in the object and put their sections into new hash
2256 tables in a splay tree. */
2257 file_ids = lto_splay_tree_new ();
2258 memset (&file_list, 0, sizeof (struct file_data_list));
2259 for (section = section_list.first; section != NULL; section = section->next)
2260 create_subid_section_table (section, file_ids, &file_list);
2261
2262 /* Add resolutions to file ids */
2263 lto_resolution_read (file_ids, resolution_file, file);
2264
2265 /* Finalize each lto file for each submodule in the merged object */
2266 for (file_data = file_list.first; file_data != NULL; file_data = file_data->next)
2267 lto_create_files_from_ids (file, file_data, count);
2268
2269 splay_tree_delete (file_ids);
2270 htab_delete (section_hash_table);
2271
2272 return file_list.first;
2273 }
2274
2275 #if HAVE_MMAP_FILE && HAVE_SYSCONF && defined _SC_PAGE_SIZE
2276 #define LTO_MMAP_IO 1
2277 #endif
2278
2279 #if LTO_MMAP_IO
2280 /* Mask based on the machine page size, used for mmap and munmap calls. */
2281 static size_t page_mask;
2282 #endif
2283
2284 /* Get the section data of length LEN from the file described by
2285 FILE_DATA, starting at OFFSET. The data segment must be freed by the
2286 caller when it is finished with it. Returns NULL on failure. */
2287
2288 static char *
2289 lto_read_section_data (struct lto_file_decl_data *file_data,
2290 intptr_t offset, size_t len)
2291 {
2292 char *result;
2293 static int fd = -1;
2294 static char *fd_name;
2295 #if LTO_MMAP_IO
2296 intptr_t computed_len;
2297 intptr_t computed_offset;
2298 intptr_t diff;
2299 #endif
2300
2301 /* Keep a single-entry file-descriptor cache. The last file we
2302 touched will get closed at exit.
2303 ??? Eventually we want to add a more sophisticated larger cache
2304 or rather fix function body streaming to not stream them in
2305 practically random order. */
2306 if (fd != -1
2307 && filename_cmp (fd_name, file_data->file_name) != 0)
2308 {
2309 free (fd_name);
2310 close (fd);
2311 fd = -1;
2312 }
2313 if (fd == -1)
2314 {
2315 fd = open (file_data->file_name, O_RDONLY|O_BINARY);
2316 if (fd == -1)
2317 {
2318 fatal_error ("Cannot open %s", file_data->file_name);
2319 return NULL;
2320 }
2321 fd_name = xstrdup (file_data->file_name);
2322 }
2323
2324 #if LTO_MMAP_IO
2325 if (!page_mask)
2326 {
2327 size_t page_size = sysconf (_SC_PAGE_SIZE);
2328 page_mask = ~(page_size - 1);
2329 }
2330
2331 computed_offset = offset & page_mask;
2332 diff = offset - computed_offset;
2333 computed_len = len + diff;
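/* Worked example (hypothetical numbers): with a 4096-byte page,
   offset == 0x1234 and len == 0x100 give computed_offset == 0x1000,
   diff == 0x234 and computed_len == 0x334; the page-aligned range is
   mapped and RESULT + DIFF is returned so the caller sees its data at
   the requested offset.  */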
2334
2335 result = (char *) mmap (NULL, computed_len, PROT_READ, MAP_PRIVATE,
2336 fd, computed_offset);
2337 if (result == MAP_FAILED)
2338 {
2339 fatal_error ("Cannot map %s", file_data->file_name);
2340 return NULL;
2341 }
2342
2343 return result + diff;
2344 #else
2345 result = (char *) xmalloc (len);
2346 if (lseek (fd, offset, SEEK_SET) != offset
2347 || read (fd, result, len) != (ssize_t) len)
2348 {
2349 free (result);
2350 fatal_error ("Cannot read %s", file_data->file_name);
2351 result = NULL;
2352 }
2353 #ifdef __MINGW32__
2354 /* Native Windows does not support delayed unlink of an opened file, so
2355 close the file here again. This produces higher I/O load, but at least
2356 it avoids leaving dangling file handles that would block the unlink. */
2357 free (fd_name);
2358 fd_name = NULL;
2359 close (fd);
2360 fd = -1;
2361 #endif
2362 return result;
2363 #endif
2364 }
2365
2366
2367 /* Get the section data from FILE_DATA of SECTION_TYPE with NAME.
2368 NAME will be NULL unless the section type is for a function
2369 body. */
2370
2371 static const char *
2372 get_section_data (struct lto_file_decl_data *file_data,
2373 enum lto_section_type section_type,
2374 const char *name,
2375 size_t *len)
2376 {
2377 htab_t section_hash_table = file_data->section_hash_table;
2378 struct lto_section_slot *f_slot;
2379 struct lto_section_slot s_slot;
2380 const char *section_name = lto_get_section_name (section_type, name, file_data);
2381 char *data = NULL;
2382
2383 *len = 0;
2384 s_slot.name = section_name;
2385 f_slot = (struct lto_section_slot *) htab_find (section_hash_table, &s_slot);
2386 if (f_slot)
2387 {
2388 data = lto_read_section_data (file_data, f_slot->start, f_slot->len);
2389 *len = f_slot->len;
2390 }
2391
2392 free (CONST_CAST (char *, section_name));
2393 return data;
2394 }
2395
2396
2397 /* Free the section data from FILE_DATA of SECTION_TYPE with NAME that
2398 starts at OFFSET and has LEN bytes. */
2399
2400 static void
2401 free_section_data (struct lto_file_decl_data *file_data ATTRIBUTE_UNUSED,
2402 enum lto_section_type section_type ATTRIBUTE_UNUSED,
2403 const char *name ATTRIBUTE_UNUSED,
2404 const char *offset, size_t len ATTRIBUTE_UNUSED)
2405 {
2406 #if LTO_MMAP_IO
2407 intptr_t computed_len;
2408 intptr_t computed_offset;
2409 intptr_t diff;
2410 #endif
2411
2412 #if LTO_MMAP_IO
2413 computed_offset = ((intptr_t) offset) & page_mask;
2414 diff = (intptr_t) offset - computed_offset;
2415 computed_len = len + diff;
2416
2417 munmap ((caddr_t) computed_offset, computed_len);
2418 #else
2419 free (CONST_CAST(char *, offset));
2420 #endif
2421 }
2422
2423 static lto_file *current_lto_file;
2424
2425 /* Helper for qsort; order partitions by size, largest first.
2426 We sort from greatest to smallest so a parallel build doesn't stall on
2427 the longest compilation being started too late. */
2428
2429 static int
2430 cmp_partitions_size (const void *a, const void *b)
2431 {
2432 const struct ltrans_partition_def *pa
2433 = *(struct ltrans_partition_def *const *)a;
2434 const struct ltrans_partition_def *pb
2435 = *(struct ltrans_partition_def *const *)b;
2436 return pb->insns - pa->insns;
2437 }
2438
2439 /* Helper for qsort; compare partitions by the symtab order of their first symbol. */
2440
2441 static int
2442 cmp_partitions_order (const void *a, const void *b)
2443 {
2444 const struct ltrans_partition_def *pa
2445 = *(struct ltrans_partition_def *const *)a;
2446 const struct ltrans_partition_def *pb
2447 = *(struct ltrans_partition_def *const *)b;
2448 int ordera = -1, orderb = -1;
2449
2450 if (lto_symtab_encoder_size (pa->encoder))
2451 ordera = lto_symtab_encoder_deref (pa->encoder, 0)->order;
2452 if (lto_symtab_encoder_size (pb->encoder))
2453 orderb = lto_symtab_encoder_deref (pb->encoder, 0)->order;
2454 return orderb - ordera;
2455 }
2456
2457 /* Actually stream out ENCODER into TEMP_FILENAME. */
2458
2459 static void
2460 do_stream_out (char *temp_filename, lto_symtab_encoder_t encoder)
2461 {
2462 lto_file *file = lto_obj_file_open (temp_filename, true);
2463 if (!file)
2464 fatal_error ("lto_obj_file_open() failed");
2465 lto_set_current_out_file (file);
2466
2467 ipa_write_optimization_summaries (encoder);
2468
2469 lto_set_current_out_file (NULL);
2470 lto_obj_file_close (file);
2471 free (file);
2472 }
2473
2474 /* Wait for forked process and signal errors. */
2475 #ifdef HAVE_WORKING_FORK
2476 static void
2477 wait_for_child ()
2478 {
2479 int status;
2480 do
2481 {
2482 #ifndef WCONTINUED
2483 #define WCONTINUED 0
2484 #endif
2485 int w = waitpid (0, &status, WUNTRACED | WCONTINUED);
2486 if (w == -1)
2487 fatal_error ("waitpid failed");
2488
2489 if (WIFEXITED (status) && WEXITSTATUS (status))
2490 fatal_error ("streaming subprocess failed");
2491 else if (WIFSIGNALED (status))
2492 fatal_error ("streaming subprocess was killed by signal");
2493 }
2494 while (!WIFEXITED (status) && !WIFSIGNALED (status));
2495 }
2496 #endif
2497
2498 /* Stream out ENCODER into TEMP_FILENAME.
2499 Fork if that seems to help. */
2500
2501 static void
2502 stream_out (char *temp_filename, lto_symtab_encoder_t encoder, bool last)
2503 {
2504 #ifdef HAVE_WORKING_FORK
2505 static int nruns;
2506
2507 if (lto_parallelism <= 1)
2508 {
2509 do_stream_out (temp_filename, encoder);
2510 return;
2511 }
2512
2513 /* Do not run more than LTO_PARALLELISM streaming processes at once.
2514 FIXME: we ignore limits imposed by the jobserver. */
2515 if (lto_parallelism > 0 && nruns >= lto_parallelism)
2516 {
2517 wait_for_child ();
2518 nruns --;
2519 }
2520 /* If this is not the last parallel partition, execute new
2521 streaming process. */
2522 if (!last)
2523 {
2524 pid_t cpid = fork ();
2525
2526 if (!cpid)
2527 {
2528 setproctitle ("lto1-wpa-streaming");
2529 do_stream_out (temp_filename, encoder);
2530 exit (0);
2531 }
2532 /* Fork failed; let's do the job ourselves. */
2533 else if (cpid == -1)
2534 do_stream_out (temp_filename, encoder);
2535 else
2536 nruns++;
2537 }
2538 /* Last partition; stream it and wait for all children to die. */
2539 else
2540 {
2541 int i;
2542 do_stream_out (temp_filename, encoder);
2543 for (i = 0; i < nruns; i++)
2544 wait_for_child ();
2545 }
2546 asm_nodes_output = true;
2547 #else
2548 do_stream_out (temp_filename, encoder);
2549 #endif
2550 }
2551
2552 /* Write all output files in WPA mode and the file with the list of
2553 LTRANS units. */
2554
2555 static void
2556 lto_wpa_write_files (void)
2557 {
2558 unsigned i, n_sets;
2559 ltrans_partition part;
2560 FILE *ltrans_output_list_stream;
2561 char *temp_filename;
2562 vec <char *>temp_filenames = vNULL;
2563 size_t blen;
2564
2565 /* Open the LTRANS output list. */
2566 if (!ltrans_output_list)
2567 fatal_error ("no LTRANS output list filename provided");
2568
2569 timevar_push (TV_WHOPR_WPA);
2570
2571 FOR_EACH_VEC_ELT (ltrans_partitions, i, part)
2572 lto_stats.num_output_symtab_nodes += lto_symtab_encoder_size (part->encoder);
2573
2574 timevar_pop (TV_WHOPR_WPA);
2575
2576 timevar_push (TV_WHOPR_WPA_IO);
2577
2578 /* Generate a prefix for the LTRANS unit files. */
2579 blen = strlen (ltrans_output_list);
2580 temp_filename = (char *) xmalloc (blen + sizeof ("2147483648.o"));
2581 strcpy (temp_filename, ltrans_output_list);
2582 if (blen > sizeof (".out")
2583 && strcmp (temp_filename + blen - sizeof (".out") + 1,
2584 ".out") == 0)
2585 temp_filename[blen - sizeof (".out") + 1] = '\0';
2586 blen = strlen (temp_filename);
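/* For example (hypothetical name), an output list "foo.ltrans.out"
   yields the prefix "foo.ltrans"; the partitions below are then
   written to "foo.ltrans0.o", "foo.ltrans1.o", and so on.  */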
2587
2588 n_sets = ltrans_partitions.length ();
2589
2590 /* Sort partitions by size so small ones are compiled last.
2591 FIXME: Even when not reordering we may want to output one list for parallel make
2592 and another for the final link command. */
2593
2594 if (!flag_profile_reorder_functions || !flag_profile_use)
2595 ltrans_partitions.qsort (flag_toplevel_reorder
2596 ? cmp_partitions_size
2597 : cmp_partitions_order);
2598
2599 for (i = 0; i < n_sets; i++)
2600 {
2601 ltrans_partition part = ltrans_partitions[i];
2602
2603 /* Write all the nodes in SET. */
2604 sprintf (temp_filename + blen, "%u.o", i);
2605
2606 if (!quiet_flag)
2607 fprintf (stderr, " %s (%s %i insns)", temp_filename, part->name, part->insns);
2608 if (cgraph_dump_file)
2609 {
2610 lto_symtab_encoder_iterator lsei;
2611
2612 fprintf (cgraph_dump_file, "Writing partition %s to file %s, %i insns\n",
2613 part->name, temp_filename, part->insns);
2614 fprintf (cgraph_dump_file, " Symbols in partition: ");
2615 for (lsei = lsei_start_in_partition (part->encoder); !lsei_end_p (lsei);
2616 lsei_next_in_partition (&lsei))
2617 {
2618 symtab_node *node = lsei_node (lsei);
2619 fprintf (cgraph_dump_file, "%s ", node->asm_name ());
2620 }
2621 fprintf (cgraph_dump_file, "\n Symbols in boundary: ");
2622 for (lsei = lsei_start (part->encoder); !lsei_end_p (lsei);
2623 lsei_next (&lsei))
2624 {
2625 symtab_node *node = lsei_node (lsei);
2626 if (!lto_symtab_encoder_in_partition_p (part->encoder, node))
2627 {
2628 fprintf (cgraph_dump_file, "%s ", node->asm_name ());
2629 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
2630 if (cnode
2631 && lto_symtab_encoder_encode_body_p (part->encoder, cnode))
2632 fprintf (cgraph_dump_file, "(body included)");
2633 else
2634 {
2635 varpool_node *vnode = dyn_cast <varpool_node *> (node);
2636 if (vnode
2637 && lto_symtab_encoder_encode_initializer_p (part->encoder, vnode))
2638 fprintf (cgraph_dump_file, "(initializer included)");
2639 }
2640 }
2641 }
2642 fprintf (cgraph_dump_file, "\n");
2643 }
2644 gcc_checking_assert (lto_symtab_encoder_size (part->encoder) || !i);
2645
2646 stream_out (temp_filename, part->encoder, i == n_sets - 1);
2647
2648 part->encoder = NULL;
2649
2650 temp_filenames.safe_push (xstrdup (temp_filename));
2651 }
2652 ltrans_output_list_stream = fopen (ltrans_output_list, "w");
2653 if (ltrans_output_list_stream == NULL)
2654 fatal_error ("opening LTRANS output list %s: %m", ltrans_output_list);
2655 for (i = 0; i < n_sets; i++)
2656 {
2657 unsigned int len = strlen (temp_filenames[i]);
2658 if (fwrite (temp_filenames[i], 1, len, ltrans_output_list_stream) < len
2659 || fwrite ("\n", 1, 1, ltrans_output_list_stream) < 1)
2660 fatal_error ("writing to LTRANS output list %s: %m",
2661 ltrans_output_list);
2662 free (temp_filenames[i]);
2663 }
2664 temp_filenames.release();
2665
2666 lto_stats.num_output_files += n_sets;
2667
2668 /* Close the LTRANS output list. */
2669 if (fclose (ltrans_output_list_stream))
2670 fatal_error ("closing LTRANS output list %s: %m", ltrans_output_list);
2671
2672 free_ltrans_partitions();
2673 free (temp_filename);
2674
2675 timevar_pop (TV_WHOPR_WPA_IO);
2676 }
2677
2678
2679 /* If TT is a variable or function decl replace it with its
2680 prevailing variant. */
2681 #define LTO_SET_PREVAIL(tt) \
2682 do {\
2683 if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \
2684 && (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \
2685 { \
2686 tt = lto_symtab_prevailing_decl (tt); \
2687 fixed = true; \
2688 } \
2689 } while (0)
2690
2691 /* Ensure that TT isn't a replaceable var or function decl. */
2692 #define LTO_NO_PREVAIL(tt) \
2693 gcc_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
2694
2695 /* Given a tree T replace all fields referring to variables or functions
2696 with their prevailing variant. */
2697 static void
2698 lto_fixup_prevailing_decls (tree t)
2699 {
2700 enum tree_code code = TREE_CODE (t);
2701 bool fixed = false;
2702
2703 gcc_checking_assert (code != TREE_BINFO);
2704 LTO_NO_PREVAIL (TREE_TYPE (t));
2705 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
2706 LTO_NO_PREVAIL (TREE_CHAIN (t));
2707 if (DECL_P (t))
2708 {
2709 LTO_NO_PREVAIL (DECL_NAME (t));
2710 LTO_SET_PREVAIL (DECL_CONTEXT (t));
2711 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
2712 {
2713 LTO_SET_PREVAIL (DECL_SIZE (t));
2714 LTO_SET_PREVAIL (DECL_SIZE_UNIT (t));
2715 LTO_SET_PREVAIL (DECL_INITIAL (t));
2716 LTO_NO_PREVAIL (DECL_ATTRIBUTES (t));
2717 LTO_SET_PREVAIL (DECL_ABSTRACT_ORIGIN (t));
2718 }
2719 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
2720 {
2721 LTO_NO_PREVAIL (t->decl_with_vis.assembler_name);
2722 LTO_NO_PREVAIL (DECL_SECTION_NAME (t));
2723 }
2724 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
2725 {
2726 LTO_NO_PREVAIL (DECL_ARGUMENT_FLD (t));
2727 LTO_NO_PREVAIL (DECL_RESULT_FLD (t));
2728 LTO_NO_PREVAIL (DECL_VINDEX (t));
2729 }
2730 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
2731 LTO_SET_PREVAIL (DECL_FUNCTION_PERSONALITY (t));
2732 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
2733 {
2734 LTO_SET_PREVAIL (DECL_FIELD_OFFSET (t));
2735 LTO_NO_PREVAIL (DECL_BIT_FIELD_TYPE (t));
2736 LTO_NO_PREVAIL (DECL_QUALIFIER (t));
2737 LTO_NO_PREVAIL (DECL_FIELD_BIT_OFFSET (t));
2738 LTO_NO_PREVAIL (DECL_FCONTEXT (t));
2739 }
2740 }
2741 else if (TYPE_P (t))
2742 {
2743 LTO_NO_PREVAIL (TYPE_CACHED_VALUES (t));
2744 LTO_SET_PREVAIL (TYPE_SIZE (t));
2745 LTO_SET_PREVAIL (TYPE_SIZE_UNIT (t));
2746 LTO_NO_PREVAIL (TYPE_ATTRIBUTES (t));
2747 LTO_NO_PREVAIL (TYPE_NAME (t));
2748
2749 LTO_SET_PREVAIL (TYPE_MINVAL (t));
2750 LTO_SET_PREVAIL (TYPE_MAXVAL (t));
2751 LTO_NO_PREVAIL (t->type_non_common.binfo);
2752
2753 LTO_SET_PREVAIL (TYPE_CONTEXT (t));
2754
2755 LTO_NO_PREVAIL (TYPE_CANONICAL (t));
2756 LTO_NO_PREVAIL (TYPE_MAIN_VARIANT (t));
2757 LTO_NO_PREVAIL (TYPE_NEXT_VARIANT (t));
2758 }
2759 else if (EXPR_P (t))
2760 {
2761 int i;
2762 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
2763 LTO_SET_PREVAIL (TREE_OPERAND (t, i));
2764 }
2765 else if (TREE_CODE (t) == CONSTRUCTOR)
2766 {
2767 unsigned i;
2768 tree val;
2769 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
2770 LTO_SET_PREVAIL (val);
2771 }
2772 else
2773 {
2774 switch (code)
2775 {
2776 case TREE_LIST:
2777 LTO_SET_PREVAIL (TREE_VALUE (t));
2778 LTO_SET_PREVAIL (TREE_PURPOSE (t));
2779 LTO_NO_PREVAIL (TREE_PURPOSE (t));
2780 break;
2781 default:
2782 gcc_unreachable ();
2783 }
2784 }
2785 /* If we fixed nothing, then we missed something seen by
2786 mentions_vars_p. */
2787 gcc_checking_assert (fixed);
2788 }
2789 #undef LTO_SET_PREVAIL
2790 #undef LTO_NO_PREVAIL
2791
2792 /* Helper function of lto_fixup_decls. Walks the var and fn streams in STATE,
2793 replaces var and function decls with the corresponding prevailing def. */
2794
2795 static void
2796 lto_fixup_state (struct lto_in_decl_state *state)
2797 {
2798 unsigned i, si;
2799 struct lto_tree_ref_table *table;
2800
2801 /* Although we only want to replace FUNCTION_DECLs and VAR_DECLs,
2802 we still need to walk from all DECLs to find the reachable
2803 FUNCTION_DECLs and VAR_DECLs. */
2804 for (si = 0; si < LTO_N_DECL_STREAMS; si++)
2805 {
2806 table = &state->streams[si];
2807 for (i = 0; i < table->size; i++)
2808 {
2809 tree *tp = table->trees + i;
2810 if (VAR_OR_FUNCTION_DECL_P (*tp)
2811 && (TREE_PUBLIC (*tp) || DECL_EXTERNAL (*tp)))
2812 *tp = lto_symtab_prevailing_decl (*tp);
2813 }
2814 }
2815 }
2816
2817 /* A callback of htab_traverse. Just extracts a state from SLOT
2818 and calls lto_fixup_state. */
2819
2820 static int
2821 lto_fixup_state_aux (void **slot, void *aux ATTRIBUTE_UNUSED)
2822 {
2823 struct lto_in_decl_state *state = (struct lto_in_decl_state *) *slot;
2824 lto_fixup_state (state);
2825 return 1;
2826 }
2827
2828 /* Fix the decls from all FILES. Replaces each decl with the corresponding
2829 prevailing one. */
2830
2831 static void
2832 lto_fixup_decls (struct lto_file_decl_data **files)
2833 {
2834 unsigned int i;
2835 tree t;
2836
2837 if (tree_with_vars)
2838 FOR_EACH_VEC_ELT ((*tree_with_vars), i, t)
2839 lto_fixup_prevailing_decls (t);
2840
2841 for (i = 0; files[i]; i++)
2842 {
2843 struct lto_file_decl_data *file = files[i];
2844 struct lto_in_decl_state *state = file->global_decl_state;
2845 lto_fixup_state (state);
2846
2847 htab_traverse (file->function_decl_states, lto_fixup_state_aux, NULL);
2848 }
2849 }
2850
2851 static GTY((length ("lto_stats.num_input_files + 1"))) struct lto_file_decl_data **all_file_decl_data;
2852
2853 /* Turn file datas for sub files into a single array, so that they look
2854 like separate files for further passes. */
2855
2856 static void
2857 lto_flatten_files (struct lto_file_decl_data **orig, int count, int last_file_ix)
2858 {
2859 struct lto_file_decl_data *n, *next;
2860 int i, k;
2861
2862 lto_stats.num_input_files = count;
2863 all_file_decl_data
2864 = ggc_cleared_vec_alloc<lto_file_decl_data_ptr> (count + 1);
2865 /* Set the hooks so that all of the ipa passes can read in their data. */
2866 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
2867 for (i = 0, k = 0; i < last_file_ix; i++)
2868 {
2869 for (n = orig[i]; n != NULL; n = next)
2870 {
2871 all_file_decl_data[k++] = n;
2872 next = n->next;
2873 n->next = NULL;
2874 }
2875 }
2876 all_file_decl_data[k] = NULL;
2877 gcc_assert (k == count);
2878 }
2879
2880 /* Input file data before flattening (i.e. splitting them to subfiles to
2881 support incremental linking). */
2882 static int real_file_count;
2883 static GTY((length ("real_file_count + 1"))) struct lto_file_decl_data **real_file_decl_data;
2884
2885 static void print_lto_report_1 (void);
2886
2887 /* Read all the symbols from the input files FNAMES. NFILES is the
2888 number of files requested in the command line. Instantiate a
2889 global call graph by aggregating all the sub-graphs found in each
2890 file. */
2891
2892 static void
2893 read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
2894 {
2895 unsigned int i, last_file_ix;
2896 FILE *resolution;
2897 int count = 0;
2898 struct lto_file_decl_data **decl_data;
2899 void **res;
2900 symtab_node *snode;
2901
2902 init_cgraph ();
2903
2904 timevar_push (TV_IPA_LTO_DECL_IN);
2905
2906 real_file_decl_data
2907 = decl_data = ggc_cleared_vec_alloc<lto_file_decl_data_ptr> (nfiles + 1);
2908 real_file_count = nfiles;
2909
2910 /* Read the resolution file. */
2911 resolution = NULL;
2912 if (resolution_file_name)
2913 {
2914 int t;
2915 unsigned num_objects;
2916
2917 resolution = fopen (resolution_file_name, "r");
2918 if (resolution == NULL)
2919 fatal_error ("could not open symbol resolution file: %m");
2920
2921 t = fscanf (resolution, "%u", &num_objects);
2922 gcc_assert (t == 1);
2923
2924 /* True, since the plugin splits the archives. */
2925 gcc_assert (num_objects == nfiles);
2926 }
2927 cgraph_state = CGRAPH_LTO_STREAMING;
2928
2929 canonical_type_hash_cache = new pointer_map <hashval_t>;
2930 gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
2931 gimple_canonical_type_eq, 0);
2932 gcc_obstack_init (&tree_scc_hash_obstack);
2933 tree_scc_hash.create (4096);
2934
2935 /* Register the common node types with the canonical type machinery so
2936 we properly share alias-sets across languages and TUs. Do not
2937 expose the common nodes as type merge targets - those that should be
2938 are already exposed by pre-loading the LTO streamer caches.
2939 Do two passes - first clear TYPE_CANONICAL and then re-compute it. */
2940 for (i = 0; i < itk_none; ++i)
2941 lto_register_canonical_types (integer_types[i], true);
2942 for (i = 0; i < stk_type_kind_last; ++i)
2943 lto_register_canonical_types (sizetype_tab[i], true);
2944 for (i = 0; i < TI_MAX; ++i)
2945 lto_register_canonical_types (global_trees[i], true);
2946 for (i = 0; i < itk_none; ++i)
2947 lto_register_canonical_types (integer_types[i], false);
2948 for (i = 0; i < stk_type_kind_last; ++i)
2949 lto_register_canonical_types (sizetype_tab[i], false);
2950 for (i = 0; i < TI_MAX; ++i)
2951 lto_register_canonical_types (global_trees[i], false);
2952
2953 if (!quiet_flag)
2954 fprintf (stderr, "Reading object files:");
2955
2956 /* Read all of the object files specified on the command line. */
2957 for (i = 0, last_file_ix = 0; i < nfiles; ++i)
2958 {
2959 struct lto_file_decl_data *file_data = NULL;
2960 if (!quiet_flag)
2961 {
2962 fprintf (stderr, " %s", fnames[i]);
2963 fflush (stderr);
2964 }
2965
2966 current_lto_file = lto_obj_file_open (fnames[i], false);
2967 if (!current_lto_file)
2968 break;
2969
2970 file_data = lto_file_read (current_lto_file, resolution, &count);
2971 if (!file_data)
2972 {
2973 lto_obj_file_close (current_lto_file);
2974 free (current_lto_file);
2975 current_lto_file = NULL;
2976 break;
2977 }
2978
2979 decl_data[last_file_ix++] = file_data;
2980
2981 lto_obj_file_close (current_lto_file);
2982 free (current_lto_file);
2983 current_lto_file = NULL;
2984 }
2985
2986 lto_flatten_files (decl_data, count, last_file_ix);
2987 lto_stats.num_input_files = count;
2988 ggc_free(decl_data);
2989 real_file_decl_data = NULL;
2990
2991 if (resolution_file_name)
2992 fclose (resolution);
2993
2994 /* Show the LTO report before launching LTRANS. */
2995 if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
2996 print_lto_report_1 ();
2997
2998 /* Free gimple type merging datastructures. */
2999 tree_scc_hash.dispose ();
3000 obstack_free (&tree_scc_hash_obstack, NULL);
3001 htab_delete (gimple_canonical_types);
3002 gimple_canonical_types = NULL;
3003 delete canonical_type_hash_cache;
3004 canonical_type_hash_cache = NULL;
3005 ggc_collect ();
3006
3007 /* Set the hooks so that all of the ipa passes can read in their data. */
3008 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
3009
3010 timevar_pop (TV_IPA_LTO_DECL_IN);
3011
3012 if (!quiet_flag)
3013 fprintf (stderr, "\nReading the callgraph\n");
3014
3015 timevar_push (TV_IPA_LTO_CGRAPH_IO);
3016 /* Read the symtab. */
3017 input_symtab ();
3018
3019 /* Store resolutions into the symbol table. */
3020
3021 FOR_EACH_SYMBOL (snode)
3022 if (symtab_real_symbol_p (snode)
3023 && snode->lto_file_data
3024 && snode->lto_file_data->resolution_map
3025 && (res = pointer_map_contains (snode->lto_file_data->resolution_map,
3026 snode->decl)))
3027 snode->resolution
3028 = (enum ld_plugin_symbol_resolution)(size_t)*res;
3029 for (i = 0; all_file_decl_data[i]; i++)
3030 if (all_file_decl_data[i]->resolution_map)
3031 {
3032 pointer_map_destroy (all_file_decl_data[i]->resolution_map);
3033 all_file_decl_data[i]->resolution_map = NULL;
3034 }
3035
3036 timevar_pop (TV_IPA_LTO_CGRAPH_IO);
3037
3038 if (!quiet_flag)
3039 fprintf (stderr, "Merging declarations\n");
3040
3041 timevar_push (TV_IPA_LTO_DECL_MERGE);
3042 /* Merge global decls. In ltrans mode we read the merged cgraph, so we
3043 do not need to care about resolving symbols again; we only need to
3044 replace duplicated declarations read from the callgraph and from
3045 function sections. */
3046 if (!flag_ltrans)
3047 {
3048 lto_symtab_merge_decls ();
3049
3050 /* If there were errors during symbol merging bail out, we have no
3051 good way to recover here. */
3052 if (seen_error ())
3053 fatal_error ("errors during merging of translation units");
3054
3055 /* Fixup all decls. */
3056 lto_fixup_decls (all_file_decl_data);
3057 }
3058 if (tree_with_vars)
3059 ggc_free (tree_with_vars);
3060 tree_with_vars = NULL;
3061 ggc_collect ();
3062
3063 timevar_pop (TV_IPA_LTO_DECL_MERGE);
3064 /* Each pass will set the appropriate timer. */
3065
3066 if (!quiet_flag)
3067 fprintf (stderr, "Reading summaries\n");
3068
3069 /* Read the IPA summary data. */
3070 if (flag_ltrans)
3071 ipa_read_optimization_summaries ();
3072 else
3073 ipa_read_summaries ();
3074
3075 for (i = 0; all_file_decl_data[i]; i++)
3076 {
3077 gcc_assert (all_file_decl_data[i]->symtab_node_encoder);
3078 lto_symtab_encoder_delete (all_file_decl_data[i]->symtab_node_encoder);
3079 all_file_decl_data[i]->symtab_node_encoder = NULL;
3080 lto_free_function_in_decl_state (all_file_decl_data[i]->global_decl_state);
3081 all_file_decl_data[i]->global_decl_state = NULL;
3082 all_file_decl_data[i]->current_decl_state = NULL;
3083 }
3084
3085 /* Finally merge the cgraph according to the decl merging decisions. */
3086 timevar_push (TV_IPA_LTO_CGRAPH_MERGE);
3087 if (cgraph_dump_file)
3088 {
3089 fprintf (cgraph_dump_file, "Before merging:\n");
3090 dump_symtab (cgraph_dump_file);
3091 }
3092 lto_symtab_merge_symbols ();
3093 ggc_collect ();
3094 cgraph_state = CGRAPH_STATE_IPA_SSA;
3095
3096 timevar_pop (TV_IPA_LTO_CGRAPH_MERGE);
3097
3098 timevar_push (TV_IPA_LTO_DECL_INIT_IO);
3099
3100 /* Indicate that the cgraph is built and ready. */
3101 cgraph_function_flags_ready = true;
3102
3103 timevar_pop (TV_IPA_LTO_DECL_INIT_IO);
3104 ggc_free (all_file_decl_data);
3105 all_file_decl_data = NULL;
3106 }
3107
3108
3109 /* Materialize all the bodies for all the nodes in the callgraph. */
3110
3111 static void
3112 materialize_cgraph (void)
3113 {
3114 struct cgraph_node *node;
3115 timevar_id_t lto_timer;
3116
3117 if (!quiet_flag)
3118 fprintf (stderr,
3119 flag_wpa ? "Materializing decls:" : "Reading function bodies:");
3120
3121 /* Now that we have input the cgraph, we need to clear all of the aux
3122 nodes and read the functions if we are not running in WPA mode. */
3123 timevar_push (TV_IPA_LTO_GIMPLE_IN);
3124
3125 FOR_EACH_FUNCTION (node)
3126 {
3127 if (node->lto_file_data)
3128 {
3129 lto_materialize_function (node);
3130 lto_stats.num_input_cgraph_nodes++;
3131 }
3132 }
3133
3134 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
3135
3136 /* Start the appropriate timer depending on the mode that we are
3137 operating in. */
3138 lto_timer = (flag_wpa) ? TV_WHOPR_WPA
3139 : (flag_ltrans) ? TV_WHOPR_LTRANS
3140 : TV_LTO;
3141 timevar_push (lto_timer);
3142
3143 current_function_decl = NULL;
3144 set_cfun (NULL);
3145
3146 if (!quiet_flag)
3147 fprintf (stderr, "\n");
3148
3149 timevar_pop (lto_timer);
3150 }
3151
3152
3153 /* Show various memory usage statistics related to LTO. */
3154 static void
3155 print_lto_report_1 (void)
3156 {
3157 const char *pfx = (flag_lto) ? "LTO" : (flag_wpa) ? "WPA" : "LTRANS";
3158 fprintf (stderr, "%s statistics\n", pfx);
3159
3160 fprintf (stderr, "[%s] read %lu SCCs of average size %f\n",
3161 pfx, num_sccs_read, total_scc_size / (double)num_sccs_read);
3162 fprintf (stderr, "[%s] %lu tree bodies read in total\n", pfx, total_scc_size);
3163 if (flag_wpa && tree_scc_hash.is_created ())
3164 {
3165 fprintf (stderr, "[%s] tree SCC table: size %ld, %ld elements, "
3166 "collision ratio: %f\n", pfx,
3167 (long) tree_scc_hash.size (),
3168 (long) tree_scc_hash.elements (),
3169 tree_scc_hash.collisions ());
3170 hash_table<tree_scc_hasher>::iterator hiter;
3171 tree_scc *scc, *max_scc = NULL;
3172 unsigned max_length = 0;
3173 FOR_EACH_HASH_TABLE_ELEMENT (tree_scc_hash, scc, x, hiter)
3174 {
3175 unsigned length = 0;
3176 tree_scc *s = scc;
3177 for (; s; s = s->next)
3178 length++;
3179 if (length > max_length)
3180 {
3181 max_length = length;
3182 max_scc = scc;
3183 }
3184 }
3185 fprintf (stderr, "[%s] tree SCC max chain length %u (size %u)\n",
3186 pfx, max_length, max_scc->len);
3187 fprintf (stderr, "[%s] Compared %lu SCCs, %lu collisions (%f)\n", pfx,
3188 num_scc_compares, num_scc_compare_collisions,
3189 num_scc_compare_collisions / (double) num_scc_compares);
3190 fprintf (stderr, "[%s] Merged %lu SCCs\n", pfx, num_sccs_merged);
3191 fprintf (stderr, "[%s] Merged %lu tree bodies\n", pfx,
3192 total_scc_size_merged);
3193 fprintf (stderr, "[%s] Merged %lu types\n", pfx, num_merged_types);
3194 fprintf (stderr, "[%s] %lu types prevailed (%lu associated trees)\n",
3195 pfx, num_prevailing_types, num_type_scc_trees);
3196 fprintf (stderr, "[%s] GIMPLE canonical type table: size %ld, "
3197 "%ld elements, %ld searches, %ld collisions (ratio: %f)\n", pfx,
3198 (long) htab_size (gimple_canonical_types),
3199 (long) htab_elements (gimple_canonical_types),
3200 (long) gimple_canonical_types->searches,
3201 (long) gimple_canonical_types->collisions,
3202 htab_collisions (gimple_canonical_types));
3203 fprintf (stderr, "[%s] GIMPLE canonical type pointer-map: "
3204 "%lu elements, %ld searches\n", pfx,
3205 num_canonical_type_hash_entries,
3206 num_canonical_type_hash_queries);
3207 }
3208
3209 print_lto_report (pfx);
3210 }
3211
3212 /* Perform whole program analysis (WPA) on the callgraph and write out the
3213 optimization plan. */
3214
3215 static void
3216 do_whole_program_analysis (void)
3217 {
3218 symtab_node *node;
3219
3220 lto_parallelism = 1;
3221
3222 /* TODO: jobserver communication is not supported yet. */
3223 if (!strcmp (flag_wpa, "jobserver"))
3224 lto_parallelism = -1;
3225 else
3226 {
3227 lto_parallelism = atoi (flag_wpa);
3228 if (lto_parallelism <= 0)
3229 lto_parallelism = 0;
3230 }
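
/* In summary (a reading aid; FLAG_WPA is the string argument the
   driver passes via -fwpa=):
     "jobserver"     -> lto_parallelism == -1; since jobserver
                        communication is not implemented yet,
                        stream_out still streams serially;
     a number N >= 2 -> fork up to N streaming children;
     anything else   -> 0 or 1, i.e. serial streaming.  */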
3231
3232 timevar_start (TV_PHASE_OPT_GEN);
3233
3234 /* Note that since we are in WPA mode, materialize_cgraph will not
3235 actually read in all the function bodies. It only materializes
3236 the decls and cgraph nodes so that analysis can be performed. */
3237 materialize_cgraph ();
3238
3239 /* Reading in the cgraph uses different timers, start timing WPA now. */
3240 timevar_push (TV_WHOPR_WPA);
3241
3242 if (pre_ipa_mem_report)
3243 {
3244 fprintf (stderr, "Memory consumption before IPA\n");
3245 dump_memory_report (false);
3246 }
3247
3248 cgraph_function_flags_ready = true;
3249
3250 if (cgraph_dump_file)
3251 dump_symtab (cgraph_dump_file);
3252 bitmap_obstack_initialize (NULL);
3253 cgraph_state = CGRAPH_STATE_IPA_SSA;
3254
3255 execute_ipa_pass_list (g->get_passes ()->all_regular_ipa_passes);
3256 symtab_remove_unreachable_nodes (false, dump_file);
3257
3258 if (cgraph_dump_file)
3259 {
3260 fprintf (cgraph_dump_file, "Optimized ");
3261 dump_symtab (cgraph_dump_file);
3262 }
3263 #ifdef ENABLE_CHECKING
3264 verify_cgraph ();
3265 #endif
3266 bitmap_obstack_release (NULL);
3267
3268 /* We are about to launch the final LTRANS phase, stop the WPA timer. */
3269 timevar_pop (TV_WHOPR_WPA);
3270
3271 timevar_push (TV_WHOPR_PARTITIONING);
3272 if (flag_lto_partition == LTO_PARTITION_1TO1)
3273 lto_1_to_1_map ();
3274 else if (flag_lto_partition == LTO_PARTITION_MAX)
3275 lto_max_map ();
3276 else if (flag_lto_partition == LTO_PARTITION_ONE)
3277 lto_balanced_map (1);
3278 else if (flag_lto_partition == LTO_PARTITION_BALANCED)
3279 lto_balanced_map (PARAM_VALUE (PARAM_LTO_PARTITIONS));
3280 else
3281 gcc_unreachable ();
3282
3283 /* Inline summaries are needed for balanced partitioning. Free them now so
3284 the memory can be used for streamer caches. */
3285 inline_free_summary ();
3286
3287 /* AUX pointers are used by the partitioning code to keep track of the
3288 number of partitions a symbol is in. That is no longer needed. */
3289 FOR_EACH_SYMBOL (node)
3290 node->aux = NULL;
3291
3292 lto_stats.num_cgraph_partitions += ltrans_partitions.length ();
3293
3294 /* Find the statics that need to be promoted to globals with hidden
3295 visibility because they are accessed from multiple partitions, and
3296 promote them. */
3297 lto_promote_cross_file_statics ();
3298 timevar_pop (TV_WHOPR_PARTITIONING);
3299
3300 timevar_stop (TV_PHASE_OPT_GEN);
3301
3302 /* Collect one last time - in lto_wpa_write_files we may end up forking
3303 on the assumption that this does not increase memory usage, so we
3304 absolutely do not want to collect after that. */
3305 ggc_collect ();
3306
3307 timevar_start (TV_PHASE_STREAM_OUT);
3308 if (!quiet_flag)
3309 {
3310 fprintf (stderr, "\nStreaming out");
3311 fflush (stderr);
3312 }
3313 lto_wpa_write_files ();
3314 if (!quiet_flag)
3315 fprintf (stderr, "\n");
3316 timevar_stop (TV_PHASE_STREAM_OUT);
3317
3318 if (post_ipa_mem_report)
3319 {
3320 fprintf (stderr, "Memory consumption after IPA\n");
3321 dump_memory_report (false);
3322 }
3323
3324 /* Show the LTO report before launching LTRANS. */
3325 if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
3326 print_lto_report_1 ();
3327 if (mem_report_wpa)
3328 dump_memory_report (true);
3329 }
3330
3331
3332 static GTY(()) tree lto_eh_personality_decl;
3333
3334 /* Return the LTO personality function decl. */
3335
3336 tree
3337 lto_eh_personality (void)
3338 {
3339 if (!lto_eh_personality_decl)
3340 {
3341 /* Use the first personality DECL for our personality if we don't
3342 support multiple ones. This ensures that we don't artificially
3343 create the need for them in a single-language program. */
3344 if (first_personality_decl && !dwarf2out_do_cfi_asm ())
3345 lto_eh_personality_decl = first_personality_decl;
3346 else
3347 lto_eh_personality_decl = lhd_gcc_personality ();
3348 }
3349
3350 return lto_eh_personality_decl;
3351 }
3352
3353 /* Set the process name based on the LTO mode. */
3354
3355 static void
3356 lto_process_name (void)
3357 {
3358 if (flag_lto)
3359 setproctitle ("lto1-lto");
3360 if (flag_wpa)
3361 setproctitle ("lto1-wpa");
3362 if (flag_ltrans)
3363 setproctitle ("lto1-ltrans");
3364 }
3365
3366
3367 /* Initialize the LTO front end. */
3368
3369 static void
3370 lto_init (void)
3371 {
3372 lto_process_name ();
3373 lto_streamer_hooks_init ();
3374 lto_reader_init ();
3375 lto_set_in_hooks (NULL, get_section_data, free_section_data);
3376 memset (&lto_stats, 0, sizeof (lto_stats));
3377 bitmap_obstack_initialize (NULL);
3378 gimple_register_cfg_hooks ();
3379 }
3380
3381
3382 /* Main entry point for the GIMPLE front end. This front end has
3383 three main personalities:
3384
3385 - LTO (-flto). All the object files on the command line are
3386 loaded in memory and processed as a single translation unit.
3387 This is the traditional link-time optimization behavior.
3388
3389 - WPA (-fwpa). Only the callgraph and summary information for
3390 files in the command file are loaded. A single callgraph
3391 (without function bodies) is instantiated for the whole set of
3392 files. IPA passes are only allowed to analyze the call graph
3393 and make transformation decisions. The callgraph is
3394 partitioned, each partition is written to a new object file
3395 together with the transformation decisions.
3396
3397 - LTRANS (-fltrans). Similar to -flto but it prevents the IPA
3398 analysis passes from running again. Since WPA computed summary
3399 information and decided what transformations to apply, LTRANS
3400 simply applies them. */
3401
3402 void
3403 lto_main (void)
3404 {
3405 /* LTO is called as a front end, even though it is not a front end.
3406 Because it is called as a front end, TV_PHASE_PARSING and
3407 TV_PARSE_GLOBAL are active, and we need to turn them off while
3408 doing LTO. Later we turn them back on so they are active up in
3409 toplev.c. */
3410 timevar_pop (TV_PARSE_GLOBAL);
3411 timevar_stop (TV_PHASE_PARSING);
3412
3413 timevar_start (TV_PHASE_SETUP);
3414
3415 /* Initialize the LTO front end. */
3416 lto_init ();
3417
3418 timevar_stop (TV_PHASE_SETUP);
3419 timevar_start (TV_PHASE_STREAM_IN);
3420
3421 /* Read all the symbols and call graph from all the files in the
3422 command line. */
3423 read_cgraph_and_symbols (num_in_fnames, in_fnames);
3424
3425 timevar_stop (TV_PHASE_STREAM_IN);
3426
3427 if (!seen_error ())
3428 {
3429 /* If WPA is enabled analyze the whole call graph and create an
3430 optimization plan. Otherwise, read in all the function
3431 bodies and continue with optimization. */
3432 if (flag_wpa)
3433 do_whole_program_analysis ();
3434 else
3435 {
3436 timevar_start (TV_PHASE_OPT_GEN);
3437
3438 materialize_cgraph ();
3439 if (!flag_ltrans)
3440 lto_promote_statics_nonwpa ();
3441
3442 /* Let the middle end know that we have read and merged all of
3443 the input files. */
3444 compile ();
3445
3446 timevar_stop (TV_PHASE_OPT_GEN);
3447
3448 /* FIXME lto, if the processes spawned by WPA fail, we miss
3449 the chance to print WPA's report, so WPA will call
3450 print_lto_report before launching LTRANS. If LTRANS was
3451 launched directly by the driver we would not need to do
3452 this. */
3453 if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
3454 print_lto_report_1 ();
3455 }
3456 }
3457
3458 /* Here we make LTO pretend to be a parser. */
3459 timevar_start (TV_PHASE_PARSING);
3460 timevar_push (TV_PARSE_GLOBAL);
3461 }
3462
3463 #include "gt-lto-lto.h"