1 /* Top-level LTO routines.
2 Copyright (C) 2009-2014 Free Software Foundation, Inc.
3 Contributed by CodeSourcery, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "opts.h"
25 #include "toplev.h"
26 #include "tree.h"
27 #include "stor-layout.h"
28 #include "diagnostic-core.h"
29 #include "tm.h"
30 #include "cgraph.h"
31 #include "tree-ssa-operands.h"
32 #include "tree-pass.h"
33 #include "langhooks.h"
34 #include "bitmap.h"
35 #include "ipa-prop.h"
36 #include "common.h"
37 #include "debug.h"
38 #include "tree-ssa-alias.h"
39 #include "internal-fn.h"
40 #include "gimple-expr.h"
41 #include "is-a.h"
42 #include "gimple.h"
43 #include "lto.h"
44 #include "lto-tree.h"
45 #include "lto-streamer.h"
46 #include "tree-streamer.h"
47 #include "splay-tree.h"
48 #include "lto-partition.h"
49 #include "data-streamer.h"
50 #include "context.h"
51 #include "pass_manager.h"
52 #include "ipa-inline.h"
53
54
55 /* Number of parallel tasks to run; -1 if we want to use the GNU Make jobserver. */
56 static int lto_parallelism;
57
58 static GTY(()) tree first_personality_decl;
59
60 /* Returns a hash code for P. */
61
62 static hashval_t
63 hash_name (const void *p)
64 {
65 const struct lto_section_slot *ds = (const struct lto_section_slot *) p;
66 return (hashval_t) htab_hash_string (ds->name);
67 }
68
69
70 /* Returns nonzero if P1 and P2 are equal. */
71
72 static int
73 eq_name (const void *p1, const void *p2)
74 {
75 const struct lto_section_slot *s1 =
76 (const struct lto_section_slot *) p1;
77 const struct lto_section_slot *s2 =
78 (const struct lto_section_slot *) p2;
79
80 return strcmp (s1->name, s2->name) == 0;
81 }
82
83 /* Free an lto_section_slot. */
84
85 static void
86 free_with_string (void *arg)
87 {
88 struct lto_section_slot *s = (struct lto_section_slot *)arg;
89
90 free (CONST_CAST (char *, s->name));
91 free (arg);
92 }
93
94 /* Create the section hash table. */
95
96 htab_t
97 lto_obj_create_section_hash_table (void)
98 {
99 return htab_create (37, hash_name, eq_name, free_with_string);
100 }
101
102 /* Delete an allocated integer KEY in the splay tree. */
103
104 static void
105 lto_splay_tree_delete_id (splay_tree_key key)
106 {
107 free ((void *) key);
108 }
109
110 /* Compare splay tree node ids A and B. */
111
112 static int
113 lto_splay_tree_compare_ids (splay_tree_key a, splay_tree_key b)
114 {
115 unsigned HOST_WIDE_INT ai;
116 unsigned HOST_WIDE_INT bi;
117
118 ai = *(unsigned HOST_WIDE_INT *) a;
119 bi = *(unsigned HOST_WIDE_INT *) b;
120
121 if (ai < bi)
122 return -1;
123 else if (ai > bi)
124 return 1;
125 return 0;
126 }
127
128 /* Look up splay tree node by ID in splay tree T. */
129
130 static splay_tree_node
131 lto_splay_tree_lookup (splay_tree t, unsigned HOST_WIDE_INT id)
132 {
133 return splay_tree_lookup (t, (splay_tree_key) &id);
134 }
135
136 /* Check if KEY has ID. */
137
138 static bool
139 lto_splay_tree_id_equal_p (splay_tree_key key, unsigned HOST_WIDE_INT id)
140 {
141 return *(unsigned HOST_WIDE_INT *) key == id;
142 }
143
144 /* Insert a splay tree node into tree T with ID as key and FILE_DATA as value.
145 The ID is allocated separately because we need HOST_WIDE_INTs which may
146 be wider than a splay_tree_key. */
147
148 static void
149 lto_splay_tree_insert (splay_tree t, unsigned HOST_WIDE_INT id,
150 struct lto_file_decl_data *file_data)
151 {
152 unsigned HOST_WIDE_INT *idp = XCNEW (unsigned HOST_WIDE_INT);
153 *idp = id;
154 splay_tree_insert (t, (splay_tree_key) idp, (splay_tree_value) file_data);
155 }
156
157 /* Create a splay tree. */
158
159 static splay_tree
160 lto_splay_tree_new (void)
161 {
162 return splay_tree_new (lto_splay_tree_compare_ids,
163 lto_splay_tree_delete_id,
164 NULL);
165 }
166
167 /* Return true when NODE has a clone that is analyzed (i.e. we need
168 to load its body even if the node itself is not needed). */
169
170 static bool
171 has_analyzed_clone_p (struct cgraph_node *node)
172 {
173 struct cgraph_node *orig = node;
174 node = node->clones;
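/* Walk the clone tree iteratively: visit clones first, then siblings,
   and climb back up via clone_of once a subtree and its siblings are
   exhausted, stopping when we are back at ORIG.  */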
175 if (node)
176 while (node != orig)
177 {
178 if (node->analyzed)
179 return true;
180 if (node->clones)
181 node = node->clones;
182 else if (node->next_sibling_clone)
183 node = node->next_sibling_clone;
184 else
185 {
186 while (node != orig && !node->next_sibling_clone)
187 node = node->clone_of;
188 if (node != orig)
189 node = node->next_sibling_clone;
190 }
191 }
192 return false;
193 }
194
195 /* Read the function body for the function associated with NODE. */
196
197 static void
198 lto_materialize_function (struct cgraph_node *node)
199 {
200 tree decl;
201
202 decl = node->decl;
203 /* Read in functions with body (analyzed nodes)
204 and also functions that are needed to produce virtual clones. */
205 if ((cgraph_function_with_gimple_body_p (node) && node->analyzed)
206 || node->used_as_abstract_origin
207 || has_analyzed_clone_p (node))
208 {
209 /* Clones don't need to be read. */
210 if (node->clone_of)
211 return;
212 if (DECL_FUNCTION_PERSONALITY (decl) && !first_personality_decl)
213 first_personality_decl = DECL_FUNCTION_PERSONALITY (decl);
214 }
215
216 /* Let the middle end know about the function. */
217 rest_of_decl_compilation (decl, 1, 0);
218 }
219
220
221 /* Decode the content of memory pointed to by DATA into the in-decl
222 state object STATE. DATA_IN points to a data_in structure for
223 decoding. Return the address after the decoded object in the
224 input. */
225
226 static const uint32_t *
227 lto_read_in_decl_state (struct data_in *data_in, const uint32_t *data,
228 struct lto_in_decl_state *state)
229 {
230 uint32_t ix;
231 tree decl;
232 uint32_t i, j;
233
234 ix = *data++;
235 decl = streamer_tree_cache_get_tree (data_in->reader_cache, ix);
236 if (TREE_CODE (decl) != FUNCTION_DECL)
237 {
238 gcc_assert (decl == void_type_node);
239 decl = NULL_TREE;
240 }
241 state->fn_decl = decl;
242
243 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
244 {
245 uint32_t size = *data++;
246 tree *decls = ggc_alloc_vec_tree (size);
247
248 for (j = 0; j < size; j++)
249 decls[j] = streamer_tree_cache_get_tree (data_in->reader_cache, data[j]);
250
251 state->streams[i].size = size;
252 state->streams[i].trees = decls;
253 data += size;
254 }
255
256 return data;
257 }
258
259
260 /* Global canonical type table. */
261 static htab_t gimple_canonical_types;
262 static pointer_map <hashval_t> *canonical_type_hash_cache;
263 static unsigned long num_canonical_type_hash_entries;
264 static unsigned long num_canonical_type_hash_queries;
265
266 static hashval_t iterative_hash_canonical_type (tree type, hashval_t val);
267 static hashval_t gimple_canonical_type_hash (const void *p);
268 static void gimple_register_canonical_type_1 (tree t, hashval_t hash);
269
270 /* Return a hash value for gimple type TYPE.
271
272 The hash value returned is equal for types considered compatible
273 by gimple_canonical_types_compatible_p. */
274
275 static hashval_t
276 hash_canonical_type (tree type)
277 {
278 hashval_t v;
279
280 /* Combine a few common features of types so that types are grouped into
281 smaller sets; when searching for existing matching types to merge,
282 only existing types having the same features as the new type will be
283 checked. */
284 v = iterative_hash_hashval_t (TREE_CODE (type), 0);
285 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
286
287 /* Incorporate common features of numerical types. */
288 if (INTEGRAL_TYPE_P (type)
289 || SCALAR_FLOAT_TYPE_P (type)
290 || FIXED_POINT_TYPE_P (type)
291 || TREE_CODE (type) == OFFSET_TYPE
292 || POINTER_TYPE_P (type))
293 {
294 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
295 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
296 }
297
298 if (VECTOR_TYPE_P (type))
299 {
300 v = iterative_hash_hashval_t (TYPE_VECTOR_SUBPARTS (type), v);
301 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
302 }
303
304 if (TREE_CODE (type) == COMPLEX_TYPE)
305 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
306
307 /* For pointer and reference types, fold in information about the type
308 pointed to but do not recurse to the pointed-to type. */
309 if (POINTER_TYPE_P (type))
310 {
311 v = iterative_hash_hashval_t (TYPE_ADDR_SPACE (TREE_TYPE (type)), v);
312 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
313 }
314
315 /* For integer types hash only the string flag. */
316 if (TREE_CODE (type) == INTEGER_TYPE)
317 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
318
319 /* For array types hash the domain bounds and the string flag. */
320 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
321 {
322 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
323 /* OMP lowering can introduce error_mark_node in place of
324 random local decls in types. */
325 if (TYPE_MIN_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
326 v = iterative_hash_expr (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), v);
327 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
328 v = iterative_hash_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), v);
329 }
330
331 /* Recurse for aggregates with a single element type. */
332 if (TREE_CODE (type) == ARRAY_TYPE
333 || TREE_CODE (type) == COMPLEX_TYPE
334 || TREE_CODE (type) == VECTOR_TYPE)
335 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
336
337 /* Incorporate function return and argument types. */
338 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
339 {
340 unsigned na;
341 tree p;
342
343 /* For method types also incorporate their parent class. */
344 if (TREE_CODE (type) == METHOD_TYPE)
345 v = iterative_hash_canonical_type (TYPE_METHOD_BASETYPE (type), v);
346
347 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
348
349 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
350 {
351 v = iterative_hash_canonical_type (TREE_VALUE (p), v);
352 na++;
353 }
354
355 v = iterative_hash_hashval_t (na, v);
356 }
357
358 if (RECORD_OR_UNION_TYPE_P (type))
359 {
360 unsigned nf;
361 tree f;
362
363 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
364 if (TREE_CODE (f) == FIELD_DECL)
365 {
366 v = iterative_hash_canonical_type (TREE_TYPE (f), v);
367 nf++;
368 }
369
370 v = iterative_hash_hashval_t (nf, v);
371 }
372
373 return v;
374 }
375
376 /* Return a hash value for gimple type TYPE combined with VAL. */
377
378 static hashval_t
379 iterative_hash_canonical_type (tree type, hashval_t val)
380 {
381 hashval_t v;
382 /* An already processed type. */
383 if (TYPE_CANONICAL (type))
384 {
385 type = TYPE_CANONICAL (type);
386 v = gimple_canonical_type_hash (type);
387 }
388 else
389 {
390 /* Canonical types should not be able to form SCCs by design, this
391 recursion is just because we do not register canonical types in
392 optimal order. To avoid quadratic behavior also register the
393 type here. */
394 v = hash_canonical_type (type);
395 gimple_register_canonical_type_1 (type, v);
396 }
397 return iterative_hash_hashval_t (v, val);
398 }
399
400 /* Returns the hash for a canonical type P. */
401
402 static hashval_t
403 gimple_canonical_type_hash (const void *p)
404 {
405 num_canonical_type_hash_queries++;
406 hashval_t *slot
407 = canonical_type_hash_cache->contains (CONST_CAST_TREE ((const_tree) p));
408 gcc_assert (slot != NULL);
409 return *slot;
410 }
411
412
413 /* The TYPE_CANONICAL merging machinery. It should closely resemble
414 the middle-end types_compatible_p function. It needs to avoid
415 claiming types are different for types that should be treated
416 the same with respect to TBAA. Canonical types are also used
417 for IL consistency checks via the useless_type_conversion_p
418 predicate which does not handle all type kinds itself but falls
419 back to pointer-comparison of TYPE_CANONICAL for aggregates
420 for example. */
421
422 /* Return true iff T1 and T2 are structurally identical as far as
423 TBAA is concerned. */
424
425 static bool
426 gimple_canonical_types_compatible_p (tree t1, tree t2)
427 {
428 /* Before starting to set up the SCC machinery handle simple cases. */
429
430 /* Check first for the obvious case of pointer identity. */
431 if (t1 == t2)
432 return true;
433
434 /* Check that we have two types to compare. */
435 if (t1 == NULL_TREE || t2 == NULL_TREE)
436 return false;
437
438 /* If the types have been previously registered and found equal
439 they still are. */
440 if (TYPE_CANONICAL (t1)
441 && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
442 return true;
443
444 /* Can't be the same type if the types don't have the same code. */
445 if (TREE_CODE (t1) != TREE_CODE (t2))
446 return false;
447
448 /* Qualifiers do not matter for canonical type comparison purposes. */
449
450 /* Void types and nullptr types are always the same. */
451 if (TREE_CODE (t1) == VOID_TYPE
452 || TREE_CODE (t1) == NULLPTR_TYPE)
453 return true;
454
455 /* Can't be the same type if they have different mode. */
456 if (TYPE_MODE (t1) != TYPE_MODE (t2))
457 return false;
458
459 /* Non-aggregate types can be handled cheaply. */
460 if (INTEGRAL_TYPE_P (t1)
461 || SCALAR_FLOAT_TYPE_P (t1)
462 || FIXED_POINT_TYPE_P (t1)
463 || TREE_CODE (t1) == VECTOR_TYPE
464 || TREE_CODE (t1) == COMPLEX_TYPE
465 || TREE_CODE (t1) == OFFSET_TYPE
466 || POINTER_TYPE_P (t1))
467 {
468 /* Can't be the same type if they have different sign or precision. */
469 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
470 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
471 return false;
472
473 if (TREE_CODE (t1) == INTEGER_TYPE
474 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
475 return false;
476
477 /* For canonical type comparisons we do not want to build SCCs
478 so we cannot compare pointed-to types. But we can, for now,
479 require the same pointed-to type kind and match what
480 useless_type_conversion_p would do. */
481 if (POINTER_TYPE_P (t1))
482 {
483 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
484 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
485 return false;
486
487 if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
488 return false;
489 }
490
491 /* Tail-recurse to components. */
492 if (TREE_CODE (t1) == VECTOR_TYPE
493 || TREE_CODE (t1) == COMPLEX_TYPE)
494 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
495 TREE_TYPE (t2));
496
497 return true;
498 }
499
500 /* Do type-specific comparisons. */
501 switch (TREE_CODE (t1))
502 {
503 case ARRAY_TYPE:
504 /* Array types are the same if the element types are the same and
505 the number of elements are the same. */
506 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
507 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
508 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
509 return false;
510 else
511 {
512 tree i1 = TYPE_DOMAIN (t1);
513 tree i2 = TYPE_DOMAIN (t2);
514
515 /* For an incomplete external array, the type domain can be
516 NULL_TREE. Check this condition also. */
517 if (i1 == NULL_TREE && i2 == NULL_TREE)
518 return true;
519 else if (i1 == NULL_TREE || i2 == NULL_TREE)
520 return false;
521 else
522 {
523 tree min1 = TYPE_MIN_VALUE (i1);
524 tree min2 = TYPE_MIN_VALUE (i2);
525 tree max1 = TYPE_MAX_VALUE (i1);
526 tree max2 = TYPE_MAX_VALUE (i2);
527
528 /* The minimum/maximum values have to be the same. */
529 if ((min1 == min2
530 || (min1 && min2
531 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
532 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
533 || operand_equal_p (min1, min2, 0))))
534 && (max1 == max2
535 || (max1 && max2
536 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
537 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
538 || operand_equal_p (max1, max2, 0)))))
539 return true;
540 else
541 return false;
542 }
543 }
544
545 case METHOD_TYPE:
546 case FUNCTION_TYPE:
547 /* Function types are the same if the return type and arguments types
548 are the same. */
549 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
550 return false;
551
552 if (!comp_type_attributes (t1, t2))
553 return false;
554
555 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
556 return true;
557 else
558 {
559 tree parms1, parms2;
560
561 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
562 parms1 && parms2;
563 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
564 {
565 if (!gimple_canonical_types_compatible_p
566 (TREE_VALUE (parms1), TREE_VALUE (parms2)))
567 return false;
568 }
569
570 if (parms1 || parms2)
571 return false;
572
573 return true;
574 }
575
576 case RECORD_TYPE:
577 case UNION_TYPE:
578 case QUAL_UNION_TYPE:
579 {
580 tree f1, f2;
581
582 /* For aggregate types, all the fields must be the same. */
583 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
584 f1 || f2;
585 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
586 {
587 /* Skip non-fields. */
588 while (f1 && TREE_CODE (f1) != FIELD_DECL)
589 f1 = TREE_CHAIN (f1);
590 while (f2 && TREE_CODE (f2) != FIELD_DECL)
591 f2 = TREE_CHAIN (f2);
592 if (!f1 || !f2)
593 break;
594 /* The fields must have the same name, offset and type. */
595 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
596 || !gimple_compare_field_offset (f1, f2)
597 || !gimple_canonical_types_compatible_p
598 (TREE_TYPE (f1), TREE_TYPE (f2)))
599 return false;
600 }
601
602 /* If one aggregate has more fields than the other, they
603 are not the same. */
604 if (f1 || f2)
605 return false;
606
607 return true;
608 }
609
610 default:
611 gcc_unreachable ();
612 }
613 }
614
615
616 /* Returns nonzero if P1 and P2 are equal. */
617
618 static int
619 gimple_canonical_type_eq (const void *p1, const void *p2)
620 {
621 const_tree t1 = (const_tree) p1;
622 const_tree t2 = (const_tree) p2;
623 return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
624 CONST_CAST_TREE (t2));
625 }
626
627 /* Main worker for gimple_register_canonical_type. */
628
629 static void
630 gimple_register_canonical_type_1 (tree t, hashval_t hash)
631 {
632 void **slot;
633
634 gcc_checking_assert (TYPE_P (t) && !TYPE_CANONICAL (t));
635
636 slot = htab_find_slot_with_hash (gimple_canonical_types, t, hash, INSERT);
637 if (*slot)
638 {
639 tree new_type = (tree)(*slot);
640 gcc_checking_assert (new_type != t);
641 TYPE_CANONICAL (t) = new_type;
642 }
643 else
644 {
645 TYPE_CANONICAL (t) = t;
646 *slot = (void *) t;
647 /* Cache the just computed hash value. */
648 num_canonical_type_hash_entries++;
649 bool existed_p;
650 hashval_t *hslot = canonical_type_hash_cache->insert (t, &existed_p);
651 gcc_assert (!existed_p);
652 *hslot = hash;
653 }
654 }
655
656 /* Register type T in the global type table gimple_types and set
657 TYPE_CANONICAL of T accordingly.
658 This is used by LTO to merge structurally equivalent types for
659 type-based aliasing purposes across different TUs and languages.
660
661 ??? This merging does not exactly match how the tree.c middle-end
662 functions will assign TYPE_CANONICAL when new types are created
663 during optimization (which at least happens for pointer and array
664 types). */
665
666 static void
667 gimple_register_canonical_type (tree t)
668 {
669 if (TYPE_CANONICAL (t))
670 return;
671
672 gimple_register_canonical_type_1 (t, hash_canonical_type (t));
673 }
674
675 /* Re-compute TYPE_CANONICAL for NODE and related types. */
676
677 static void
678 lto_register_canonical_types (tree node, bool first_p)
679 {
680 if (!node
681 || !TYPE_P (node))
682 return;
683
684 if (first_p)
685 TYPE_CANONICAL (node) = NULL_TREE;
686
687 if (POINTER_TYPE_P (node)
688 || TREE_CODE (node) == COMPLEX_TYPE
689 || TREE_CODE (node) == ARRAY_TYPE)
690 lto_register_canonical_types (TREE_TYPE (node), first_p);
691
692 if (!first_p)
693 gimple_register_canonical_type (node);
694 }
695
696
697 /* Remember trees that contain references to declarations. */
698 static GTY(()) vec <tree, va_gc> *tree_with_vars;
699
700 #define CHECK_VAR(tt) \
701 do \
702 { \
703 if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \
704 && (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \
705 return true; \
706 } while (0)
707
708 #define CHECK_NO_VAR(tt) \
709 gcc_checking_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
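/* CHECK_VAR makes the enclosing mentions_vars_p_* predicate return true
   when TT is a public or external variable or function decl, i.e. a tree
   that may be replaced by a prevailing decl and so needs fixup later.
   CHECK_NO_VAR asserts that the field never points to a variable or
   function decl at all.  */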
710
711 /* Check presence of pointers to decls in fields of a tree_typed T. */
712
713 static inline bool
714 mentions_vars_p_typed (tree t)
715 {
716 CHECK_NO_VAR (TREE_TYPE (t));
717 return false;
718 }
719
720 /* Check presence of pointers to decls in fields of a tree_common T. */
721
722 static inline bool
723 mentions_vars_p_common (tree t)
724 {
725 if (mentions_vars_p_typed (t))
726 return true;
727 CHECK_NO_VAR (TREE_CHAIN (t));
728 return false;
729 }
730
731 /* Check presence of pointers to decls in fields of a decl_minimal T. */
732
733 static inline bool
734 mentions_vars_p_decl_minimal (tree t)
735 {
736 if (mentions_vars_p_common (t))
737 return true;
738 CHECK_NO_VAR (DECL_NAME (t));
739 CHECK_VAR (DECL_CONTEXT (t));
740 return false;
741 }
742
743 /* Check presence of pointers to decls in fields of a decl_common T. */
744
745 static inline bool
746 mentions_vars_p_decl_common (tree t)
747 {
748 if (mentions_vars_p_decl_minimal (t))
749 return true;
750 CHECK_VAR (DECL_SIZE (t));
751 CHECK_VAR (DECL_SIZE_UNIT (t));
752 CHECK_VAR (DECL_INITIAL (t));
753 CHECK_NO_VAR (DECL_ATTRIBUTES (t));
754 CHECK_VAR (DECL_ABSTRACT_ORIGIN (t));
755 return false;
756 }
757
758 /* Check presence of pointers to decls in fields of a decl_with_vis T. */
759
760 static inline bool
761 mentions_vars_p_decl_with_vis (tree t)
762 {
763 if (mentions_vars_p_decl_common (t))
764 return true;
765
766 /* Accessor macro has side effects; use the field name here. */
767 CHECK_NO_VAR (t->decl_with_vis.assembler_name);
768 CHECK_NO_VAR (DECL_SECTION_NAME (t));
769 return false;
770 }
771
772 /* Check presence of pointers to decls in fields of a decl_non_common T. */
773
774 static inline bool
775 mentions_vars_p_decl_non_common (tree t)
776 {
777 if (mentions_vars_p_decl_with_vis (t))
778 return true;
779 CHECK_NO_VAR (DECL_ARGUMENT_FLD (t));
780 CHECK_NO_VAR (DECL_RESULT_FLD (t));
781 CHECK_NO_VAR (DECL_VINDEX (t));
782 return false;
783 }
784
785 /* Check presence of pointers to decls in fields of a decl_non_common T. */
786
787 static bool
788 mentions_vars_p_function (tree t)
789 {
790 if (mentions_vars_p_decl_non_common (t))
791 return true;
792 CHECK_VAR (DECL_FUNCTION_PERSONALITY (t));
793 return false;
794 }
795
796 /* Check presence of pointers to decls in fields of a field_decl T. */
797
798 static bool
799 mentions_vars_p_field_decl (tree t)
800 {
801 if (mentions_vars_p_decl_common (t))
802 return true;
803 CHECK_VAR (DECL_FIELD_OFFSET (t));
804 CHECK_NO_VAR (DECL_BIT_FIELD_TYPE (t));
805 CHECK_NO_VAR (DECL_QUALIFIER (t));
806 CHECK_NO_VAR (DECL_FIELD_BIT_OFFSET (t));
807 CHECK_NO_VAR (DECL_FCONTEXT (t));
808 return false;
809 }
810
811 /* Check presence of pointers to decls in fields of a type T. */
812
813 static bool
814 mentions_vars_p_type (tree t)
815 {
816 if (mentions_vars_p_common (t))
817 return true;
818 CHECK_NO_VAR (TYPE_CACHED_VALUES (t));
819 CHECK_VAR (TYPE_SIZE (t));
820 CHECK_VAR (TYPE_SIZE_UNIT (t));
821 CHECK_NO_VAR (TYPE_ATTRIBUTES (t));
822 CHECK_NO_VAR (TYPE_NAME (t));
823
824 CHECK_VAR (TYPE_MINVAL (t));
825 CHECK_VAR (TYPE_MAXVAL (t));
826
827 /* Accessor is for derived node types only. */
828 CHECK_NO_VAR (t->type_non_common.binfo);
829
830 CHECK_VAR (TYPE_CONTEXT (t));
831 CHECK_NO_VAR (TYPE_CANONICAL (t));
832 CHECK_NO_VAR (TYPE_MAIN_VARIANT (t));
833 CHECK_NO_VAR (TYPE_NEXT_VARIANT (t));
834 return false;
835 }
836
837 /* Check presence of pointers to decls in fields of a BINFO T. */
838
839 static bool
840 mentions_vars_p_binfo (tree t)
841 {
842 unsigned HOST_WIDE_INT i, n;
843
844 if (mentions_vars_p_common (t))
845 return true;
846 CHECK_VAR (BINFO_VTABLE (t));
847 CHECK_NO_VAR (BINFO_OFFSET (t));
848 CHECK_NO_VAR (BINFO_VIRTUALS (t));
849 CHECK_NO_VAR (BINFO_VPTR_FIELD (t));
850 n = vec_safe_length (BINFO_BASE_ACCESSES (t));
851 for (i = 0; i < n; i++)
852 CHECK_NO_VAR (BINFO_BASE_ACCESS (t, i));
853 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
854 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
855 n = BINFO_N_BASE_BINFOS (t);
856 for (i = 0; i < n; i++)
857 CHECK_NO_VAR (BINFO_BASE_BINFO (t, i));
858 return false;
859 }
860
861 /* Check presence of pointers to decls in fields of a CONSTRUCTOR T. */
862
863 static bool
864 mentions_vars_p_constructor (tree t)
865 {
866 unsigned HOST_WIDE_INT idx;
867 constructor_elt *ce;
868
869 if (mentions_vars_p_typed (t))
870 return true;
871
872 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
873 {
874 CHECK_NO_VAR (ce->index);
875 CHECK_VAR (ce->value);
876 }
877 return false;
878 }
879
880 /* Check presence of pointers to decls in fields of an expression tree T. */
881
882 static bool
883 mentions_vars_p_expr (tree t)
884 {
885 int i;
886 if (mentions_vars_p_typed (t))
887 return true;
888 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
889 CHECK_VAR (TREE_OPERAND (t, i));
890 return false;
891 }
892
893 /* Check presence of pointers to decls in fields of an OMP_CLAUSE T. */
894
895 static bool
896 mentions_vars_p_omp_clause (tree t)
897 {
898 int i;
899 if (mentions_vars_p_common (t))
900 return true;
901 for (i = omp_clause_num_ops[OMP_CLAUSE_CODE (t)] - 1; i >= 0; --i)
902 CHECK_VAR (OMP_CLAUSE_OPERAND (t, i));
903 return false;
904 }
905
906 /* Check presence of pointers to decls that needs later fixup in T. */
907
908 static bool
909 mentions_vars_p (tree t)
910 {
911 switch (TREE_CODE (t))
912 {
913 case IDENTIFIER_NODE:
914 break;
915
916 case TREE_LIST:
917 CHECK_VAR (TREE_VALUE (t));
918 CHECK_VAR (TREE_PURPOSE (t));
919 CHECK_NO_VAR (TREE_CHAIN (t));
920 break;
921
922 case FIELD_DECL:
923 return mentions_vars_p_field_decl (t);
924
925 case LABEL_DECL:
926 case CONST_DECL:
927 case PARM_DECL:
928 case RESULT_DECL:
929 case IMPORTED_DECL:
930 case NAMESPACE_DECL:
931 case NAMELIST_DECL:
932 return mentions_vars_p_decl_common (t);
933
934 case VAR_DECL:
935 return mentions_vars_p_decl_with_vis (t);
936
937 case TYPE_DECL:
938 return mentions_vars_p_decl_non_common (t);
939
940 case FUNCTION_DECL:
941 return mentions_vars_p_function (t);
942
943 case TREE_BINFO:
944 return mentions_vars_p_binfo (t);
945
946 case PLACEHOLDER_EXPR:
947 return mentions_vars_p_common (t);
948
949 case BLOCK:
950 case TRANSLATION_UNIT_DECL:
951 case OPTIMIZATION_NODE:
952 case TARGET_OPTION_NODE:
953 break;
954
955 case CONSTRUCTOR:
956 return mentions_vars_p_constructor (t);
957
958 case OMP_CLAUSE:
959 return mentions_vars_p_omp_clause (t);
960
961 default:
962 if (TYPE_P (t))
963 {
964 if (mentions_vars_p_type (t))
965 return true;
966 }
967 else if (EXPR_P (t))
968 {
969 if (mentions_vars_p_expr (t))
970 return true;
971 }
972 else if (CONSTANT_CLASS_P (t))
973 CHECK_NO_VAR (TREE_TYPE (t));
974 else
975 gcc_unreachable ();
976 }
977 return false;
978 }
979
980
981 /* Return the resolution for the decl with index INDEX from DATA_IN. */
982
983 static enum ld_plugin_symbol_resolution
984 get_resolution (struct data_in *data_in, unsigned index)
985 {
986 if (data_in->globals_resolution.exists ())
987 {
988 ld_plugin_symbol_resolution_t ret;
989 /* We can have references to functions that are not emitted, at least
990 in DECL_FUNCTION_PERSONALITY, so we can and indeed have to
991 return LDPR_UNKNOWN in some cases. */
992 if (data_in->globals_resolution.length () <= index)
993 return LDPR_UNKNOWN;
994 ret = data_in->globals_resolution[index];
995 return ret;
996 }
997 else
998 /* Delay resolution finding until decl merging. */
999 return LDPR_UNKNOWN;
1000 }
1001
1002 /* We need to record resolutions until the symbol table is read. */
1003 static void
1004 register_resolution (struct lto_file_decl_data *file_data, tree decl,
1005 enum ld_plugin_symbol_resolution resolution)
1006 {
1007 if (resolution == LDPR_UNKNOWN)
1008 return;
1009 if (!file_data->resolution_map)
1010 file_data->resolution_map = pointer_map_create ();
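/* The resolution enum is small, so it is stashed directly in the map's
   value slot via integer-to-pointer casts.  */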
1011 *pointer_map_insert (file_data->resolution_map, decl) = (void *)(size_t)resolution;
1012 }
1013
1014 /* Register DECL with the global symbol table and change its
1015 name if necessary to avoid name clashes for static globals across
1016 different files. */
1017
1018 static void
1019 lto_register_var_decl_in_symtab (struct data_in *data_in, tree decl,
1020 unsigned ix)
1021 {
1022 tree context;
1023
1024 /* Variable has file scope, not local. */
1025 if (!TREE_PUBLIC (decl)
1026 && !((context = decl_function_context (decl))
1027 && auto_var_in_fn_p (decl, context)))
1028 rest_of_decl_compilation (decl, 1, 0);
1029
1030 /* If this variable has already been declared, queue the
1031 declaration for merging. */
1032 if (TREE_PUBLIC (decl))
1033 register_resolution (data_in->file_data,
1034 decl, get_resolution (data_in, ix));
1035 }
1036
1037
1038 /* Register DECL with the global symbol table and change its
1039 name if necessary to avoid name clashes for static globals across
1040 different files. DATA_IN contains descriptors and tables for the
1041 file being read. */
1042
1043 static void
1044 lto_register_function_decl_in_symtab (struct data_in *data_in, tree decl,
1045 unsigned ix)
1046 {
1047 /* If this function has already been declared, queue the
1048 declaration for merging. */
1049 if (TREE_PUBLIC (decl) && !DECL_ABSTRACT (decl))
1050 register_resolution (data_in->file_data,
1051 decl, get_resolution (data_in, ix));
1052 }
1053
1054
1055 /* For the type T re-materialize it in the type variant list and
1056 the pointer/reference-to chains. */
1057
1058 static void
1059 lto_fixup_prevailing_type (tree t)
1060 {
1061 /* The following re-creates proper variant lists while fixing up
1062 the variant leaders. We do not stream TYPE_NEXT_VARIANT so the
1063 variant list state before fixup is broken. */
1064
1065 /* If we are not our own variant leader link us into our new leaders
1066 variant list. */
1067 if (TYPE_MAIN_VARIANT (t) != t)
1068 {
1069 tree mv = TYPE_MAIN_VARIANT (t);
1070 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
1071 TYPE_NEXT_VARIANT (mv) = t;
1072 }
1073
1074 /* The following reconstructs the pointer chains
1075 of the new pointed-to type if we are a main variant. We do
1076 not stream those so they are broken before fixup. */
1077 if (TREE_CODE (t) == POINTER_TYPE
1078 && TYPE_MAIN_VARIANT (t) == t)
1079 {
1080 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (TREE_TYPE (t));
1081 TYPE_POINTER_TO (TREE_TYPE (t)) = t;
1082 }
1083 else if (TREE_CODE (t) == REFERENCE_TYPE
1084 && TYPE_MAIN_VARIANT (t) == t)
1085 {
1086 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (TREE_TYPE (t));
1087 TYPE_REFERENCE_TO (TREE_TYPE (t)) = t;
1088 }
1089 }
1090
1091
1092 /* We keep prevailing tree SCCs in a hashtable with manual collision
1093 handling (in case all hashes compare the same) and keep the colliding
1094 entries in the tree_scc->next chain. */
1095
1096 struct tree_scc
1097 {
1098 tree_scc *next;
1099 /* Hash of the whole SCC. */
1100 hashval_t hash;
1101 /* Number of trees in the SCC. */
1102 unsigned len;
1103 /* Number of possible entries into the SCC (tree nodes [0..entry_len-1]
1104 which share the same individual tree hash). */
1105 unsigned entry_len;
1106 /* The members of the SCC.
1107 We only need to remember the first entry node candidate for prevailing
1108 SCCs (but of course have access to all entries for SCCs we are
1109 processing).
1110 ??? For prevailing SCCs we really only need hash and the first
1111 entry candidate, but that's too awkward to implement. */
1112 tree entries[1];
1113 };
1114
1115 struct tree_scc_hasher : typed_noop_remove <tree_scc>
1116 {
1117 typedef tree_scc value_type;
1118 typedef tree_scc compare_type;
1119 static inline hashval_t hash (const value_type *);
1120 static inline bool equal (const value_type *, const compare_type *);
1121 };
1122
1123 hashval_t
1124 tree_scc_hasher::hash (const value_type *scc)
1125 {
1126 return scc->hash;
1127 }
1128
1129 bool
1130 tree_scc_hasher::equal (const value_type *scc1, const compare_type *scc2)
1131 {
1132 if (scc1->hash != scc2->hash
1133 || scc1->len != scc2->len
1134 || scc1->entry_len != scc2->entry_len)
1135 return false;
1136 return true;
1137 }
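/* Note that equal only checks the hash, the SCC size and the entry length;
   SCCs whose members differ structurally can still share a slot and are
   told apart by compare_tree_sccs via the tree_scc->next chain.  */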
1138
1139 static hash_table <tree_scc_hasher> tree_scc_hash;
1140 static struct obstack tree_scc_hash_obstack;
1141
1142 static unsigned long num_merged_types;
1143 static unsigned long num_prevailing_types;
1144 static unsigned long num_type_scc_trees;
1145 static unsigned long total_scc_size;
1146 static unsigned long num_sccs_read;
1147 static unsigned long total_scc_size_merged;
1148 static unsigned long num_sccs_merged;
1149 static unsigned long num_scc_compares;
1150 static unsigned long num_scc_compare_collisions;
1151
1152
1153 /* Compare the two entries T1 and T2 of two SCCs that are possibly equal,
1154 recursing through in-SCC tree edges. Returns true if the SCCs entered
1155 through T1 and T2 are equal and fills in *MAP with the pairs of
1156 SCC entries we visited, starting with (*MAP)[0] = T1 and (*MAP)[1] = T2. */
1157
1158 static bool
1159 compare_tree_sccs_1 (tree t1, tree t2, tree **map)
1160 {
1161 enum tree_code code;
1162
1163 /* Mark already visited nodes. */
1164 TREE_ASM_WRITTEN (t2) = 1;
1165
1166 /* Push the pair onto map. */
1167 (*map)[0] = t1;
1168 (*map)[1] = t2;
1169 *map = *map + 2;
1170
1171 /* Compare value-fields. */
1172 #define compare_values(X) \
1173 do { \
1174 if (X(t1) != X(t2)) \
1175 return false; \
1176 } while (0)
1177
1178 compare_values (TREE_CODE);
1179 code = TREE_CODE (t1);
1180
1181 if (!TYPE_P (t1))
1182 {
1183 compare_values (TREE_SIDE_EFFECTS);
1184 compare_values (TREE_CONSTANT);
1185 compare_values (TREE_READONLY);
1186 compare_values (TREE_PUBLIC);
1187 }
1188 compare_values (TREE_ADDRESSABLE);
1189 compare_values (TREE_THIS_VOLATILE);
1190 if (DECL_P (t1))
1191 compare_values (DECL_UNSIGNED);
1192 else if (TYPE_P (t1))
1193 compare_values (TYPE_UNSIGNED);
1194 if (TYPE_P (t1))
1195 compare_values (TYPE_ARTIFICIAL);
1196 else
1197 compare_values (TREE_NO_WARNING);
1198 compare_values (TREE_NOTHROW);
1199 compare_values (TREE_STATIC);
1200 if (code != TREE_BINFO)
1201 compare_values (TREE_PRIVATE);
1202 compare_values (TREE_PROTECTED);
1203 compare_values (TREE_DEPRECATED);
1204 if (TYPE_P (t1))
1205 {
1206 compare_values (TYPE_SATURATING);
1207 compare_values (TYPE_ADDR_SPACE);
1208 }
1209 else if (code == SSA_NAME)
1210 compare_values (SSA_NAME_IS_DEFAULT_DEF);
1211
1212 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1213 {
1214 compare_values (TREE_INT_CST_LOW);
1215 compare_values (TREE_INT_CST_HIGH);
1216 }
1217
1218 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1219 {
1220 /* ??? No suitable compare routine available. */
1221 REAL_VALUE_TYPE r1 = TREE_REAL_CST (t1);
1222 REAL_VALUE_TYPE r2 = TREE_REAL_CST (t2);
1223 if (r1.cl != r2.cl
1224 || r1.decimal != r2.decimal
1225 || r1.sign != r2.sign
1226 || r1.signalling != r2.signalling
1227 || r1.canonical != r2.canonical
1228 || r1.uexp != r2.uexp)
1229 return false;
1230 for (unsigned i = 0; i < SIGSZ; ++i)
1231 if (r1.sig[i] != r2.sig[i])
1232 return false;
1233 }
1234
1235 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1236 if (!fixed_compare (EQ_EXPR,
1237 TREE_FIXED_CST_PTR (t1), TREE_FIXED_CST_PTR (t2)))
1238 return false;
1239
1240
1241 /* We don't want to compare locations, so there is nothing to compare
1242 for TS_DECL_MINIMAL. */
1243
1244 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1245 {
1246 compare_values (DECL_MODE);
1247 compare_values (DECL_NONLOCAL);
1248 compare_values (DECL_VIRTUAL_P);
1249 compare_values (DECL_IGNORED_P);
1250 compare_values (DECL_ABSTRACT);
1251 compare_values (DECL_ARTIFICIAL);
1252 compare_values (DECL_USER_ALIGN);
1253 compare_values (DECL_PRESERVE_P);
1254 compare_values (DECL_EXTERNAL);
1255 compare_values (DECL_GIMPLE_REG_P);
1256 compare_values (DECL_ALIGN);
1257 if (code == LABEL_DECL)
1258 {
1259 compare_values (EH_LANDING_PAD_NR);
1260 compare_values (LABEL_DECL_UID);
1261 }
1262 else if (code == FIELD_DECL)
1263 {
1264 compare_values (DECL_PACKED);
1265 compare_values (DECL_NONADDRESSABLE_P);
1266 compare_values (DECL_OFFSET_ALIGN);
1267 }
1268 else if (code == VAR_DECL)
1269 {
1270 compare_values (DECL_HAS_DEBUG_EXPR_P);
1271 compare_values (DECL_NONLOCAL_FRAME);
1272 }
1273 if (code == RESULT_DECL
1274 || code == PARM_DECL
1275 || code == VAR_DECL)
1276 {
1277 compare_values (DECL_BY_REFERENCE);
1278 if (code == VAR_DECL
1279 || code == PARM_DECL)
1280 compare_values (DECL_HAS_VALUE_EXPR_P);
1281 }
1282 }
1283
1284 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1285 compare_values (DECL_REGISTER);
1286
1287 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1288 {
1289 compare_values (DECL_COMMON);
1290 compare_values (DECL_DLLIMPORT_P);
1291 compare_values (DECL_WEAK);
1292 compare_values (DECL_SEEN_IN_BIND_EXPR_P);
1293 compare_values (DECL_COMDAT);
1294 compare_values (DECL_VISIBILITY);
1295 compare_values (DECL_VISIBILITY_SPECIFIED);
1296 if (code == VAR_DECL)
1297 {
1298 compare_values (DECL_HARD_REGISTER);
1299 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1300 compare_values (DECL_IN_CONSTANT_POOL);
1301 compare_values (DECL_TLS_MODEL);
1302 }
1303 if (VAR_OR_FUNCTION_DECL_P (t1))
1304 compare_values (DECL_INIT_PRIORITY);
1305 }
1306
1307 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1308 {
1309 compare_values (DECL_BUILT_IN_CLASS);
1310 compare_values (DECL_STATIC_CONSTRUCTOR);
1311 compare_values (DECL_STATIC_DESTRUCTOR);
1312 compare_values (DECL_UNINLINABLE);
1313 compare_values (DECL_POSSIBLY_INLINED);
1314 compare_values (DECL_IS_NOVOPS);
1315 compare_values (DECL_IS_RETURNS_TWICE);
1316 compare_values (DECL_IS_MALLOC);
1317 compare_values (DECL_IS_OPERATOR_NEW);
1318 compare_values (DECL_DECLARED_INLINE_P);
1319 compare_values (DECL_STATIC_CHAIN);
1320 compare_values (DECL_NO_INLINE_WARNING_P);
1321 compare_values (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT);
1322 compare_values (DECL_NO_LIMIT_STACK);
1323 compare_values (DECL_DISREGARD_INLINE_LIMITS);
1324 compare_values (DECL_PURE_P);
1325 compare_values (DECL_LOOPING_CONST_OR_PURE_P);
1326 compare_values (DECL_FINAL_P);
1327 compare_values (DECL_CXX_CONSTRUCTOR_P);
1328 compare_values (DECL_CXX_DESTRUCTOR_P);
1329 if (DECL_BUILT_IN_CLASS (t1) != NOT_BUILT_IN)
1330 compare_values (DECL_FUNCTION_CODE);
1331 if (DECL_STATIC_DESTRUCTOR (t1))
1332 compare_values (DECL_FINI_PRIORITY);
1333 }
1334
1335 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1336 {
1337 compare_values (TYPE_MODE);
1338 compare_values (TYPE_STRING_FLAG);
1339 compare_values (TYPE_NO_FORCE_BLK);
1340 compare_values (TYPE_NEEDS_CONSTRUCTING);
1341 if (RECORD_OR_UNION_TYPE_P (t1))
1342 {
1343 compare_values (TYPE_TRANSPARENT_AGGR);
1344 compare_values (TYPE_FINAL_P);
1345 }
1346 else if (code == ARRAY_TYPE)
1347 compare_values (TYPE_NONALIASED_COMPONENT);
1348 compare_values (TYPE_PACKED);
1349 compare_values (TYPE_RESTRICT);
1350 compare_values (TYPE_USER_ALIGN);
1351 compare_values (TYPE_READONLY);
1352 compare_values (TYPE_PRECISION);
1353 compare_values (TYPE_ALIGN);
1354 compare_values (TYPE_ALIAS_SET);
1355 }
1356
1357 /* We don't want to compare locations, so there is nothing to compare
1358 for TS_EXP. */
1359
1360 /* BLOCKs are function local and we don't merge anything there, so
1361 simply refuse to merge. */
1362 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
1363 return false;
1364
1365 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1366 if (strcmp (TRANSLATION_UNIT_LANGUAGE (t1),
1367 TRANSLATION_UNIT_LANGUAGE (t2)) != 0)
1368 return false;
1369
1370 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
1371 gcc_unreachable ();
1372
1373 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1374 if (memcmp (TREE_OPTIMIZATION (t1), TREE_OPTIMIZATION (t2),
1375 sizeof (struct cl_optimization)) != 0)
1376 return false;
1377
1378 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1379 if (vec_safe_length (BINFO_BASE_ACCESSES (t1))
1380 != vec_safe_length (BINFO_BASE_ACCESSES (t2)))
1381 return false;
1382
1383 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1384 compare_values (CONSTRUCTOR_NELTS);
1385
1386 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1387 if (IDENTIFIER_LENGTH (t1) != IDENTIFIER_LENGTH (t2)
1388 || memcmp (IDENTIFIER_POINTER (t1), IDENTIFIER_POINTER (t2),
1389 IDENTIFIER_LENGTH (t1)) != 0)
1390 return false;
1391
1392 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1393 if (TREE_STRING_LENGTH (t1) != TREE_STRING_LENGTH (t2)
1394 || memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
1395 TREE_STRING_LENGTH (t1)) != 0)
1396 return false;
1397
1398 if (code == OMP_CLAUSE)
1399 {
1400 compare_values (OMP_CLAUSE_CODE);
1401 switch (OMP_CLAUSE_CODE (t1))
1402 {
1403 case OMP_CLAUSE_DEFAULT:
1404 compare_values (OMP_CLAUSE_DEFAULT_KIND);
1405 break;
1406 case OMP_CLAUSE_SCHEDULE:
1407 compare_values (OMP_CLAUSE_SCHEDULE_KIND);
1408 break;
1409 case OMP_CLAUSE_DEPEND:
1410 compare_values (OMP_CLAUSE_DEPEND_KIND);
1411 break;
1412 case OMP_CLAUSE_MAP:
1413 compare_values (OMP_CLAUSE_MAP_KIND);
1414 break;
1415 case OMP_CLAUSE_PROC_BIND:
1416 compare_values (OMP_CLAUSE_PROC_BIND_KIND);
1417 break;
1418 case OMP_CLAUSE_REDUCTION:
1419 compare_values (OMP_CLAUSE_REDUCTION_CODE);
1420 compare_values (OMP_CLAUSE_REDUCTION_GIMPLE_INIT);
1421 compare_values (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE);
1422 break;
1423 default:
1424 break;
1425 }
1426 }
1427
1428 #undef compare_values
1429
1430
1431 /* Compare pointer fields. */
1432
1433 /* Recurse. Search & Replaced from DFS_write_tree_body.
1434 Folding the early checks into the compare_tree_edges recursion
1435 macro makes debugging way quicker as you are able to break on
1436 compare_tree_sccs_1 and simply finish until a call returns false
1437 to spot the SCC members with the difference. */
1438 #define compare_tree_edges(E1, E2) \
1439 do { \
1440 tree t1_ = (E1), t2_ = (E2); \
1441 if (t1_ != t2_ \
1442 && (!t1_ || !t2_ \
1443 || !TREE_VISITED (t2_) \
1444 || (!TREE_ASM_WRITTEN (t2_) \
1445 && !compare_tree_sccs_1 (t1_, t2_, map)))) \
1446 return false; \
1447 /* Only non-NULL trees outside of the SCC may compare equal. */ \
1448 gcc_checking_assert (t1_ != t2_ || (!t2_ || !TREE_VISITED (t2_))); \
1449 } while (0)
1450
1451 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1452 {
1453 if (code != IDENTIFIER_NODE)
1454 compare_tree_edges (TREE_TYPE (t1), TREE_TYPE (t2));
1455 }
1456
1457 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1458 {
1459 unsigned i;
1460 /* Note that the number of elements for EXPR has already been emitted
1461 in EXPR's header (see streamer_write_tree_header). */
1462 for (i = 0; i < VECTOR_CST_NELTS (t1); ++i)
1463 compare_tree_edges (VECTOR_CST_ELT (t1, i), VECTOR_CST_ELT (t2, i));
1464 }
1465
1466 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1467 {
1468 compare_tree_edges (TREE_REALPART (t1), TREE_REALPART (t2));
1469 compare_tree_edges (TREE_IMAGPART (t1), TREE_IMAGPART (t2));
1470 }
1471
1472 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1473 {
1474 compare_tree_edges (DECL_NAME (t1), DECL_NAME (t2));
1475 /* ??? Global decls from different TUs have non-matching
1476 TRANSLATION_UNIT_DECLs. Only consider a small set of
1477 decls equivalent, we should not end up merging others. */
1478 if ((code == TYPE_DECL
1479 || code == NAMESPACE_DECL
1480 || code == IMPORTED_DECL
1481 || code == CONST_DECL
1482 || (VAR_OR_FUNCTION_DECL_P (t1)
1483 && (TREE_PUBLIC (t1) || DECL_EXTERNAL (t1))))
1484 && DECL_FILE_SCOPE_P (t1) && DECL_FILE_SCOPE_P (t2))
1485 ;
1486 else
1487 compare_tree_edges (DECL_CONTEXT (t1), DECL_CONTEXT (t2));
1488 }
1489
1490 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1491 {
1492 compare_tree_edges (DECL_SIZE (t1), DECL_SIZE (t2));
1493 compare_tree_edges (DECL_SIZE_UNIT (t1), DECL_SIZE_UNIT (t2));
1494 compare_tree_edges (DECL_ATTRIBUTES (t1), DECL_ATTRIBUTES (t2));
1495 if ((code == VAR_DECL
1496 || code == PARM_DECL)
1497 && DECL_HAS_VALUE_EXPR_P (t1))
1498 compare_tree_edges (DECL_VALUE_EXPR (t1), DECL_VALUE_EXPR (t2));
1499 if (code == VAR_DECL
1500 && DECL_HAS_DEBUG_EXPR_P (t1))
1501 compare_tree_edges (DECL_DEBUG_EXPR (t1), DECL_DEBUG_EXPR (t2));
1502 /* LTO specific edges. */
1503 if (code != FUNCTION_DECL
1504 && code != TRANSLATION_UNIT_DECL)
1505 compare_tree_edges (DECL_INITIAL (t1), DECL_INITIAL (t2));
1506 }
1507
1508 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1509 {
1510 if (code == FUNCTION_DECL)
1511 {
1512 tree a1, a2;
1513 for (a1 = DECL_ARGUMENTS (t1), a2 = DECL_ARGUMENTS (t2);
1514 a1 || a2;
1515 a1 = TREE_CHAIN (a1), a2 = TREE_CHAIN (a2))
1516 compare_tree_edges (a1, a2);
1517 compare_tree_edges (DECL_RESULT (t1), DECL_RESULT (t2));
1518 }
1519 else if (code == TYPE_DECL)
1520 compare_tree_edges (DECL_ORIGINAL_TYPE (t1), DECL_ORIGINAL_TYPE (t2));
1521 compare_tree_edges (DECL_VINDEX (t1), DECL_VINDEX (t2));
1522 }
1523
1524 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1525 {
1526 /* Make sure we don't inadvertently set the assembler name. */
1527 if (DECL_ASSEMBLER_NAME_SET_P (t1))
1528 compare_tree_edges (DECL_ASSEMBLER_NAME (t1),
1529 DECL_ASSEMBLER_NAME (t2));
1530 compare_tree_edges (DECL_SECTION_NAME (t1), DECL_SECTION_NAME (t2));
1531 compare_tree_edges (DECL_COMDAT_GROUP (t1), DECL_COMDAT_GROUP (t2));
1532 }
1533
1534 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1535 {
1536 compare_tree_edges (DECL_FIELD_OFFSET (t1), DECL_FIELD_OFFSET (t2));
1537 compare_tree_edges (DECL_BIT_FIELD_TYPE (t1), DECL_BIT_FIELD_TYPE (t2));
1538 compare_tree_edges (DECL_BIT_FIELD_REPRESENTATIVE (t1),
1539 DECL_BIT_FIELD_REPRESENTATIVE (t2));
1540 compare_tree_edges (DECL_FIELD_BIT_OFFSET (t1),
1541 DECL_FIELD_BIT_OFFSET (t2));
1542 compare_tree_edges (DECL_FCONTEXT (t1), DECL_FCONTEXT (t2));
1543 }
1544
1545 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1546 {
1547 compare_tree_edges (DECL_FUNCTION_PERSONALITY (t1),
1548 DECL_FUNCTION_PERSONALITY (t2));
1549 /* DECL_FUNCTION_SPECIFIC_TARGET is not yet created. We compare
1550 the attribute list instead. */
1551 compare_tree_edges (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t1),
1552 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t2));
1553 }
1554
1555 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1556 {
1557 compare_tree_edges (TYPE_SIZE (t1), TYPE_SIZE (t2));
1558 compare_tree_edges (TYPE_SIZE_UNIT (t1), TYPE_SIZE_UNIT (t2));
1559 compare_tree_edges (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2));
1560 compare_tree_edges (TYPE_NAME (t1), TYPE_NAME (t2));
1561 /* Do not compare TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
1562 reconstructed during fixup. */
1563 /* Do not compare TYPE_NEXT_VARIANT, we reconstruct the variant lists
1564 during fixup. */
1565 compare_tree_edges (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2));
1566 /* ??? Global types from different TUs have non-matching
1567 TRANSLATION_UNIT_DECLs. Still merge them if they are otherwise
1568 equal. */
1569 if (TYPE_FILE_SCOPE_P (t1) && TYPE_FILE_SCOPE_P (t2))
1570 ;
1571 else
1572 compare_tree_edges (TYPE_CONTEXT (t1), TYPE_CONTEXT (t2));
1573 /* TYPE_CANONICAL is re-computed during type merging, so do not
1574 compare it here. */
1575 compare_tree_edges (TYPE_STUB_DECL (t1), TYPE_STUB_DECL (t2));
1576 }
1577
1578 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1579 {
1580 if (code == ENUMERAL_TYPE)
1581 compare_tree_edges (TYPE_VALUES (t1), TYPE_VALUES (t2));
1582 else if (code == ARRAY_TYPE)
1583 compare_tree_edges (TYPE_DOMAIN (t1), TYPE_DOMAIN (t2));
1584 else if (RECORD_OR_UNION_TYPE_P (t1))
1585 {
1586 tree f1, f2;
1587 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
1588 f1 || f2;
1589 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1590 compare_tree_edges (f1, f2);
1591 compare_tree_edges (TYPE_BINFO (t1), TYPE_BINFO (t2));
1592 }
1593 else if (code == FUNCTION_TYPE
1594 || code == METHOD_TYPE)
1595 compare_tree_edges (TYPE_ARG_TYPES (t1), TYPE_ARG_TYPES (t2));
1596 if (!POINTER_TYPE_P (t1))
1597 compare_tree_edges (TYPE_MINVAL (t1), TYPE_MINVAL (t2));
1598 compare_tree_edges (TYPE_MAXVAL (t1), TYPE_MAXVAL (t2));
1599 }
1600
1601 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1602 {
1603 compare_tree_edges (TREE_PURPOSE (t1), TREE_PURPOSE (t2));
1604 compare_tree_edges (TREE_VALUE (t1), TREE_VALUE (t2));
1605 compare_tree_edges (TREE_CHAIN (t1), TREE_CHAIN (t2));
1606 }
1607
1608 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1609 for (int i = 0; i < TREE_VEC_LENGTH (t1); i++)
1610 compare_tree_edges (TREE_VEC_ELT (t1, i), TREE_VEC_ELT (t2, i));
1611
1612 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1613 {
1614 for (int i = 0; i < TREE_OPERAND_LENGTH (t1); i++)
1615 compare_tree_edges (TREE_OPERAND (t1, i),
1616 TREE_OPERAND (t2, i));
1617
1618 /* BLOCKs are function local and we don't merge anything there. */
1619 if (TREE_BLOCK (t1) || TREE_BLOCK (t2))
1620 return false;
1621 }
1622
1623 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1624 {
1625 unsigned i;
1626 tree t;
1627 /* Lengths have already been compared above. */
1628 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t1), i, t)
1629 compare_tree_edges (t, BINFO_BASE_BINFO (t2, i));
1630 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t1), i, t)
1631 compare_tree_edges (t, BINFO_BASE_ACCESS (t2, i));
1632 compare_tree_edges (BINFO_OFFSET (t1), BINFO_OFFSET (t2));
1633 compare_tree_edges (BINFO_VTABLE (t1), BINFO_VTABLE (t2));
1634 compare_tree_edges (BINFO_VPTR_FIELD (t1), BINFO_VPTR_FIELD (t2));
1635 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1636 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1637 }
1638
1639 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1640 {
1641 unsigned i;
1642 tree index, value;
1643 /* Lengths have already been compared above. */
1644 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t1), i, index, value)
1645 {
1646 compare_tree_edges (index, CONSTRUCTOR_ELT (t2, i)->index);
1647 compare_tree_edges (value, CONSTRUCTOR_ELT (t2, i)->value);
1648 }
1649 }
1650
1651 if (code == OMP_CLAUSE)
1652 {
1653 int i;
1654
1655 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t1)]; i++)
1656 compare_tree_edges (OMP_CLAUSE_OPERAND (t1, i),
1657 OMP_CLAUSE_OPERAND (t2, i));
1658 compare_tree_edges (OMP_CLAUSE_CHAIN (t1), OMP_CLAUSE_CHAIN (t2));
1659 }
1660
1661 #undef compare_tree_edges
1662
1663 return true;
1664 }
1665
1666 /* Compare the tree scc SCC to the prevailing candidate PSCC, filling
1667 out MAP if they are equal. */
1668
1669 static bool
1670 compare_tree_sccs (tree_scc *pscc, tree_scc *scc,
1671 tree *map)
1672 {
1673 /* Assume SCC entry hashes are sorted by their cardinality, which
1674 means we can simply take the first n-tuple of equal hashes
1675 (which is recorded as entry_len) and do n SCC entry candidate
1676 comparisons. */
1677 for (unsigned i = 0; i < pscc->entry_len; ++i)
1678 {
1679 tree *mapp = map;
1680 num_scc_compare_collisions++;
1681 if (compare_tree_sccs_1 (pscc->entries[0], scc->entries[i], &mapp))
1682 {
1683 /* Equal - no need to reset TREE_VISITED or TREE_ASM_WRITTEN
1684 on the scc as all trees will be freed. */
1685 return true;
1686 }
1687 /* Reset TREE_ASM_WRITTEN on scc for the next compare or in case
1688 the SCC prevails. */
1689 for (unsigned j = 0; j < scc->len; ++j)
1690 TREE_ASM_WRITTEN (scc->entries[j]) = 0;
1691 }
1692
1693 return false;
1694 }
1695
1696 /* qsort comparison function to sort a map of pointer pairs by the
1697 second pointer. */
1698
1699 static int
1700 cmp_tree (const void *p1_, const void *p2_)
1701 {
1702 tree *p1 = (tree *)(const_cast<void *>(p1_));
1703 tree *p2 = (tree *)(const_cast<void *>(p2_));
1704 if (p1[1] == p2[1])
1705 return 0;
1706 return ((uintptr_t)p1[1] < (uintptr_t)p2[1]) ? -1 : 1;
1707 }
1708
1709 /* Try to unify the SCC with nodes FROM to FROM + LEN in CACHE and
1710 hash value SCC_HASH with an already recorded SCC. Return true if
1711 that was successful, otherwise return false. */
1712
1713 static bool
1714 unify_scc (struct streamer_tree_cache_d *cache, unsigned from,
1715 unsigned len, unsigned scc_entry_len, hashval_t scc_hash)
1716 {
1717 bool unified_p = false;
1718 tree_scc *scc
1719 = (tree_scc *) alloca (sizeof (tree_scc) + (len - 1) * sizeof (tree));
1720 scc->next = NULL;
1721 scc->hash = scc_hash;
1722 scc->len = len;
1723 scc->entry_len = scc_entry_len;
1724 for (unsigned i = 0; i < len; ++i)
1725 {
1726 tree t = streamer_tree_cache_get_tree (cache, from + i);
1727 scc->entries[i] = t;
1728 /* Do not merge SCCs with local entities inside them. Also do
1729 not merge TRANSLATION_UNIT_DECLs. */
1730 if (TREE_CODE (t) == TRANSLATION_UNIT_DECL
1731 || (VAR_OR_FUNCTION_DECL_P (t)
1732 && !(TREE_PUBLIC (t) || DECL_EXTERNAL (t)))
1733 || TREE_CODE (t) == LABEL_DECL)
1734 {
1735 /* Avoid doing any work for these cases and do not bother to
1736 record the SCCs for further merging. */
1737 return false;
1738 }
1739 }
1740
1741 /* Look for the list of candidate SCCs to compare against. */
1742 tree_scc **slot;
1743 slot = tree_scc_hash.find_slot_with_hash (scc, scc_hash, INSERT);
1744 if (*slot)
1745 {
1746 /* Try unifying against each candidate. */
1747 num_scc_compares++;
1748
1749 /* Set TREE_VISITED on the scc so we can easily identify tree nodes
1750 outside of the scc when following tree edges. Make sure
1751 that TREE_ASM_WRITTEN is unset so we can use it as 2nd bit
1752 to track whether we visited the SCC member during the compare.
1753 We cannot use TREE_VISITED on the pscc members as the extended
1754 scc and pscc can overlap. */
1755 for (unsigned i = 0; i < scc->len; ++i)
1756 {
1757 TREE_VISITED (scc->entries[i]) = 1;
1758 gcc_checking_assert (!TREE_ASM_WRITTEN (scc->entries[i]));
1759 }
1760
1761 tree *map = XALLOCAVEC (tree, 2 * len);
1762 for (tree_scc *pscc = *slot; pscc; pscc = pscc->next)
1763 {
1764 if (!compare_tree_sccs (pscc, scc, map))
1765 continue;
1766
1767 /* Found an equal SCC. */
1768 unified_p = true;
1769 num_scc_compare_collisions--;
1770 num_sccs_merged++;
1771 total_scc_size_merged += len;
1772
1773 #ifdef ENABLE_CHECKING
1774 for (unsigned i = 0; i < len; ++i)
1775 {
1776 tree t = map[2*i+1];
1777 enum tree_code code = TREE_CODE (t);
1778 /* IDENTIFIER_NODEs should be singletons and are merged by the
1779 streamer. The others should be singletons, too, and we
1780 should not merge them in any way. */
1781 gcc_assert (code != TRANSLATION_UNIT_DECL
1782 && code != IDENTIFIER_NODE
1783 && !streamer_handle_as_builtin_p (t));
1784 }
1785 #endif
1786
1787 /* Fixup the streamer cache with the prevailing nodes according
1788 to the tree node mapping computed by compare_tree_sccs. */
1789 if (len == 1)
1790 streamer_tree_cache_replace_tree (cache, pscc->entries[0], from);
1791 else
1792 {
1793 tree *map2 = XALLOCAVEC (tree, 2 * len);
1794 for (unsigned i = 0; i < len; ++i)
1795 {
1796 map2[i*2] = (tree)(uintptr_t)(from + i);
1797 map2[i*2+1] = scc->entries[i];
1798 }
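/* Sort both maps by the tree that was read from the stream (the second
   pointer of each pair) so that entry I of MAP, the prevailing tree,
   lines up with entry I of MAP2, the cache slot that held the
   corresponding read tree, then rewrite each such slot below.  */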
1799 qsort (map2, len, 2 * sizeof (tree), cmp_tree);
1800 qsort (map, len, 2 * sizeof (tree), cmp_tree);
1801 for (unsigned i = 0; i < len; ++i)
1802 streamer_tree_cache_replace_tree (cache, map[2*i],
1803 (uintptr_t)map2[2*i]);
1804 }
1805
1806 /* Free the tree nodes from the read SCC. */
1807 for (unsigned i = 0; i < len; ++i)
1808 {
1809 enum tree_code code;
1810 if (TYPE_P (scc->entries[i]))
1811 num_merged_types++;
1812 code = TREE_CODE (scc->entries[i]);
1813 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1814 vec_free (CONSTRUCTOR_ELTS (scc->entries[i]));
1815 ggc_free (scc->entries[i]);
1816 }
1817
1818 break;
1819 }
1820
1821 /* Reset TREE_VISITED if we didn't unify the SCC with another. */
1822 if (!unified_p)
1823 for (unsigned i = 0; i < scc->len; ++i)
1824 TREE_VISITED (scc->entries[i]) = 0;
1825 }
1826
1827 /* If we didn't unify it to any candidate duplicate the relevant
1828 pieces to permanent storage and link it into the chain. */
1829 if (!unified_p)
1830 {
1831 tree_scc *pscc
1832 = XOBNEWVAR (&tree_scc_hash_obstack, tree_scc, sizeof (tree_scc));
1833 memcpy (pscc, scc, sizeof (tree_scc));
1834 pscc->next = (*slot);
1835 *slot = pscc;
1836 }
1837 return unified_p;
1838 }
1839
1840
1841 /* Read all the symbols from buffer DATA, using descriptors in DECL_DATA.
1842 RESOLUTIONS is the set of symbols picked by the linker (read from the
1843 resolution file when the linker plugin is being used). */
1844
1845 static void
1846 lto_read_decls (struct lto_file_decl_data *decl_data, const void *data,
1847 vec<ld_plugin_symbol_resolution_t> resolutions)
1848 {
1849 const struct lto_decl_header *header = (const struct lto_decl_header *) data;
1850 const int decl_offset = sizeof (struct lto_decl_header);
1851 const int main_offset = decl_offset + header->decl_state_size;
1852 const int string_offset = main_offset + header->main_size;
1853 struct lto_input_block ib_main;
1854 struct data_in *data_in;
1855 unsigned int i;
1856 const uint32_t *data_ptr, *data_end;
1857 uint32_t num_decl_states;
1858
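/* The decls section is laid out as the header, the decl-state records,
   the main tree stream and finally the string table; the offsets
   computed above address those pieces in turn. */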
1859 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
1860 header->main_size);
1861
1862 data_in = lto_data_in_create (decl_data, (const char *) data + string_offset,
1863 header->string_size, resolutions);
1864
1865 /* We do not uniquify the pre-loaded cache entries; those are middle-end
1866 internal types that should not be merged. */
1867
1868 /* Read the global declarations and types. */
1869 while (ib_main.p < ib_main.len)
1870 {
1871 tree t;
1872 unsigned from = data_in->reader_cache->nodes.length ();
1873 /* Read and uniquify SCCs as in the input stream. */
1874 enum LTO_tags tag = streamer_read_record_start (&ib_main);
1875 if (tag == LTO_tree_scc)
1876 {
1877 unsigned len_;
1878 unsigned scc_entry_len;
1879 hashval_t scc_hash = lto_input_scc (&ib_main, data_in, &len_,
1880 &scc_entry_len);
1881 unsigned len = data_in->reader_cache->nodes.length () - from;
1882 gcc_assert (len == len_);
1883
1884 total_scc_size += len;
1885 num_sccs_read++;
1886
1887 /* We have the special case of size-1 SCCs that are pre-merged
1888 by means of identifier and string sharing for example.
1889 ??? Maybe we should avoid streaming those as SCCs. */
1890 tree first = streamer_tree_cache_get_tree (data_in->reader_cache,
1891 from);
1892 if (len == 1
1893 && (TREE_CODE (first) == IDENTIFIER_NODE
1894 || TREE_CODE (first) == INTEGER_CST
1895 || TREE_CODE (first) == TRANSLATION_UNIT_DECL
1896 || streamer_handle_as_builtin_p (first)))
1897 continue;
1898
1899 /* Try to unify the SCC with already existing ones. */
1900 if (!flag_ltrans
1901 && unify_scc (data_in->reader_cache, from,
1902 len, scc_entry_len, scc_hash))
1903 continue;
1904
1905 /* Do remaining fixup tasks for prevailing nodes. */
1906 bool seen_type = false;
1907 for (unsigned i = 0; i < len; ++i)
1908 {
1909 tree t = streamer_tree_cache_get_tree (data_in->reader_cache,
1910 from + i);
1911 /* Reconstruct the type variant and pointer-to/reference-to
1912 chains. */
1913 if (TYPE_P (t))
1914 {
1915 seen_type = true;
1916 num_prevailing_types++;
1917 lto_fixup_prevailing_type (t);
1918 }
1919 /* Compute the canonical type of all types.
1920 ??? Should be able to assert that !TYPE_CANONICAL. */
1921 if (TYPE_P (t) && !TYPE_CANONICAL (t))
1922 gimple_register_canonical_type (t);
1923 /* Link shared INTEGER_CSTs into the TYPE_CACHED_VALUES of their
1924 type, which is also a member of this SCC. */
1925 if (TREE_CODE (t) == INTEGER_CST
1926 && !TREE_OVERFLOW (t))
1927 cache_integer_cst (t);
1928 /* Re-build DECL_FUNCTION_SPECIFIC_TARGET; we need it in
1929 both the WPA and LTRANS stages. */
1930 if (TREE_CODE (t) == FUNCTION_DECL)
1931 {
1932 tree attr = lookup_attribute ("target", DECL_ATTRIBUTES (t));
1933 if (attr)
1934 targetm.target_option.valid_attribute_p
1935 (t, NULL_TREE, TREE_VALUE (attr), 0);
1936 }
1937 /* Register TYPE_DECLs with the debuginfo machinery. */
1938 if (!flag_wpa
1939 && TREE_CODE (t) == TYPE_DECL)
1940 debug_hooks->type_decl (t, !DECL_FILE_SCOPE_P (t));
1941 if (!flag_ltrans)
1942 {
1943 /* Register variables and functions with the
1944 symbol table. */
1945 if (TREE_CODE (t) == VAR_DECL)
1946 lto_register_var_decl_in_symtab (data_in, t, from + i);
1947 else if (TREE_CODE (t) == FUNCTION_DECL
1948 && !DECL_BUILT_IN (t))
1949 lto_register_function_decl_in_symtab (data_in, t, from + i);
1950 /* Scan the tree for references to global functions or
1951 variables and record those for later fixup. */
1952 if (mentions_vars_p (t))
1953 vec_safe_push (tree_with_vars, t);
1954 }
1955 }
1956 if (seen_type)
1957 num_type_scc_trees += len;
1958 }
1959 else
1960 {
1961 /* Pickle stray references. */
1962 t = lto_input_tree_1 (&ib_main, data_in, tag, 0);
1963 gcc_assert (t && data_in->reader_cache->nodes.length () == from);
1964 }
1965 }
1966
1967 /* Read in lto_in_decl_state objects. */
1968 data_ptr = (const uint32_t *) ((const char*) data + decl_offset);
1969 data_end =
1970 (const uint32_t *) ((const char*) data_ptr + header->decl_state_size);
1971 num_decl_states = *data_ptr++;
1972
1973 gcc_assert (num_decl_states > 0);
1974 decl_data->global_decl_state = lto_new_in_decl_state ();
1975 data_ptr = lto_read_in_decl_state (data_in, data_ptr,
1976 decl_data->global_decl_state);
1977
1978 /* Read in per-function decl states and enter them in hash table. */
1979 decl_data->function_decl_states =
1980 htab_create_ggc (37, lto_hash_in_decl_state, lto_eq_in_decl_state, NULL);
1981
1982 for (i = 1; i < num_decl_states; i++)
1983 {
1984 struct lto_in_decl_state *state = lto_new_in_decl_state ();
1985 void **slot;
1986
1987 data_ptr = lto_read_in_decl_state (data_in, data_ptr, state);
1988 slot = htab_find_slot (decl_data->function_decl_states, state, INSERT);
1989 gcc_assert (*slot == NULL);
1990 *slot = state;
1991 }
1992
1993 if (data_ptr != data_end)
1994 internal_error ("bytecode stream: garbage at the end of symbols section");
1995
1996 /* Set the current decl state to be the global state. */
1997 decl_data->current_decl_state = decl_data->global_decl_state;
1998
1999 lto_data_in_delete (data_in);
2000 }
2001
2002 /* Custom hexadecimal parser used instead of strtoll, which is not portable. */
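/* For example, lto_parse_hex ("1f") returns 31 and lto_parse_hex ("ff0")
   returns 4080 (purely illustrative values). */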
2003
2004 static HOST_WIDEST_INT
2005 lto_parse_hex (const char *p)
2006 {
2007 HOST_WIDEST_INT ret = 0;
2008
2009 for (; *p != '\0'; ++p)
2010 {
2011 char c = *p;
2012 unsigned char part;
2013 ret <<= 4;
2014 if (c >= '0' && c <= '9')
2015 part = c - '0';
2016 else if (c >= 'a' && c <= 'f')
2017 part = c - 'a' + 10;
2018 else if (c >= 'A' && c <= 'F')
2019 part = c - 'A' + 10;
2020 else
2021 internal_error ("could not parse hex number");
2022 ret |= part;
2023 }
2024
2025 return ret;
2026 }
2027
2028 /* Read the resolution entries for FILE from RESOLUTION and record them
2029 against the sub-file data found in FILE_IDS. */
2030
2031 static void
2032 lto_resolution_read (splay_tree file_ids, FILE *resolution, lto_file *file)
2033 {
2034 /* We require that objects in the resolution file are in the same
2035 order as the lto1 command line. */
2036 unsigned int name_len;
2037 char *obj_name;
2038 unsigned int num_symbols;
2039 unsigned int i;
2040 struct lto_file_decl_data *file_data;
2041 splay_tree_node nd = NULL;
2042
2043 if (!resolution)
2044 return;
2045
2046 name_len = strlen (file->filename);
2047 obj_name = XNEWVEC (char, name_len + 1);
2048 fscanf (resolution, " "); /* Read white space. */
2049
2050 fread (obj_name, sizeof (char), name_len, resolution);
2051 obj_name[name_len] = '\0';
2052 if (filename_cmp (obj_name, file->filename) != 0)
2053 internal_error ("unexpected file name %s in linker resolution file. "
2054 "Expected %s", obj_name, file->filename);
2055 if (file->offset != 0)
2056 {
2057 int t;
2058 char offset_p[17];
2059 HOST_WIDEST_INT offset;
2060 t = fscanf (resolution, "@0x%16s", offset_p);
2061 if (t != 1)
2062 internal_error ("could not parse file offset");
2063 offset = lto_parse_hex (offset_p);
2064 if (offset != file->offset)
2065 internal_error ("unexpected offset");
2066 }
2067
2068 free (obj_name);
2069
2070 fscanf (resolution, "%u", &num_symbols);
2071
2072 for (i = 0; i < num_symbols; i++)
2073 {
2074 int t;
2075 unsigned index;
2076 unsigned HOST_WIDE_INT id;
2077 char r_str[27];
2078 enum ld_plugin_symbol_resolution r = (enum ld_plugin_symbol_resolution) 0;
2079 unsigned int j;
2080 unsigned int lto_resolution_str_len =
2081 sizeof (lto_resolution_str) / sizeof (char *);
2082 res_pair rp;
2083
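/* Each symbol line in the resolution file has the form
   <index> <sub-file id in hex> <resolution name> <rest of line>;
   only the first three fields are parsed and the remainder is skipped.
   An illustrative line might look like "5 a7b3 PREVAILING_DEF foo". */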
2084 t = fscanf (resolution, "%u " HOST_WIDE_INT_PRINT_HEX_PURE " %26s %*[^\n]\n",
2085 &index, &id, r_str);
2086 if (t != 3)
2087 internal_error ("invalid line in the resolution file");
2088
2089 for (j = 0; j < lto_resolution_str_len; j++)
2090 {
2091 if (strcmp (lto_resolution_str[j], r_str) == 0)
2092 {
2093 r = (enum ld_plugin_symbol_resolution) j;
2094 break;
2095 }
2096 }
2097 if (j == lto_resolution_str_len)
2098 internal_error ("invalid resolution in the resolution file");
2099
2100 if (!(nd && lto_splay_tree_id_equal_p (nd->key, id)))
2101 {
2102 nd = lto_splay_tree_lookup (file_ids, id);
2103 if (nd == NULL)
2104 internal_error ("resolution sub id %wx not in object file", id);
2105 }
2106
2107 file_data = (struct lto_file_decl_data *)nd->value;
2108 /* The indexes are very sparse. To save memory, store them in a compact
2109 format that is only unpacked later when the subfile is processed. */
2110 rp.res = r;
2111 rp.index = index;
2112 file_data->respairs.safe_push (rp);
2113 if (file_data->max_index < index)
2114 file_data->max_index = index;
2115 }
2116 }
2117
2118 /* List of file_decl_datas. */
2119 struct file_data_list
2120 {
2121 struct lto_file_decl_data *first, *last;
2122 };
2123
2124 /* Is NAME the name of an id'ed LTO section? If so, store the id in *ID. */
2125
2126 static int
2127 lto_section_with_id (const char *name, unsigned HOST_WIDE_INT *id)
2128 {
2129 const char *s;
2130
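/* LTO section names carry the sub-file id as a trailing hexadecimal
   component after the last '.', roughly ".gnu.lto_<name>.<id>"
   (the exact spelling comes from lto_get_section_name). */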
2131 if (strncmp (name, LTO_SECTION_NAME_PREFIX, strlen (LTO_SECTION_NAME_PREFIX)))
2132 return 0;
2133 s = strrchr (name, '.');
2134 return s && sscanf (s, "." HOST_WIDE_INT_PRINT_HEX_PURE, id) == 1;
2135 }
2136
2137 /* Create the file_data for each sub-file id. */
2138
2139 static int
2140 create_subid_section_table (struct lto_section_slot *ls, splay_tree file_ids,
2141 struct file_data_list *list)
2142 {
2143 struct lto_section_slot s_slot, *new_slot;
2144 unsigned HOST_WIDE_INT id;
2145 splay_tree_node nd;
2146 void **hash_slot;
2147 char *new_name;
2148 struct lto_file_decl_data *file_data;
2149
2150 if (!lto_section_with_id (ls->name, &id))
2151 return 1;
2152
2153 /* Find the hash table for this sub-module id. */
2154 nd = lto_splay_tree_lookup (file_ids, id);
2155 if (nd != NULL)
2156 {
2157 file_data = (struct lto_file_decl_data *)nd->value;
2158 }
2159 else
2160 {
2161 file_data = ggc_alloc_lto_file_decl_data ();
2162 memset(file_data, 0, sizeof (struct lto_file_decl_data));
2163 file_data->id = id;
2164 file_data->section_hash_table = lto_obj_create_section_hash_table ();
2165 lto_splay_tree_insert (file_ids, id, file_data);
2166
2167 /* Maintain the list in linker order. */
2168 if (!list->first)
2169 list->first = file_data;
2170 if (list->last)
2171 list->last->next = file_data;
2172 list->last = file_data;
2173 }
2174
2175 /* Copy the section into the sub-module hash table. */
2176 new_name = XDUPVEC (char, ls->name, strlen (ls->name) + 1);
2177 s_slot.name = new_name;
2178 hash_slot = htab_find_slot (file_data->section_hash_table, &s_slot, INSERT);
2179 gcc_assert (*hash_slot == NULL);
2180
2181 new_slot = XDUP (struct lto_section_slot, ls);
2182 new_slot->name = new_name;
2183 *hash_slot = new_slot;
2184 return 1;
2185 }
2186
2187 /* Read declarations and other initializations for a FILE_DATA. */
2188
2189 static void
2190 lto_file_finalize (struct lto_file_decl_data *file_data, lto_file *file)
2191 {
2192 const char *data;
2193 size_t len;
2194 vec<ld_plugin_symbol_resolution_t>
2195 resolutions = vNULL;
2196 int i;
2197 res_pair *rp;
2198
2199 /* Create a vector for fast access to the resolutions. We do this lazily
2200 to save memory. */
2201 resolutions.safe_grow_cleared (file_data->max_index + 1);
2202 for (i = 0; file_data->respairs.iterate (i, &rp); i++)
2203 resolutions[rp->index] = rp->res;
2204 file_data->respairs.release ();
2205
2206 file_data->renaming_hash_table = lto_create_renaming_table ();
2207 file_data->file_name = file->filename;
2208 data = lto_get_section_data (file_data, LTO_section_decls, NULL, &len);
2209 if (data == NULL)
2210 {
2211 internal_error ("cannot read LTO decls from %s", file_data->file_name);
2212 return;
2213 }
2214 /* This frees RESOLUTIONS. */
2215 lto_read_decls (file_data, data, resolutions);
2216 lto_free_section_data (file_data, LTO_section_decls, NULL, data, len);
2217 }
2218
2219 /* Finalize FILE_DATA in FILE and increase COUNT. */
2220
2221 static int
2222 lto_create_files_from_ids (lto_file *file, struct lto_file_decl_data *file_data,
2223 int *count)
2224 {
2225 lto_file_finalize (file_data, file);
2226 if (cgraph_dump_file)
2227 fprintf (cgraph_dump_file, "Creating file %s with sub id " HOST_WIDE_INT_PRINT_HEX "\n",
2228 file_data->file_name, file_data->id);
2229 (*count)++;
2230 return 0;
2231 }
2232
2233 /* Generate a TREE representation for all types and external decl
2234 entities in FILE.
2235
2236 Read all of the globals out of the file. Then read the cgraph
2237 and process the .o index into the cgraph nodes so that it can open
2238 the .o file to load the functions and ipa information. */
2239
2240 static struct lto_file_decl_data *
2241 lto_file_read (lto_file *file, FILE *resolution_file, int *count)
2242 {
2243 struct lto_file_decl_data *file_data = NULL;
2244 splay_tree file_ids;
2245 htab_t section_hash_table;
2246 struct lto_section_slot *section;
2247 struct file_data_list file_list;
2248 struct lto_section_list section_list;
2249
2250 memset (&section_list, 0, sizeof (struct lto_section_list));
2251 section_hash_table = lto_obj_build_section_table (file, &section_list);
2252
2253 /* Find all sub modules in the object and put their sections into new hash
2254 tables in a splay tree. */
2255 file_ids = lto_splay_tree_new ();
2256 memset (&file_list, 0, sizeof (struct file_data_list));
2257 for (section = section_list.first; section != NULL; section = section->next)
2258 create_subid_section_table (section, file_ids, &file_list);
2259
2260 /* Add resolutions to the file ids. */
2261 lto_resolution_read (file_ids, resolution_file, file);
2262
2263 /* Finalize each lto file for each submodule in the merged object. */
2264 for (file_data = file_list.first; file_data != NULL; file_data = file_data->next)
2265 lto_create_files_from_ids (file, file_data, count);
2266
2267 splay_tree_delete (file_ids);
2268 htab_delete (section_hash_table);
2269
2270 return file_list.first;
2271 }
2272
2273 #if HAVE_MMAP_FILE && HAVE_SYSCONF && defined _SC_PAGE_SIZE
2274 #define LTO_MMAP_IO 1
2275 #endif
2276
2277 #if LTO_MMAP_IO
2278 /* Mask derived from the machine page size, used for mmap and munmap calls. */
2279 static size_t page_mask;
2280 #endif
2281
2282 /* Get the section data of length LEN from the file behind FILE_DATA,
2283 starting at OFFSET. The data segment must be freed by the caller
2284 when the caller is finished. Returns NULL on failure. */
2285
2286 static char *
2287 lto_read_section_data (struct lto_file_decl_data *file_data,
2288 intptr_t offset, size_t len)
2289 {
2290 char *result;
2291 static int fd = -1;
2292 static char *fd_name;
2293 #if LTO_MMAP_IO
2294 intptr_t computed_len;
2295 intptr_t computed_offset;
2296 intptr_t diff;
2297 #endif
2298
2299 /* Keep a single-entry file-descriptor cache. The last file we
2300 touched will get closed at exit.
2301 ??? Eventually we want to add a more sophisticated larger cache
2302 or rather fix function body streaming to not stream them in
2303 practically random order. */
2304 if (fd != -1
2305 && filename_cmp (fd_name, file_data->file_name) != 0)
2306 {
2307 free (fd_name);
2308 close (fd);
2309 fd = -1;
2310 }
2311 if (fd == -1)
2312 {
2313 fd = open (file_data->file_name, O_RDONLY|O_BINARY);
2314 if (fd == -1)
2315 {
2316 fatal_error ("Cannot open %s", file_data->file_name);
2317 return NULL;
2318 }
2319 fd_name = xstrdup (file_data->file_name);
2320 }
2321
2322 #if LTO_MMAP_IO
2323 if (!page_mask)
2324 {
2325 size_t page_size = sysconf (_SC_PAGE_SIZE);
2326 page_mask = ~(page_size - 1);
2327 }
2328
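/* mmap requires a page-aligned file offset, so round OFFSET down to a
   page boundary; DIFF remembers how far we rounded and is added back to
   the returned pointer below. */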
2329 computed_offset = offset & page_mask;
2330 diff = offset - computed_offset;
2331 computed_len = len + diff;
2332
2333 result = (char *) mmap (NULL, computed_len, PROT_READ, MAP_PRIVATE,
2334 fd, computed_offset);
2335 if (result == MAP_FAILED)
2336 {
2337 fatal_error ("Cannot map %s", file_data->file_name);
2338 return NULL;
2339 }
2340
2341 return result + diff;
2342 #else
2343 result = (char *) xmalloc (len);
2344 if (lseek (fd, offset, SEEK_SET) != offset
2345 || read (fd, result, len) != (ssize_t) len)
2346 {
2347 free (result);
2348 fatal_error ("Cannot read %s", file_data->file_name);
2349 result = NULL;
2350 }
2351 #ifdef __MINGW32__
2352 /* Native Windows does not support delayed unlink of an opened file, so
2353 we close the file here again. This produces a higher I/O load, but at
2354 least it prevents dangling file handles from blocking the unlink. */
2355 free (fd_name);
2356 fd_name = NULL;
2357 close (fd);
2358 fd = -1;
2359 #endif
2360 return result;
2361 #endif
2362 }
2363
2364
2365 /* Get the section data from FILE_DATA of SECTION_TYPE with NAME.
2366 NAME will be NULL unless the section type is for a function
2367 body. */
2368
2369 static const char *
2370 get_section_data (struct lto_file_decl_data *file_data,
2371 enum lto_section_type section_type,
2372 const char *name,
2373 size_t *len)
2374 {
2375 htab_t section_hash_table = file_data->section_hash_table;
2376 struct lto_section_slot *f_slot;
2377 struct lto_section_slot s_slot;
2378 const char *section_name = lto_get_section_name (section_type, name, file_data);
2379 char *data = NULL;
2380
2381 *len = 0;
2382 s_slot.name = section_name;
2383 f_slot = (struct lto_section_slot *) htab_find (section_hash_table, &s_slot);
2384 if (f_slot)
2385 {
2386 data = lto_read_section_data (file_data, f_slot->start, f_slot->len);
2387 *len = f_slot->len;
2388 }
2389
2390 free (CONST_CAST (char *, section_name));
2391 return data;
2392 }
2393
2394
2395 /* Free the section data from FILE_DATA of SECTION_TYPE with NAME that
2396 starts at OFFSET and has LEN bytes. */
2397
2398 static void
2399 free_section_data (struct lto_file_decl_data *file_data ATTRIBUTE_UNUSED,
2400 enum lto_section_type section_type ATTRIBUTE_UNUSED,
2401 const char *name ATTRIBUTE_UNUSED,
2402 const char *offset, size_t len ATTRIBUTE_UNUSED)
2403 {
2404 #if LTO_MMAP_IO
2405 intptr_t computed_len;
2406 intptr_t computed_offset;
2407 intptr_t diff;
2408 #endif
2409
2410 #if LTO_MMAP_IO
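/* Recompute the page-aligned mapping that lto_read_section_data created
   from the user-visible OFFSET so the whole mapping can be unmapped. */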
2411 computed_offset = ((intptr_t) offset) & page_mask;
2412 diff = (intptr_t) offset - computed_offset;
2413 computed_len = len + diff;
2414
2415 munmap ((caddr_t) computed_offset, computed_len);
2416 #else
2417 free (CONST_CAST(char *, offset));
2418 #endif
2419 }
2420
2421 static lto_file *current_lto_file;
2422
2423 /* Helper for qsort; order partitions by size, largest first.
2424 We sort from greatest to smallest so a parallel build doesn't stall on
2425 the longest compilation being executed too late. */
2426
2427 static int
2428 cmp_partitions_size (const void *a, const void *b)
2429 {
2430 const struct ltrans_partition_def *pa
2431 = *(struct ltrans_partition_def *const *)a;
2432 const struct ltrans_partition_def *pb
2433 = *(struct ltrans_partition_def *const *)b;
2434 return pb->insns - pa->insns;
2435 }
2436
2437 /* Helper for qsort; order partitions by the order of their first symbol. */
2438
2439 static int
2440 cmp_partitions_order (const void *a, const void *b)
2441 {
2442 const struct ltrans_partition_def *pa
2443 = *(struct ltrans_partition_def *const *)a;
2444 const struct ltrans_partition_def *pb
2445 = *(struct ltrans_partition_def *const *)b;
2446 int ordera = -1, orderb = -1;
2447
2448 if (lto_symtab_encoder_size (pa->encoder))
2449 ordera = lto_symtab_encoder_deref (pa->encoder, 0)->order;
2450 if (lto_symtab_encoder_size (pb->encoder))
2451 orderb = lto_symtab_encoder_deref (pb->encoder, 0)->order;
2452 return orderb - ordera;
2453 }
2454
2455 /* Actually stream out ENCODER into TEMP_FILENAME. */
2456
2457 static void
2458 do_stream_out (char *temp_filename, lto_symtab_encoder_t encoder)
2459 {
2460 lto_file *file = lto_obj_file_open (temp_filename, true);
2461 if (!file)
2462 fatal_error ("lto_obj_file_open() failed");
2463 lto_set_current_out_file (file);
2464
2465 ipa_write_optimization_summaries (encoder);
2466
2467 lto_set_current_out_file (NULL);
2468 lto_obj_file_close (file);
2469 free (file);
2470 }
2471
2472 /* Wait for forked process and signal errors. */
2473 #ifdef HAVE_WORKING_FORK
2474 static void
2475 wait_for_child ()
2476 {
2477 int status;
2478 do
2479 {
2480 #ifndef WCONTINUED
2481 #define WCONTINUED 0
2482 #endif
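/* Some systems do not define WCONTINUED; defining it to 0 above simply
   makes the flag a no-op there. */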
2483 int w = waitpid (0, &status, WUNTRACED | WCONTINUED);
2484 if (w == -1)
2485 fatal_error ("waitpid failed");
2486
2487 if (WIFEXITED (status) && WEXITSTATUS (status))
2488 fatal_error ("streaming subprocess failed");
2489 else if (WIFSIGNALED (status))
2490 fatal_error ("streaming subprocess was killed by signal");
2491 }
2492 while (!WIFEXITED (status) && !WIFSIGNALED (status));
2493 }
2494 #endif
2495
2496 /* Stream out ENCODER into TEMP_FILENAME.
2497 Fork if that seems to help. */
2498
2499 static void
2500 stream_out (char *temp_filename, lto_symtab_encoder_t encoder, bool last)
2501 {
2502 #ifdef HAVE_WORKING_FORK
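/* Number of child streaming processes currently in flight. */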
2503 static int nruns;
2504
2505 if (lto_parallelism <= 1)
2506 {
2507 do_stream_out (temp_filename, encoder);
2508 return;
2509 }
2510
2511 /* Do not run more than LTO_PARALLELISM streaming processes at once.
2512 FIXME: we ignore the limits imposed by the jobserver. */
2513 if (lto_parallelism > 0 && nruns >= lto_parallelism)
2514 {
2515 wait_for_child ();
2516 nruns --;
2517 }
2518 /* If this is not the last parallel partition, spawn a new
2519 streaming process. */
2520 if (!last)
2521 {
2522 pid_t cpid = fork ();
2523
2524 if (!cpid)
2525 {
2526 setproctitle ("lto1-wpa-streaming");
2527 do_stream_out (temp_filename, encoder);
2528 exit (0);
2529 }
2530 /* Fork failed; let's do the job ourselves. */
2531 else if (cpid == -1)
2532 do_stream_out (temp_filename, encoder);
2533 else
2534 nruns++;
2535 }
2536 /* Last partition; stream it and wait for all children to die. */
2537 else
2538 {
2539 int i;
2540 do_stream_out (temp_filename, encoder);
2541 for (i = 0; i < nruns; i++)
2542 wait_for_child ();
2543 }
2544 asm_nodes_output = true;
2545 #else
2546 do_stream_out (temp_filename, encoder);
2547 #endif
2548 }
2549
2550 /* Write all output files in WPA mode and the file with the list of
2551 LTRANS units. */
2552
2553 static void
2554 lto_wpa_write_files (void)
2555 {
2556 unsigned i, n_sets;
2557 ltrans_partition part;
2558 FILE *ltrans_output_list_stream;
2559 char *temp_filename;
2560 vec <char *>temp_filenames = vNULL;
2561 size_t blen;
2562
2563 /* Open the LTRANS output list. */
2564 if (!ltrans_output_list)
2565 fatal_error ("no LTRANS output list filename provided");
2566
2567 timevar_push (TV_WHOPR_WPA);
2568
2569 FOR_EACH_VEC_ELT (ltrans_partitions, i, part)
2570 lto_stats.num_output_symtab_nodes += lto_symtab_encoder_size (part->encoder);
2571
2572 timevar_pop (TV_WHOPR_WPA);
2573
2574 timevar_push (TV_WHOPR_WPA_IO);
2575
2576 /* Generate a prefix for the LTRANS unit files. */
2577 blen = strlen (ltrans_output_list);
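/* "2147483648.o" is as long as any "%u.o" suffix can get, so the
   allocation below leaves room for the partition number appended later
   plus the terminating NUL. */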
2578 temp_filename = (char *) xmalloc (blen + sizeof ("2147483648.o"));
2579 strcpy (temp_filename, ltrans_output_list);
2580 if (blen > sizeof (".out")
2581 && strcmp (temp_filename + blen - sizeof (".out") + 1,
2582 ".out") == 0)
2583 temp_filename[blen - sizeof (".out") + 1] = '\0';
2584 blen = strlen (temp_filename);
2585
2586 n_sets = ltrans_partitions.length ();
2587
2588 /* Sort partitions by size so small ones are compiled last.
2589 FIXME: Even when not reordering we may want to output one list for parallel make
2590 and another for the final link command. */
2591
2592 if (!flag_profile_reorder_functions || !flag_profile_use)
2593 ltrans_partitions.qsort (flag_toplevel_reorder
2594 ? cmp_partitions_size
2595 : cmp_partitions_order);
2596
2597 for (i = 0; i < n_sets; i++)
2598 {
2599 ltrans_partition part = ltrans_partitions[i];
2600
2601 /* Write all the nodes in SET. */
2602 sprintf (temp_filename + blen, "%u.o", i);
2603
2604 if (!quiet_flag)
2605 fprintf (stderr, " %s (%s %i insns)", temp_filename, part->name, part->insns);
2606 if (cgraph_dump_file)
2607 {
2608 lto_symtab_encoder_iterator lsei;
2609
2610 fprintf (cgraph_dump_file, "Writing partition %s to file %s, %i insns\n",
2611 part->name, temp_filename, part->insns);
2612 fprintf (cgraph_dump_file, " Symbols in partition: ");
2613 for (lsei = lsei_start_in_partition (part->encoder); !lsei_end_p (lsei);
2614 lsei_next_in_partition (&lsei))
2615 {
2616 symtab_node *node = lsei_node (lsei);
2617 fprintf (cgraph_dump_file, "%s ", node->asm_name ());
2618 }
2619 fprintf (cgraph_dump_file, "\n Symbols in boundary: ");
2620 for (lsei = lsei_start (part->encoder); !lsei_end_p (lsei);
2621 lsei_next (&lsei))
2622 {
2623 symtab_node *node = lsei_node (lsei);
2624 if (!lto_symtab_encoder_in_partition_p (part->encoder, node))
2625 {
2626 fprintf (cgraph_dump_file, "%s ", node->asm_name ());
2627 cgraph_node *cnode = dyn_cast <cgraph_node> (node);
2628 if (cnode
2629 && lto_symtab_encoder_encode_body_p (part->encoder, cnode))
2630 fprintf (cgraph_dump_file, "(body included)");
2631 else
2632 {
2633 varpool_node *vnode = dyn_cast <varpool_node> (node);
2634 if (vnode
2635 && lto_symtab_encoder_encode_initializer_p (part->encoder, vnode))
2636 fprintf (cgraph_dump_file, "(initializer included)");
2637 }
2638 }
2639 }
2640 fprintf (cgraph_dump_file, "\n");
2641 }
2642 gcc_checking_assert (lto_symtab_encoder_size (part->encoder) || !i);
2643
2644 stream_out (temp_filename, part->encoder, i == n_sets - 1);
2645
2646 part->encoder = NULL;
2647
2648 temp_filenames.safe_push (xstrdup (temp_filename));
2649 }
2650 ltrans_output_list_stream = fopen (ltrans_output_list, "w");
2651 if (ltrans_output_list_stream == NULL)
2652 fatal_error ("opening LTRANS output list %s: %m", ltrans_output_list);
2653 for (i = 0; i < n_sets; i++)
2654 {
2655 unsigned int len = strlen (temp_filenames[i]);
2656 if (fwrite (temp_filenames[i], 1, len, ltrans_output_list_stream) < len
2657 || fwrite ("\n", 1, 1, ltrans_output_list_stream) < 1)
2658 fatal_error ("writing to LTRANS output list %s: %m",
2659 ltrans_output_list);
2660 free (temp_filenames[i]);
2661 }
2662 temp_filenames.release();
2663
2664 lto_stats.num_output_files += n_sets;
2665
2666 /* Close the LTRANS output list. */
2667 if (fclose (ltrans_output_list_stream))
2668 fatal_error ("closing LTRANS output list %s: %m", ltrans_output_list);
2669
2670 free_ltrans_partitions();
2671 free (temp_filename);
2672
2673 timevar_pop (TV_WHOPR_WPA_IO);
2674 }
2675
2676
2677 /* If TT is a variable or function decl replace it with its
2678 prevailing variant. */
2679 #define LTO_SET_PREVAIL(tt) \
2680 do {\
2681 if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \
2682 && (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \
2683 { \
2684 tt = lto_symtab_prevailing_decl (tt); \
2685 fixed = true; \
2686 } \
2687 } while (0)
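/* Note that LTO_SET_PREVAIL also records in the local variable FIXED that
   a replacement was made; lto_fixup_prevailing_decls asserts this so the
   walk stays in sync with mentions_vars_p. */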
2688
2689 /* Ensure that TT isn't a replaceable var or function decl. */
2690 #define LTO_NO_PREVAIL(tt) \
2691 gcc_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
2692
2693 /* Given a tree T replace all fields referring to variables or functions
2694 with their prevailing variant. */
2695 static void
2696 lto_fixup_prevailing_decls (tree t)
2697 {
2698 enum tree_code code = TREE_CODE (t);
2699 bool fixed = false;
2700
2701 gcc_checking_assert (code != TREE_BINFO);
2702 LTO_NO_PREVAIL (TREE_TYPE (t));
2703 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
2704 LTO_NO_PREVAIL (TREE_CHAIN (t));
2705 if (DECL_P (t))
2706 {
2707 LTO_NO_PREVAIL (DECL_NAME (t));
2708 LTO_SET_PREVAIL (DECL_CONTEXT (t));
2709 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
2710 {
2711 LTO_SET_PREVAIL (DECL_SIZE (t));
2712 LTO_SET_PREVAIL (DECL_SIZE_UNIT (t));
2713 LTO_SET_PREVAIL (DECL_INITIAL (t));
2714 LTO_NO_PREVAIL (DECL_ATTRIBUTES (t));
2715 LTO_SET_PREVAIL (DECL_ABSTRACT_ORIGIN (t));
2716 }
2717 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
2718 {
2719 LTO_NO_PREVAIL (t->decl_with_vis.assembler_name);
2720 LTO_NO_PREVAIL (DECL_SECTION_NAME (t));
2721 }
2722 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
2723 {
2724 LTO_NO_PREVAIL (DECL_ARGUMENT_FLD (t));
2725 LTO_NO_PREVAIL (DECL_RESULT_FLD (t));
2726 LTO_NO_PREVAIL (DECL_VINDEX (t));
2727 }
2728 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
2729 LTO_SET_PREVAIL (DECL_FUNCTION_PERSONALITY (t));
2730 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
2731 {
2732 LTO_SET_PREVAIL (DECL_FIELD_OFFSET (t));
2733 LTO_NO_PREVAIL (DECL_BIT_FIELD_TYPE (t));
2734 LTO_NO_PREVAIL (DECL_QUALIFIER (t));
2735 LTO_NO_PREVAIL (DECL_FIELD_BIT_OFFSET (t));
2736 LTO_NO_PREVAIL (DECL_FCONTEXT (t));
2737 }
2738 }
2739 else if (TYPE_P (t))
2740 {
2741 LTO_NO_PREVAIL (TYPE_CACHED_VALUES (t));
2742 LTO_SET_PREVAIL (TYPE_SIZE (t));
2743 LTO_SET_PREVAIL (TYPE_SIZE_UNIT (t));
2744 LTO_NO_PREVAIL (TYPE_ATTRIBUTES (t));
2745 LTO_NO_PREVAIL (TYPE_NAME (t));
2746
2747 LTO_SET_PREVAIL (TYPE_MINVAL (t));
2748 LTO_SET_PREVAIL (TYPE_MAXVAL (t));
2749 LTO_NO_PREVAIL (t->type_non_common.binfo);
2750
2751 LTO_SET_PREVAIL (TYPE_CONTEXT (t));
2752
2753 LTO_NO_PREVAIL (TYPE_CANONICAL (t));
2754 LTO_NO_PREVAIL (TYPE_MAIN_VARIANT (t));
2755 LTO_NO_PREVAIL (TYPE_NEXT_VARIANT (t));
2756 }
2757 else if (EXPR_P (t))
2758 {
2759 int i;
2760 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
2761 LTO_SET_PREVAIL (TREE_OPERAND (t, i));
2762 }
2763 else if (TREE_CODE (t) == CONSTRUCTOR)
2764 {
2765 unsigned i;
2766 tree val;
2767 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
2768 LTO_SET_PREVAIL (val);
2769 }
2770 else
2771 {
2772 switch (code)
2773 {
2774 case TREE_LIST:
2775 LTO_SET_PREVAIL (TREE_VALUE (t));
2776 LTO_SET_PREVAIL (TREE_PURPOSE (t));
2777 LTO_NO_PREVAIL (TREE_PURPOSE (t));
2778 break;
2779 default:
2780 gcc_unreachable ();
2781 }
2782 }
2783 /* If we fixed nothing, then we missed something seen by
2784 mentions_vars_p. */
2785 gcc_checking_assert (fixed);
2786 }
2787 #undef LTO_SET_PREVAIL
2788 #undef LTO_NO_PREVAIL
2789
2790 /* Helper function of lto_fixup_decls. Walks the var and fn streams in STATE,
2791 replaces var and function decls with the corresponding prevailing def. */
2792
2793 static void
2794 lto_fixup_state (struct lto_in_decl_state *state)
2795 {
2796 unsigned i, si;
2797 struct lto_tree_ref_table *table;
2798
2799 /* Although we only want to replace FUNCTION_DECLs and VAR_DECLs,
2800 we still need to walk from all DECLs to find the reachable
2801 FUNCTION_DECLs and VAR_DECLs. */
2802 for (si = 0; si < LTO_N_DECL_STREAMS; si++)
2803 {
2804 table = &state->streams[si];
2805 for (i = 0; i < table->size; i++)
2806 {
2807 tree *tp = table->trees + i;
2808 if (VAR_OR_FUNCTION_DECL_P (*tp)
2809 && (TREE_PUBLIC (*tp) || DECL_EXTERNAL (*tp)))
2810 *tp = lto_symtab_prevailing_decl (*tp);
2811 }
2812 }
2813 }
2814
2815 /* A callback of htab_traverse. Just extracts a state from SLOT
2816 and calls lto_fixup_state. */
2817
2818 static int
2819 lto_fixup_state_aux (void **slot, void *aux ATTRIBUTE_UNUSED)
2820 {
2821 struct lto_in_decl_state *state = (struct lto_in_decl_state *) *slot;
2822 lto_fixup_state (state);
2823 return 1;
2824 }
2825
2826 /* Fix the decls from all FILES. Replaces each decl with the corresponding
2827 prevailing one. */
2828
2829 static void
2830 lto_fixup_decls (struct lto_file_decl_data **files)
2831 {
2832 unsigned int i;
2833 tree t;
2834
2835 if (tree_with_vars)
2836 FOR_EACH_VEC_ELT ((*tree_with_vars), i, t)
2837 lto_fixup_prevailing_decls (t);
2838
2839 for (i = 0; files[i]; i++)
2840 {
2841 struct lto_file_decl_data *file = files[i];
2842 struct lto_in_decl_state *state = file->global_decl_state;
2843 lto_fixup_state (state);
2844
2845 htab_traverse (file->function_decl_states, lto_fixup_state_aux, NULL);
2846 }
2847 }
2848
2849 static GTY((length ("lto_stats.num_input_files + 1"))) struct lto_file_decl_data **all_file_decl_data;
2850
2851 /* Turn the file data for the sub-files into a single array, so that they
2852 look like separate files to further passes. */
2853
2854 static void
2855 lto_flatten_files (struct lto_file_decl_data **orig, int count, int last_file_ix)
2856 {
2857 struct lto_file_decl_data *n, *next;
2858 int i, k;
2859
2860 lto_stats.num_input_files = count;
2861 all_file_decl_data
2862 = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (count + 1);
2863 /* Set the hooks so that all of the ipa passes can read in their data. */
2864 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
2865 for (i = 0, k = 0; i < last_file_ix; i++)
2866 {
2867 for (n = orig[i]; n != NULL; n = next)
2868 {
2869 all_file_decl_data[k++] = n;
2870 next = n->next;
2871 n->next = NULL;
2872 }
2873 }
2874 all_file_decl_data[k] = NULL;
2875 gcc_assert (k == count);
2876 }
2877
2878 /* Input file data before flattening (i.e. splitting them into subfiles to
2879 support incremental linking). */
2880 static int real_file_count;
2881 static GTY((length ("real_file_count + 1"))) struct lto_file_decl_data **real_file_decl_data;
2882
2883 static void print_lto_report_1 (void);
2884
2885 /* Read all the symbols from the input files FNAMES. NFILES is the
2886 number of files requested in the command line. Instantiate a
2887 global call graph by aggregating all the sub-graphs found in each
2888 file. */
2889
2890 static void
2891 read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
2892 {
2893 unsigned int i, last_file_ix;
2894 FILE *resolution;
2895 int count = 0;
2896 struct lto_file_decl_data **decl_data;
2897 void **res;
2898 symtab_node *snode;
2899
2900 init_cgraph ();
2901
2902 timevar_push (TV_IPA_LTO_DECL_IN);
2903
2904 real_file_decl_data
2905 = decl_data = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (nfiles + 1);
2906 real_file_count = nfiles;
2907
2908 /* Read the resolution file. */
2909 resolution = NULL;
2910 if (resolution_file_name)
2911 {
2912 int t;
2913 unsigned num_objects;
2914
2915 resolution = fopen (resolution_file_name, "r");
2916 if (resolution == NULL)
2917 fatal_error ("could not open symbol resolution file: %m");
2918
2919 t = fscanf (resolution, "%u", &num_objects);
2920 gcc_assert (t == 1);
2921
2922 /* True, since the plugin splits the archives. */
2923 gcc_assert (num_objects == nfiles);
2924 }
2925 cgraph_state = CGRAPH_LTO_STREAMING;
2926
2927 canonical_type_hash_cache = new pointer_map <hashval_t>;
2928 gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
2929 gimple_canonical_type_eq, 0);
2930 gcc_obstack_init (&tree_scc_hash_obstack);
2931 tree_scc_hash.create (4096);
2932
2933 /* Register the common node types with the canonical type machinery so
2934 we properly share alias-sets across languages and TUs. Do not
2935 expose the common nodes as type merge targets; those that should be
2936 are already exposed by pre-loading the LTO streamer caches.
2937 Do two passes: first clear TYPE_CANONICAL and then re-compute it. */
2938 for (i = 0; i < itk_none; ++i)
2939 lto_register_canonical_types (integer_types[i], true);
2940 for (i = 0; i < stk_type_kind_last; ++i)
2941 lto_register_canonical_types (sizetype_tab[i], true);
2942 for (i = 0; i < TI_MAX; ++i)
2943 lto_register_canonical_types (global_trees[i], true);
2944 for (i = 0; i < itk_none; ++i)
2945 lto_register_canonical_types (integer_types[i], false);
2946 for (i = 0; i < stk_type_kind_last; ++i)
2947 lto_register_canonical_types (sizetype_tab[i], false);
2948 for (i = 0; i < TI_MAX; ++i)
2949 lto_register_canonical_types (global_trees[i], false);
2950
2951 if (!quiet_flag)
2952 fprintf (stderr, "Reading object files:");
2953
2954 /* Read all of the object files specified on the command line. */
2955 for (i = 0, last_file_ix = 0; i < nfiles; ++i)
2956 {
2957 struct lto_file_decl_data *file_data = NULL;
2958 if (!quiet_flag)
2959 {
2960 fprintf (stderr, " %s", fnames[i]);
2961 fflush (stderr);
2962 }
2963
2964 current_lto_file = lto_obj_file_open (fnames[i], false);
2965 if (!current_lto_file)
2966 break;
2967
2968 file_data = lto_file_read (current_lto_file, resolution, &count);
2969 if (!file_data)
2970 {
2971 lto_obj_file_close (current_lto_file);
2972 free (current_lto_file);
2973 current_lto_file = NULL;
2974 break;
2975 }
2976
2977 decl_data[last_file_ix++] = file_data;
2978
2979 lto_obj_file_close (current_lto_file);
2980 free (current_lto_file);
2981 current_lto_file = NULL;
2982 }
2983
2984 lto_flatten_files (decl_data, count, last_file_ix);
2985 lto_stats.num_input_files = count;
2986 ggc_free(decl_data);
2987 real_file_decl_data = NULL;
2988
2989 if (resolution_file_name)
2990 fclose (resolution);
2991
2992 /* Show the LTO report before launching LTRANS. */
2993 if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
2994 print_lto_report_1 ();
2995
2996 /* Free gimple type merging datastructures. */
2997 tree_scc_hash.dispose ();
2998 obstack_free (&tree_scc_hash_obstack, NULL);
2999 htab_delete (gimple_canonical_types);
3000 gimple_canonical_types = NULL;
3001 delete canonical_type_hash_cache;
3002 canonical_type_hash_cache = NULL;
3003 ggc_collect ();
3004
3005 /* Set the hooks so that all of the ipa passes can read in their data. */
3006 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
3007
3008 timevar_pop (TV_IPA_LTO_DECL_IN);
3009
3010 if (!quiet_flag)
3011 fprintf (stderr, "\nReading the callgraph\n");
3012
3013 timevar_push (TV_IPA_LTO_CGRAPH_IO);
3014 /* Read the symtab. */
3015 input_symtab ();
3016
3017 /* Store resolutions into the symbol table. */
3018
3019 FOR_EACH_SYMBOL (snode)
3020 if (symtab_real_symbol_p (snode)
3021 && snode->lto_file_data
3022 && snode->lto_file_data->resolution_map
3023 && (res = pointer_map_contains (snode->lto_file_data->resolution_map,
3024 snode->decl)))
3025 snode->resolution
3026 = (enum ld_plugin_symbol_resolution)(size_t)*res;
3027 for (i = 0; all_file_decl_data[i]; i++)
3028 if (all_file_decl_data[i]->resolution_map)
3029 {
3030 pointer_map_destroy (all_file_decl_data[i]->resolution_map);
3031 all_file_decl_data[i]->resolution_map = NULL;
3032 }
3033
3034 timevar_pop (TV_IPA_LTO_CGRAPH_IO);
3035
3036 if (!quiet_flag)
3037 fprintf (stderr, "Merging declarations\n");
3038
3039 timevar_push (TV_IPA_LTO_DECL_MERGE);
3040 /* Merge global decls. In LTRANS mode we read the merged cgraph, so we do
3041 not need to resolve symbols again; we only need to replace
3042 duplicated declarations read from the callgraph and from function
3043 sections. */
3044 if (!flag_ltrans)
3045 {
3046 lto_symtab_merge_decls ();
3047
3048 /* If there were errors during symbol merging bail out, we have no
3049 good way to recover here. */
3050 if (seen_error ())
3051 fatal_error ("errors during merging of translation units");
3052
3053 /* Fixup all decls. */
3054 lto_fixup_decls (all_file_decl_data);
3055 }
3056 if (tree_with_vars)
3057 ggc_free (tree_with_vars);
3058 tree_with_vars = NULL;
3059 ggc_collect ();
3060
3061 timevar_pop (TV_IPA_LTO_DECL_MERGE);
3062 /* Each pass will set the appropriate timer. */
3063
3064 if (!quiet_flag)
3065 fprintf (stderr, "Reading summaries\n");
3066
3067 /* Read the IPA summary data. */
3068 if (flag_ltrans)
3069 ipa_read_optimization_summaries ();
3070 else
3071 ipa_read_summaries ();
3072
3073 for (i = 0; all_file_decl_data[i]; i++)
3074 {
3075 gcc_assert (all_file_decl_data[i]->symtab_node_encoder);
3076 lto_symtab_encoder_delete (all_file_decl_data[i]->symtab_node_encoder);
3077 all_file_decl_data[i]->symtab_node_encoder = NULL;
3078 lto_free_function_in_decl_state (all_file_decl_data[i]->global_decl_state);
3079 all_file_decl_data[i]->global_decl_state = NULL;
3080 all_file_decl_data[i]->current_decl_state = NULL;
3081 }
3082
3083 /* Finally merge the cgraph according to the decl merging decisions. */
3084 timevar_push (TV_IPA_LTO_CGRAPH_MERGE);
3085 if (cgraph_dump_file)
3086 {
3087 fprintf (cgraph_dump_file, "Before merging:\n");
3088 dump_symtab (cgraph_dump_file);
3089 }
3090 lto_symtab_merge_symbols ();
3091 ggc_collect ();
3092 cgraph_state = CGRAPH_STATE_IPA_SSA;
3093
3094 timevar_pop (TV_IPA_LTO_CGRAPH_MERGE);
3095
3096 timevar_push (TV_IPA_LTO_DECL_INIT_IO);
3097
3098 /* Indicate that the cgraph is built and ready. */
3099 cgraph_function_flags_ready = true;
3100
3101 timevar_pop (TV_IPA_LTO_DECL_INIT_IO);
3102 ggc_free (all_file_decl_data);
3103 all_file_decl_data = NULL;
3104 }
3105
3106
3107 /* Materialize all the bodies for all the nodes in the callgraph. */
3108
3109 static void
3110 materialize_cgraph (void)
3111 {
3112 struct cgraph_node *node;
3113 timevar_id_t lto_timer;
3114
3115 if (!quiet_flag)
3116 fprintf (stderr,
3117 flag_wpa ? "Materializing decls:" : "Reading function bodies:");
3118
3119 /* Now that we have input the cgraph, we need to clear all of the aux
3120 nodes and read the functions if we are not running in WPA mode. */
3121 timevar_push (TV_IPA_LTO_GIMPLE_IN);
3122
3123 FOR_EACH_FUNCTION (node)
3124 {
3125 if (node->lto_file_data)
3126 {
3127 lto_materialize_function (node);
3128 lto_stats.num_input_cgraph_nodes++;
3129 }
3130 }
3131
3132 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
3133
3134 /* Start the appropriate timer depending on the mode that we are
3135 operating in. */
3136 lto_timer = (flag_wpa) ? TV_WHOPR_WPA
3137 : (flag_ltrans) ? TV_WHOPR_LTRANS
3138 : TV_LTO;
3139 timevar_push (lto_timer);
3140
3141 current_function_decl = NULL;
3142 set_cfun (NULL);
3143
3144 if (!quiet_flag)
3145 fprintf (stderr, "\n");
3146
3147 timevar_pop (lto_timer);
3148 }
3149
3150
3151 /* Show various memory usage statistics related to LTO. */
3152 static void
3153 print_lto_report_1 (void)
3154 {
3155 const char *pfx = (flag_lto) ? "LTO" : (flag_wpa) ? "WPA" : "LTRANS";
3156 fprintf (stderr, "%s statistics\n", pfx);
3157
3158 fprintf (stderr, "[%s] read %lu SCCs of average size %f\n",
3159 pfx, num_sccs_read, total_scc_size / (double)num_sccs_read);
3160 fprintf (stderr, "[%s] %lu tree bodies read in total\n", pfx, total_scc_size);
3161 if (flag_wpa && tree_scc_hash.is_created ())
3162 {
3163 fprintf (stderr, "[%s] tree SCC table: size %ld, %ld elements, "
3164 "collision ratio: %f\n", pfx,
3165 (long) tree_scc_hash.size (),
3166 (long) tree_scc_hash.elements (),
3167 tree_scc_hash.collisions ());
3168 hash_table<tree_scc_hasher>::iterator hiter;
3169 tree_scc *scc, *max_scc = NULL;
3170 unsigned max_length = 0;
3171 FOR_EACH_HASH_TABLE_ELEMENT (tree_scc_hash, scc, x, hiter)
3172 {
3173 unsigned length = 0;
3174 tree_scc *s = scc;
3175 for (; s; s = s->next)
3176 length++;
3177 if (length > max_length)
3178 {
3179 max_length = length;
3180 max_scc = scc;
3181 }
3182 }
3183 fprintf (stderr, "[%s] tree SCC max chain length %u (size %u)\n",
3184 pfx, max_length, max_scc->len);
3185 fprintf (stderr, "[%s] Compared %lu SCCs, %lu collisions (%f)\n", pfx,
3186 num_scc_compares, num_scc_compare_collisions,
3187 num_scc_compare_collisions / (double) num_scc_compares);
3188 fprintf (stderr, "[%s] Merged %lu SCCs\n", pfx, num_sccs_merged);
3189 fprintf (stderr, "[%s] Merged %lu tree bodies\n", pfx,
3190 total_scc_size_merged);
3191 fprintf (stderr, "[%s] Merged %lu types\n", pfx, num_merged_types);
3192 fprintf (stderr, "[%s] %lu types prevailed (%lu associated trees)\n",
3193 pfx, num_prevailing_types, num_type_scc_trees);
3194 fprintf (stderr, "[%s] GIMPLE canonical type table: size %ld, "
3195 "%ld elements, %ld searches, %ld collisions (ratio: %f)\n", pfx,
3196 (long) htab_size (gimple_canonical_types),
3197 (long) htab_elements (gimple_canonical_types),
3198 (long) gimple_canonical_types->searches,
3199 (long) gimple_canonical_types->collisions,
3200 htab_collisions (gimple_canonical_types));
3201 fprintf (stderr, "[%s] GIMPLE canonical type pointer-map: "
3202 "%lu elements, %ld searches\n", pfx,
3203 num_canonical_type_hash_entries,
3204 num_canonical_type_hash_queries);
3205 }
3206
3207 print_lto_report (pfx);
3208 }
3209
3210 /* Perform whole program analysis (WPA) on the callgraph and write out the
3211 optimization plan. */
3212
3213 static void
3214 do_whole_program_analysis (void)
3215 {
3216 symtab_node *node;
3217
3218 lto_parallelism = 1;
3219
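/* flag_wpa holds the argument of -fwpa=, either the string "jobserver"
   or the number of parallel streaming processes to use. */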
3220 /* TODO: jobserver communication is not supported yet. */
3221 if (!strcmp (flag_wpa, "jobserver"))
3222 lto_parallelism = -1;
3223 else
3224 {
3225 lto_parallelism = atoi (flag_wpa);
3226 if (lto_parallelism <= 0)
3227 lto_parallelism = 0;
3228 }
3229
3230 timevar_start (TV_PHASE_OPT_GEN);
3231
3232 /* Note that since we are in WPA mode, materialize_cgraph will not
3233 actually read in all the function bodies. It only materializes
3234 the decls and cgraph nodes so that analysis can be performed. */
3235 materialize_cgraph ();
3236
3237 /* Reading in the cgraph uses different timers, start timing WPA now. */
3238 timevar_push (TV_WHOPR_WPA);
3239
3240 if (pre_ipa_mem_report)
3241 {
3242 fprintf (stderr, "Memory consumption before IPA\n");
3243 dump_memory_report (false);
3244 }
3245
3246 cgraph_function_flags_ready = true;
3247
3248 if (cgraph_dump_file)
3249 dump_symtab (cgraph_dump_file);
3250 bitmap_obstack_initialize (NULL);
3251 cgraph_state = CGRAPH_STATE_IPA_SSA;
3252
3253 execute_ipa_pass_list (g->get_passes ()->all_regular_ipa_passes);
3254 symtab_remove_unreachable_nodes (false, dump_file);
3255
3256 if (cgraph_dump_file)
3257 {
3258 fprintf (cgraph_dump_file, "Optimized ");
3259 dump_symtab (cgraph_dump_file);
3260 }
3261 #ifdef ENABLE_CHECKING
3262 verify_cgraph ();
3263 #endif
3264 bitmap_obstack_release (NULL);
3265
3266 /* We are about to launch the final LTRANS phase, stop the WPA timer. */
3267 timevar_pop (TV_WHOPR_WPA);
3268
3269 timevar_push (TV_WHOPR_PARTITIONING);
3270 if (flag_lto_partition_1to1)
3271 lto_1_to_1_map ();
3272 else if (flag_lto_partition_max)
3273 lto_max_map ();
3274 else
3275 lto_balanced_map ();
3276
3277 /* Inline summaries are needed for balanced partitioning. Free them now so
3278 the memory can be used for streamer caches. */
3279 inline_free_summary ();
3280
3281 /* AUX pointers are used by the partitioning code to track the number of
3282 partitions a symbol is in. They are no longer needed. */
3283 FOR_EACH_SYMBOL (node)
3284 node->aux = NULL;
3285
3286 lto_stats.num_cgraph_partitions += ltrans_partitions.length ();
3287
3288 /* Find statics that need to be promoted
3289 to globals with hidden visibility because they are accessed from multiple
3290 partitions. */
3291 lto_promote_cross_file_statics ();
3292 timevar_pop (TV_WHOPR_PARTITIONING);
3293
3294 timevar_stop (TV_PHASE_OPT_GEN);
3295
3296 /* Collect one last time: in lto_wpa_write_files we may end up forking
3297 with the idea that this doesn't increase memory usage. So we
3298 absolutely do not want to collect after that. */
3299 ggc_collect ();
3300
3301 timevar_start (TV_PHASE_STREAM_OUT);
3302 if (!quiet_flag)
3303 {
3304 fprintf (stderr, "\nStreaming out");
3305 fflush (stderr);
3306 }
3307 lto_wpa_write_files ();
3308 if (!quiet_flag)
3309 fprintf (stderr, "\n");
3310 timevar_stop (TV_PHASE_STREAM_OUT);
3311
3312 if (post_ipa_mem_report)
3313 {
3314 fprintf (stderr, "Memory consumption after IPA\n");
3315 dump_memory_report (false);
3316 }
3317
3318 /* Show the LTO report before launching LTRANS. */
3319 if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
3320 print_lto_report_1 ();
3321 if (mem_report_wpa)
3322 dump_memory_report (true);
3323 }
3324
3325
3326 static GTY(()) tree lto_eh_personality_decl;
3327
3328 /* Return the LTO personality function decl. */
3329
3330 tree
3331 lto_eh_personality (void)
3332 {
3333 if (!lto_eh_personality_decl)
3334 {
3335 /* Use the first personality DECL for our personality if we don't
3336 support multiple ones. This ensures that we don't artificially
3337 create the need for them in a single-language program. */
3338 if (first_personality_decl && !dwarf2out_do_cfi_asm ())
3339 lto_eh_personality_decl = first_personality_decl;
3340 else
3341 lto_eh_personality_decl = lhd_gcc_personality ();
3342 }
3343
3344 return lto_eh_personality_decl;
3345 }
3346
3347 /* Set the process name based on the LTO mode. */
3348
3349 static void
3350 lto_process_name (void)
3351 {
3352 if (flag_lto)
3353 setproctitle ("lto1-lto");
3354 if (flag_wpa)
3355 setproctitle ("lto1-wpa");
3356 if (flag_ltrans)
3357 setproctitle ("lto1-ltrans");
3358 }
3359
3360
3361 /* Initialize the LTO front end. */
3362
3363 static void
3364 lto_init (void)
3365 {
3366 lto_process_name ();
3367 lto_streamer_hooks_init ();
3368 lto_reader_init ();
3369 lto_set_in_hooks (NULL, get_section_data, free_section_data);
3370 memset (&lto_stats, 0, sizeof (lto_stats));
3371 bitmap_obstack_initialize (NULL);
3372 gimple_register_cfg_hooks ();
3373 }
3374
3375
3376 /* Main entry point for the GIMPLE front end. This front end has
3377 three main personalities:
3378
3379 - LTO (-flto). All the object files on the command line are
3380 loaded in memory and processed as a single translation unit.
3381 This is the traditional link-time optimization behavior.
3382
3383 - WPA (-fwpa). Only the callgraph and summary information for
3384 files in the command file are loaded. A single callgraph
3385 (without function bodies) is instantiated for the whole set of
3386 files. IPA passes are only allowed to analyze the call graph
3387 and make transformation decisions. The callgraph is
3388 partitioned, each partition is written to a new object file
3389 together with the transformation decisions.
3390
3391 - LTRANS (-fltrans). Similar to -flto, but the IPA summary
3392 passes are not run again. Since WPA computed summary
3393 information and decided what transformations to apply, LTRANS
3394 simply applies them. */
3395
3396 void
3397 lto_main (void)
3398 {
3399 /* LTO is called as a front end, even though it is not a front end.
3400 Because it is called as a front end, TV_PHASE_PARSING and
3401 TV_PARSE_GLOBAL are active, and we need to turn them off while
3402 doing LTO. Later we turn them back on so they are active up in
3403 toplev.c. */
3404 timevar_pop (TV_PARSE_GLOBAL);
3405 timevar_stop (TV_PHASE_PARSING);
3406
3407 timevar_start (TV_PHASE_SETUP);
3408
3409 /* Initialize the LTO front end. */
3410 lto_init ();
3411
3412 timevar_stop (TV_PHASE_SETUP);
3413 timevar_start (TV_PHASE_STREAM_IN);
3414
3415 /* Read all the symbols and call graph from all the files in the
3416 command line. */
3417 read_cgraph_and_symbols (num_in_fnames, in_fnames);
3418
3419 timevar_stop (TV_PHASE_STREAM_IN);
3420
3421 if (!seen_error ())
3422 {
3423 /* If WPA is enabled analyze the whole call graph and create an
3424 optimization plan. Otherwise, read in all the function
3425 bodies and continue with optimization. */
3426 if (flag_wpa)
3427 do_whole_program_analysis ();
3428 else
3429 {
3430 timevar_start (TV_PHASE_OPT_GEN);
3431
3432 materialize_cgraph ();
3433 if (!flag_ltrans)
3434 lto_promote_statics_nonwpa ();
3435
3436 /* Let the middle end know that we have read and merged all of
3437 the input files. */
3438 compile ();
3439
3440 timevar_stop (TV_PHASE_OPT_GEN);
3441
3442 /* FIXME lto, if the processes spawned by WPA fail, we miss
3443 the chance to print WPA's report, so WPA will call
3444 print_lto_report before launching LTRANS. If LTRANS was
3445 launched directly by the driver we would not need to do
3446 this. */
3447 if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
3448 print_lto_report_1 ();
3449 }
3450 }
3451
3452 /* Here we make LTO pretend to be a parser. */
3453 timevar_start (TV_PHASE_PARSING);
3454 timevar_push (TV_PARSE_GLOBAL);
3455 }
3456
3457 #include "gt-lto-lto.h"