gcc/lto/lto.c
1 /* Top-level LTO routines.
2 Copyright (C) 2009-2014 Free Software Foundation, Inc.
3 Contributed by CodeSourcery, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "opts.h"
25 #include "toplev.h"
26 #include "tree.h"
27 #include "stor-layout.h"
28 #include "diagnostic-core.h"
29 #include "tm.h"
30 #include "cgraph.h"
31 #include "tree-ssa-operands.h"
32 #include "tree-pass.h"
33 #include "langhooks.h"
34 #include "bitmap.h"
35 #include "ipa-prop.h"
36 #include "common.h"
37 #include "debug.h"
38 #include "tree-ssa-alias.h"
39 #include "internal-fn.h"
40 #include "gimple-expr.h"
41 #include "is-a.h"
42 #include "gimple.h"
43 #include "lto.h"
44 #include "lto-tree.h"
45 #include "lto-streamer.h"
46 #include "tree-streamer.h"
47 #include "splay-tree.h"
48 #include "lto-partition.h"
49 #include "data-streamer.h"
50 #include "context.h"
51 #include "pass_manager.h"
52 #include "ipa-inline.h"
53 #include "params.h"
54
55
56 /* Number of parallel tasks to run, -1 if we want to use GNU Make jobserver. */
57 static int lto_parallelism;
58
59 static GTY(()) tree first_personality_decl;
60
61 /* Returns a hash code for P. */
62
63 static hashval_t
64 hash_name (const void *p)
65 {
66 const struct lto_section_slot *ds = (const struct lto_section_slot *) p;
67 return (hashval_t) htab_hash_string (ds->name);
68 }
69
70
71 /* Returns nonzero if P1 and P2 are equal. */
72
73 static int
74 eq_name (const void *p1, const void *p2)
75 {
76 const struct lto_section_slot *s1 =
77 (const struct lto_section_slot *) p1;
78 const struct lto_section_slot *s2 =
79 (const struct lto_section_slot *) p2;
80
81 return strcmp (s1->name, s2->name) == 0;
82 }
83
 84 /* Free an lto_section_slot.  */
85
86 static void
87 free_with_string (void *arg)
88 {
89 struct lto_section_slot *s = (struct lto_section_slot *)arg;
90
91 free (CONST_CAST (char *, s->name));
92 free (arg);
93 }
94
 95 /* Create the section hash table.  */
96
97 htab_t
98 lto_obj_create_section_hash_table (void)
99 {
100 return htab_create (37, hash_name, eq_name, free_with_string);
101 }
102
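/* Illustrative sketch: the table created above maps section names to
   lto_section_slot entries, so a lookup is done with a stack-allocated
   key slot carrying only the name.  The helper below is hypothetical,
   not part of the GCC API.  */
#if 0
static struct lto_section_slot *
example_find_section (htab_t section_hash_table, const char *name)
{
  struct lto_section_slot s_slot;
  s_slot.name = name;
  return (struct lto_section_slot *) htab_find (section_hash_table, &s_slot);
}
#endif
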
103 /* Delete an allocated integer KEY in the splay tree. */
104
105 static void
106 lto_splay_tree_delete_id (splay_tree_key key)
107 {
108 free ((void *) key);
109 }
110
111 /* Compare splay tree node ids A and B. */
112
113 static int
114 lto_splay_tree_compare_ids (splay_tree_key a, splay_tree_key b)
115 {
116 unsigned HOST_WIDE_INT ai;
117 unsigned HOST_WIDE_INT bi;
118
119 ai = *(unsigned HOST_WIDE_INT *) a;
120 bi = *(unsigned HOST_WIDE_INT *) b;
121
122 if (ai < bi)
123 return -1;
124 else if (ai > bi)
125 return 1;
126 return 0;
127 }
128
129 /* Look up splay tree node by ID in splay tree T. */
130
131 static splay_tree_node
132 lto_splay_tree_lookup (splay_tree t, unsigned HOST_WIDE_INT id)
133 {
134 return splay_tree_lookup (t, (splay_tree_key) &id);
135 }
136
137 /* Check if KEY has ID. */
138
139 static bool
140 lto_splay_tree_id_equal_p (splay_tree_key key, unsigned HOST_WIDE_INT id)
141 {
142 return *(unsigned HOST_WIDE_INT *) key == id;
143 }
144
145 /* Insert a splay tree node into tree T with ID as key and FILE_DATA as value.
146 The ID is allocated separately because we need HOST_WIDE_INTs which may
147 be wider than a splay_tree_key. */
148
149 static void
150 lto_splay_tree_insert (splay_tree t, unsigned HOST_WIDE_INT id,
151 struct lto_file_decl_data *file_data)
152 {
153 unsigned HOST_WIDE_INT *idp = XCNEW (unsigned HOST_WIDE_INT);
154 *idp = id;
155 splay_tree_insert (t, (splay_tree_key) idp, (splay_tree_value) file_data);
156 }
157
158 /* Create a splay tree. */
159
160 static splay_tree
161 lto_splay_tree_new (void)
162 {
163 return splay_tree_new (lto_splay_tree_compare_ids,
164 lto_splay_tree_delete_id,
165 NULL);
166 }
167
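/* Illustrative sketch: how the splay-tree helpers above combine.  Keys are
   separately allocated HOST_WIDE_INTs (freed by lto_splay_tree_delete_id
   when the tree is deleted); values are lto_file_decl_data pointers.  The
   helper below is hypothetical, not part of the GCC API.  */
#if 0
static struct lto_file_decl_data *
example_register_and_find (splay_tree file_ids, unsigned HOST_WIDE_INT id,
                           struct lto_file_decl_data *file_data)
{
  lto_splay_tree_insert (file_ids, id, file_data);
  splay_tree_node n = lto_splay_tree_lookup (file_ids, id);
  return n ? (struct lto_file_decl_data *) n->value : NULL;
}
#endif
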
168 /* Return true when NODE has a clone that is analyzed (i.e. we need
169 to load its body even if the node itself is not needed). */
170
171 static bool
172 has_analyzed_clone_p (struct cgraph_node *node)
173 {
174 struct cgraph_node *orig = node;
175 node = node->clones;
176 if (node)
177 while (node != orig)
178 {
179 if (node->analyzed)
180 return true;
181 if (node->clones)
182 node = node->clones;
183 else if (node->next_sibling_clone)
184 node = node->next_sibling_clone;
185 else
186 {
187 while (node != orig && !node->next_sibling_clone)
188 node = node->clone_of;
189 if (node != orig)
190 node = node->next_sibling_clone;
191 }
192 }
193 return false;
194 }
195
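/* Illustrative sketch: the iterative walk in has_analyzed_clone_p is a
   preorder traversal of the clone tree below NODE; a recursive equivalent
   (hypothetical helper, not part of the GCC API) would read:  */
#if 0
static bool
example_has_analyzed_clone_rec (struct cgraph_node *node)
{
  struct cgraph_node *clone;
  for (clone = node->clones; clone; clone = clone->next_sibling_clone)
    if (clone->analyzed || example_has_analyzed_clone_rec (clone))
      return true;
  return false;
}
#endif
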
196 /* Read the function body for the function associated with NODE. */
197
198 static void
199 lto_materialize_function (struct cgraph_node *node)
200 {
201 tree decl;
202
203 decl = node->decl;
204 /* Read in functions with body (analyzed nodes)
205 and also functions that are needed to produce virtual clones. */
206 if ((cgraph_function_with_gimple_body_p (node) && node->analyzed)
207 || node->used_as_abstract_origin
208 || has_analyzed_clone_p (node))
209 {
210 /* Clones don't need to be read. */
211 if (node->clone_of)
212 return;
213 if (DECL_FUNCTION_PERSONALITY (decl) && !first_personality_decl)
214 first_personality_decl = DECL_FUNCTION_PERSONALITY (decl);
215 }
216
217 /* Let the middle end know about the function. */
218 rest_of_decl_compilation (decl, 1, 0);
219 }
220
221
222 /* Decode the content of memory pointed to by DATA into the in-decl-state
223 object STATE. DATA_IN points to a data_in structure used for
224 decoding. Return the address just past the decoded object in the
225 input. */
226
227 static const uint32_t *
228 lto_read_in_decl_state (struct data_in *data_in, const uint32_t *data,
229 struct lto_in_decl_state *state)
230 {
231 uint32_t ix;
232 tree decl;
233 uint32_t i, j;
234
235 ix = *data++;
236 decl = streamer_tree_cache_get_tree (data_in->reader_cache, ix);
237 if (TREE_CODE (decl) != FUNCTION_DECL)
238 {
239 gcc_assert (decl == void_type_node);
240 decl = NULL_TREE;
241 }
242 state->fn_decl = decl;
243
244 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
245 {
246 uint32_t size = *data++;
247 tree *decls = ggc_alloc_vec_tree (size);
248
249 for (j = 0; j < size; j++)
250 decls[j] = streamer_tree_cache_get_tree (data_in->reader_cache, data[j]);
251
252 state->streams[i].size = size;
253 state->streams[i].trees = decls;
254 data += size;
255 }
256
257 return data;
258 }
259
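/* Illustrative sketch: the uint32_t record consumed above is laid out as
   one cache index for the function decl (void_type_node for the global
   state) followed, for each of the LTO_N_DECL_STREAMS streams, by the
   stream length and then that many tree cache indices.  A hypothetical
   helper that merely skips such a record:  */
#if 0
static const uint32_t *
example_skip_in_decl_state (const uint32_t *data)
{
  data++;                          /* Function decl index.  */
  for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      uint32_t size = *data++;     /* Stream length.  */
      data += size;                /* Stream members.  */
    }
  return data;
}
#endif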
260
261 /* Global canonical type table. */
262 static htab_t gimple_canonical_types;
263 static pointer_map <hashval_t> *canonical_type_hash_cache;
264 static unsigned long num_canonical_type_hash_entries;
265 static unsigned long num_canonical_type_hash_queries;
266
267 static hashval_t iterative_hash_canonical_type (tree type, hashval_t val);
268 static hashval_t gimple_canonical_type_hash (const void *p);
269 static void gimple_register_canonical_type_1 (tree t, hashval_t hash);
270
271 /* Return a hash value for gimple type TYPE.
272
273 The hash value returned is equal for types considered compatible
274 by gimple_canonical_types_compatible_p. */
275
276 static hashval_t
277 hash_canonical_type (tree type)
278 {
279 hashval_t v;
280
281 /* Combine a few common features of types so that types are grouped into
282 smaller sets; when searching for existing matching types to merge,
283 only existing types having the same features as the new type will be
284 checked. */
285 v = iterative_hash_hashval_t (TREE_CODE (type), 0);
286 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
287
288 /* Incorporate common features of numerical types. */
289 if (INTEGRAL_TYPE_P (type)
290 || SCALAR_FLOAT_TYPE_P (type)
291 || FIXED_POINT_TYPE_P (type)
292 || TREE_CODE (type) == OFFSET_TYPE
293 || POINTER_TYPE_P (type))
294 {
295 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
296 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
297 }
298
299 if (VECTOR_TYPE_P (type))
300 {
301 v = iterative_hash_hashval_t (TYPE_VECTOR_SUBPARTS (type), v);
302 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
303 }
304
305 if (TREE_CODE (type) == COMPLEX_TYPE)
306 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
307
308 /* For pointer and reference types, fold in information about the type
309 pointed to but do not recurse to the pointed-to type. */
310 if (POINTER_TYPE_P (type))
311 {
312 v = iterative_hash_hashval_t (TYPE_ADDR_SPACE (TREE_TYPE (type)), v);
313 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
314 }
315
316 /* For integer types hash only the string flag. */
317 if (TREE_CODE (type) == INTEGER_TYPE)
318 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
319
320 /* For array types hash the domain bounds and the string flag. */
321 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
322 {
323 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
324 /* OMP lowering can introduce error_mark_node in place of
325 random local decls in types. */
326 if (TYPE_MIN_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
327 v = iterative_hash_expr (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), v);
328 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
329 v = iterative_hash_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), v);
330 }
331
332 /* Recurse for aggregates with a single element type. */
333 if (TREE_CODE (type) == ARRAY_TYPE
334 || TREE_CODE (type) == COMPLEX_TYPE
335 || TREE_CODE (type) == VECTOR_TYPE)
336 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
337
338 /* Incorporate function return and argument types. */
339 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
340 {
341 unsigned na;
342 tree p;
343
344 /* For method types also incorporate their parent class. */
345 if (TREE_CODE (type) == METHOD_TYPE)
346 v = iterative_hash_canonical_type (TYPE_METHOD_BASETYPE (type), v);
347
348 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
349
350 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
351 {
352 v = iterative_hash_canonical_type (TREE_VALUE (p), v);
353 na++;
354 }
355
356 v = iterative_hash_hashval_t (na, v);
357 }
358
359 if (RECORD_OR_UNION_TYPE_P (type))
360 {
361 unsigned nf;
362 tree f;
363
364 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
365 if (TREE_CODE (f) == FIELD_DECL)
366 {
367 v = iterative_hash_canonical_type (TREE_TYPE (f), v);
368 nf++;
369 }
370
371 v = iterative_hash_hashval_t (nf, v);
372 }
373
374 return v;
375 }
376
377 /* Return a hash value for gimple type TYPE combined with VAL. */
378
379 static hashval_t
380 iterative_hash_canonical_type (tree type, hashval_t val)
381 {
382 hashval_t v;
383 /* An already processed type. */
384 if (TYPE_CANONICAL (type))
385 {
386 type = TYPE_CANONICAL (type);
387 v = gimple_canonical_type_hash (type);
388 }
389 else
390 {
391 /* Canonical types should not be able to form SCCs by design, this
392 recursion is just because we do not register canonical types in
393 optimal order. To avoid quadratic behavior also register the
394 type here. */
395 v = hash_canonical_type (type);
396 gimple_register_canonical_type_1 (type, v);
397 }
398 return iterative_hash_hashval_t (v, val);
399 }
400
401 /* Returns the hash for a canonical type P. */
402
403 static hashval_t
404 gimple_canonical_type_hash (const void *p)
405 {
406 num_canonical_type_hash_queries++;
407 hashval_t *slot
408 = canonical_type_hash_cache->contains (CONST_CAST_TREE ((const_tree) p));
409 gcc_assert (slot != NULL);
410 return *slot;
411 }
412
413
414 /* The TYPE_CANONICAL merging machinery. It should closely resemble
415 the middle-end types_compatible_p function. It needs to avoid
416 claiming types are different for types that should be treated
417 the same with respect to TBAA. Canonical types are also used
418 for IL consistency checks via the useless_type_conversion_p
419 predicate which does not handle all type kinds itself but falls
420 back to pointer-comparison of TYPE_CANONICAL for aggregates
421 for example. */
422
423 /* Return true iff T1 and T2 are structurally identical as far as
424 TBAA is concerned. */
425
426 static bool
427 gimple_canonical_types_compatible_p (tree t1, tree t2)
428 {
429 /* Before starting to set up the SCC machinery handle simple cases. */
430
431 /* Check first for the obvious case of pointer identity. */
432 if (t1 == t2)
433 return true;
434
435 /* Check that we have two types to compare. */
436 if (t1 == NULL_TREE || t2 == NULL_TREE)
437 return false;
438
439 /* If the types have been previously registered and found equal
440 they still are. */
441 if (TYPE_CANONICAL (t1)
442 && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
443 return true;
444
445 /* Can't be the same type if the types don't have the same code. */
446 if (TREE_CODE (t1) != TREE_CODE (t2))
447 return false;
448
449 /* Qualifiers do not matter for canonical type comparison purposes. */
450
451 /* Void types and nullptr types are always the same. */
452 if (TREE_CODE (t1) == VOID_TYPE
453 || TREE_CODE (t1) == NULLPTR_TYPE)
454 return true;
455
456 /* Can't be the same type if they have different mode. */
457 if (TYPE_MODE (t1) != TYPE_MODE (t2))
458 return false;
459
460 /* Non-aggregate types can be handled cheaply. */
461 if (INTEGRAL_TYPE_P (t1)
462 || SCALAR_FLOAT_TYPE_P (t1)
463 || FIXED_POINT_TYPE_P (t1)
464 || TREE_CODE (t1) == VECTOR_TYPE
465 || TREE_CODE (t1) == COMPLEX_TYPE
466 || TREE_CODE (t1) == OFFSET_TYPE
467 || POINTER_TYPE_P (t1))
468 {
469 /* Can't be the same type if they have different sign or precision. */
470 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
471 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
472 return false;
473
474 if (TREE_CODE (t1) == INTEGER_TYPE
475 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
476 return false;
477
478 /* For canonical type comparisons we do not want to build SCCs
479 so we cannot compare pointed-to types. But we can, for now,
480 require the same pointed-to type kind and match what
481 useless_type_conversion_p would do. */
482 if (POINTER_TYPE_P (t1))
483 {
484 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
485 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
486 return false;
487
488 if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
489 return false;
490 }
491
492 /* Tail-recurse to components. */
493 if (TREE_CODE (t1) == VECTOR_TYPE
494 || TREE_CODE (t1) == COMPLEX_TYPE)
495 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
496 TREE_TYPE (t2));
497
498 return true;
499 }
500
501 /* Do type-specific comparisons. */
502 switch (TREE_CODE (t1))
503 {
504 case ARRAY_TYPE:
505 /* Array types are the same if the element types are the same and
506 the number of elements are the same. */
507 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
508 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
509 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
510 return false;
511 else
512 {
513 tree i1 = TYPE_DOMAIN (t1);
514 tree i2 = TYPE_DOMAIN (t2);
515
516 /* For an incomplete external array, the type domain can be
517 NULL_TREE. Check this condition also. */
518 if (i1 == NULL_TREE && i2 == NULL_TREE)
519 return true;
520 else if (i1 == NULL_TREE || i2 == NULL_TREE)
521 return false;
522 else
523 {
524 tree min1 = TYPE_MIN_VALUE (i1);
525 tree min2 = TYPE_MIN_VALUE (i2);
526 tree max1 = TYPE_MAX_VALUE (i1);
527 tree max2 = TYPE_MAX_VALUE (i2);
528
529 /* The minimum/maximum values have to be the same. */
530 if ((min1 == min2
531 || (min1 && min2
532 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
533 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
534 || operand_equal_p (min1, min2, 0))))
535 && (max1 == max2
536 || (max1 && max2
537 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
538 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
539 || operand_equal_p (max1, max2, 0)))))
540 return true;
541 else
542 return false;
543 }
544 }
545
546 case METHOD_TYPE:
547 case FUNCTION_TYPE:
548 /* Function types are the same if the return type and arguments types
549 are the same. */
550 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
551 return false;
552
553 if (!comp_type_attributes (t1, t2))
554 return false;
555
556 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
557 return true;
558 else
559 {
560 tree parms1, parms2;
561
562 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
563 parms1 && parms2;
564 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
565 {
566 if (!gimple_canonical_types_compatible_p
567 (TREE_VALUE (parms1), TREE_VALUE (parms2)))
568 return false;
569 }
570
571 if (parms1 || parms2)
572 return false;
573
574 return true;
575 }
576
577 case RECORD_TYPE:
578 case UNION_TYPE:
579 case QUAL_UNION_TYPE:
580 {
581 tree f1, f2;
582
583 /* For aggregate types, all the fields must be the same. */
584 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
585 f1 || f2;
586 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
587 {
588 /* Skip non-fields. */
589 while (f1 && TREE_CODE (f1) != FIELD_DECL)
590 f1 = TREE_CHAIN (f1);
591 while (f2 && TREE_CODE (f2) != FIELD_DECL)
592 f2 = TREE_CHAIN (f2);
593 if (!f1 || !f2)
594 break;
595 /* The fields must have the same name, offset and type. */
596 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
597 || !gimple_compare_field_offset (f1, f2)
598 || !gimple_canonical_types_compatible_p
599 (TREE_TYPE (f1), TREE_TYPE (f2)))
600 return false;
601 }
602
603 /* If one aggregate has more fields than the other, they
604 are not the same. */
605 if (f1 || f2)
606 return false;
607
608 return true;
609 }
610
611 default:
612 gcc_unreachable ();
613 }
614 }
615
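/* Illustrative sketch: the contract tying the predicate above to
   hash_canonical_type.  Types it considers compatible must hash
   identically; the converse need not hold, since collisions are resolved
   through gimple_canonical_type_eq.  The checking helper below is
   hypothetical, not part of the GCC API.  */
#if 0
static void
example_check_canonical_hash_invariant (tree t1, tree t2)
{
  if (gimple_canonical_types_compatible_p (t1, t2))
    gcc_checking_assert (hash_canonical_type (t1)
                         == hash_canonical_type (t2));
}
#endif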
616
617 /* Returns nonzero if P1 and P2 are equal. */
618
619 static int
620 gimple_canonical_type_eq (const void *p1, const void *p2)
621 {
622 const_tree t1 = (const_tree) p1;
623 const_tree t2 = (const_tree) p2;
624 return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
625 CONST_CAST_TREE (t2));
626 }
627
628 /* Main worker for gimple_register_canonical_type. */
629
630 static void
631 gimple_register_canonical_type_1 (tree t, hashval_t hash)
632 {
633 void **slot;
634
635 gcc_checking_assert (TYPE_P (t) && !TYPE_CANONICAL (t));
636
637 slot = htab_find_slot_with_hash (gimple_canonical_types, t, hash, INSERT);
638 if (*slot)
639 {
640 tree new_type = (tree)(*slot);
641 gcc_checking_assert (new_type != t);
642 TYPE_CANONICAL (t) = new_type;
643 }
644 else
645 {
646 TYPE_CANONICAL (t) = t;
647 *slot = (void *) t;
648 /* Cache the just computed hash value. */
649 num_canonical_type_hash_entries++;
650 bool existed_p;
651 hashval_t *hslot = canonical_type_hash_cache->insert (t, &existed_p);
652 gcc_assert (!existed_p);
653 *hslot = hash;
654 }
655 }
656
657 /* Register type T in the global type table gimple_types and set
658 TYPE_CANONICAL of T accordingly.
659 This is used by LTO to merge structurally equivalent types for
660 type-based aliasing purposes across different TUs and languages.
661
662 ??? This merging does not exactly match how the tree.c middle-end
663 functions will assign TYPE_CANONICAL when new types are created
664 during optimization (which at least happens for pointer and array
665 types). */
666
667 static void
668 gimple_register_canonical_type (tree t)
669 {
670 if (TYPE_CANONICAL (t))
671 return;
672
673 gimple_register_canonical_type_1 (t, hash_canonical_type (t));
674 }
675
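/* Illustrative sketch: the observable effect of registration.  If T1 and T2
   stand for two structurally compatible types read from different
   translation units, the first registered one prevails and both end up
   sharing one TYPE_CANONICAL.  The helper below is hypothetical, not part
   of the GCC API.  */
#if 0
static void
example_register_two_compatible_types (tree t1, tree t2)
{
  gimple_register_canonical_type (t1);
  gimple_register_canonical_type (t2);
  if (gimple_canonical_types_compatible_p (t1, t2))
    gcc_checking_assert (TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2));
}
#endif
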
676 /* Re-compute TYPE_CANONICAL for NODE and related types. */
677
678 static void
679 lto_register_canonical_types (tree node, bool first_p)
680 {
681 if (!node
682 || !TYPE_P (node))
683 return;
684
685 if (first_p)
686 TYPE_CANONICAL (node) = NULL_TREE;
687
688 if (POINTER_TYPE_P (node)
689 || TREE_CODE (node) == COMPLEX_TYPE
690 || TREE_CODE (node) == ARRAY_TYPE)
691 lto_register_canonical_types (TREE_TYPE (node), first_p);
692
693 if (!first_p)
694 gimple_register_canonical_type (node);
695 }
696
697
698 /* Remember trees that contain references to declarations. */
699 static GTY(()) vec <tree, va_gc> *tree_with_vars;
700
701 #define CHECK_VAR(tt) \
702 do \
703 { \
704 if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \
705 && (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \
706 return true; \
707 } while (0)
708
709 #define CHECK_NO_VAR(tt) \
710 gcc_checking_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
711
712 /* Check presence of pointers to decls in fields of a tree_typed T. */
713
714 static inline bool
715 mentions_vars_p_typed (tree t)
716 {
717 CHECK_NO_VAR (TREE_TYPE (t));
718 return false;
719 }
720
721 /* Check presence of pointers to decls in fields of a tree_common T. */
722
723 static inline bool
724 mentions_vars_p_common (tree t)
725 {
726 if (mentions_vars_p_typed (t))
727 return true;
728 CHECK_NO_VAR (TREE_CHAIN (t));
729 return false;
730 }
731
732 /* Check presence of pointers to decls in fields of a decl_minimal T. */
733
734 static inline bool
735 mentions_vars_p_decl_minimal (tree t)
736 {
737 if (mentions_vars_p_common (t))
738 return true;
739 CHECK_NO_VAR (DECL_NAME (t));
740 CHECK_VAR (DECL_CONTEXT (t));
741 return false;
742 }
743
744 /* Check presence of pointers to decls in fields of a decl_common T. */
745
746 static inline bool
747 mentions_vars_p_decl_common (tree t)
748 {
749 if (mentions_vars_p_decl_minimal (t))
750 return true;
751 CHECK_VAR (DECL_SIZE (t));
752 CHECK_VAR (DECL_SIZE_UNIT (t));
753 CHECK_VAR (DECL_INITIAL (t));
754 CHECK_NO_VAR (DECL_ATTRIBUTES (t));
755 CHECK_VAR (DECL_ABSTRACT_ORIGIN (t));
756 return false;
757 }
758
759 /* Check presence of pointers to decls in fields of a decl_with_vis T. */
760
761 static inline bool
762 mentions_vars_p_decl_with_vis (tree t)
763 {
764 if (mentions_vars_p_decl_common (t))
765 return true;
766
767 /* Accessor macro has side-effects, use field-name here. */
768 CHECK_NO_VAR (t->decl_with_vis.assembler_name);
769 CHECK_NO_VAR (DECL_SECTION_NAME (t));
770 return false;
771 }
772
773 /* Check presence of pointers to decls in fields of a decl_non_common T. */
774
775 static inline bool
776 mentions_vars_p_decl_non_common (tree t)
777 {
778 if (mentions_vars_p_decl_with_vis (t))
779 return true;
780 CHECK_NO_VAR (DECL_ARGUMENT_FLD (t));
781 CHECK_NO_VAR (DECL_RESULT_FLD (t));
782 CHECK_NO_VAR (DECL_VINDEX (t));
783 return false;
784 }
785
786 /* Check presence of pointers to decls in fields of a decl_non_common T. */
787
788 static bool
789 mentions_vars_p_function (tree t)
790 {
791 if (mentions_vars_p_decl_non_common (t))
792 return true;
793 CHECK_VAR (DECL_FUNCTION_PERSONALITY (t));
794 return false;
795 }
796
797 /* Check presence of pointers to decls in fields of a field_decl T. */
798
799 static bool
800 mentions_vars_p_field_decl (tree t)
801 {
802 if (mentions_vars_p_decl_common (t))
803 return true;
804 CHECK_VAR (DECL_FIELD_OFFSET (t));
805 CHECK_NO_VAR (DECL_BIT_FIELD_TYPE (t));
806 CHECK_NO_VAR (DECL_QUALIFIER (t));
807 CHECK_NO_VAR (DECL_FIELD_BIT_OFFSET (t));
808 CHECK_NO_VAR (DECL_FCONTEXT (t));
809 return false;
810 }
811
812 /* Check presence of pointers to decls in fields of a type T. */
813
814 static bool
815 mentions_vars_p_type (tree t)
816 {
817 if (mentions_vars_p_common (t))
818 return true;
819 CHECK_NO_VAR (TYPE_CACHED_VALUES (t));
820 CHECK_VAR (TYPE_SIZE (t));
821 CHECK_VAR (TYPE_SIZE_UNIT (t));
822 CHECK_NO_VAR (TYPE_ATTRIBUTES (t));
823 CHECK_NO_VAR (TYPE_NAME (t));
824
825 CHECK_VAR (TYPE_MINVAL (t));
826 CHECK_VAR (TYPE_MAXVAL (t));
827
828 /* Accessor is for derived node types only. */
829 CHECK_NO_VAR (t->type_non_common.binfo);
830
831 CHECK_VAR (TYPE_CONTEXT (t));
832 CHECK_NO_VAR (TYPE_CANONICAL (t));
833 CHECK_NO_VAR (TYPE_MAIN_VARIANT (t));
834 CHECK_NO_VAR (TYPE_NEXT_VARIANT (t));
835 return false;
836 }
837
838 /* Check presence of pointers to decls in fields of a BINFO T. */
839
840 static bool
841 mentions_vars_p_binfo (tree t)
842 {
843 unsigned HOST_WIDE_INT i, n;
844
845 if (mentions_vars_p_common (t))
846 return true;
847 CHECK_VAR (BINFO_VTABLE (t));
848 CHECK_NO_VAR (BINFO_OFFSET (t));
849 CHECK_NO_VAR (BINFO_VIRTUALS (t));
850 CHECK_NO_VAR (BINFO_VPTR_FIELD (t));
851 n = vec_safe_length (BINFO_BASE_ACCESSES (t));
852 for (i = 0; i < n; i++)
853 CHECK_NO_VAR (BINFO_BASE_ACCESS (t, i));
854 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
855 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
856 n = BINFO_N_BASE_BINFOS (t);
857 for (i = 0; i < n; i++)
858 CHECK_NO_VAR (BINFO_BASE_BINFO (t, i));
859 return false;
860 }
861
862 /* Check presence of pointers to decls in fields of a CONSTRUCTOR T. */
863
864 static bool
865 mentions_vars_p_constructor (tree t)
866 {
867 unsigned HOST_WIDE_INT idx;
868 constructor_elt *ce;
869
870 if (mentions_vars_p_typed (t))
871 return true;
872
873 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
874 {
875 CHECK_NO_VAR (ce->index);
876 CHECK_VAR (ce->value);
877 }
878 return false;
879 }
880
881 /* Check presence of pointers to decls in fields of an expression tree T. */
882
883 static bool
884 mentions_vars_p_expr (tree t)
885 {
886 int i;
887 if (mentions_vars_p_typed (t))
888 return true;
889 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
890 CHECK_VAR (TREE_OPERAND (t, i));
891 return false;
892 }
893
894 /* Check presence of pointers to decls in fields of an OMP_CLAUSE T. */
895
896 static bool
897 mentions_vars_p_omp_clause (tree t)
898 {
899 int i;
900 if (mentions_vars_p_common (t))
901 return true;
902 for (i = omp_clause_num_ops[OMP_CLAUSE_CODE (t)] - 1; i >= 0; --i)
903 CHECK_VAR (OMP_CLAUSE_OPERAND (t, i));
904 return false;
905 }
906
907 /* Check presence of pointers to decls in T that need later fixup. */
908
909 static bool
910 mentions_vars_p (tree t)
911 {
912 switch (TREE_CODE (t))
913 {
914 case IDENTIFIER_NODE:
915 break;
916
917 case TREE_LIST:
918 CHECK_VAR (TREE_VALUE (t));
919 CHECK_VAR (TREE_PURPOSE (t));
920 CHECK_NO_VAR (TREE_CHAIN (t));
921 break;
922
923 case FIELD_DECL:
924 return mentions_vars_p_field_decl (t);
925
926 case LABEL_DECL:
927 case CONST_DECL:
928 case PARM_DECL:
929 case RESULT_DECL:
930 case IMPORTED_DECL:
931 case NAMESPACE_DECL:
932 case NAMELIST_DECL:
933 return mentions_vars_p_decl_common (t);
934
935 case VAR_DECL:
936 return mentions_vars_p_decl_with_vis (t);
937
938 case TYPE_DECL:
939 return mentions_vars_p_decl_non_common (t);
940
941 case FUNCTION_DECL:
942 return mentions_vars_p_function (t);
943
944 case TREE_BINFO:
945 return mentions_vars_p_binfo (t);
946
947 case PLACEHOLDER_EXPR:
948 return mentions_vars_p_common (t);
949
950 case BLOCK:
951 case TRANSLATION_UNIT_DECL:
952 case OPTIMIZATION_NODE:
953 case TARGET_OPTION_NODE:
954 break;
955
956 case CONSTRUCTOR:
957 return mentions_vars_p_constructor (t);
958
959 case OMP_CLAUSE:
960 return mentions_vars_p_omp_clause (t);
961
962 default:
963 if (TYPE_P (t))
964 {
965 if (mentions_vars_p_type (t))
966 return true;
967 }
968 else if (EXPR_P (t))
969 {
970 if (mentions_vars_p_expr (t))
971 return true;
972 }
973 else if (CONSTANT_CLASS_P (t))
974 CHECK_NO_VAR (TREE_TYPE (t));
975 else
976 gcc_unreachable ();
977 }
978 return false;
979 }
980
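/* Illustrative sketch: mentions_vars_p is the filter deciding which freshly
   read trees must be remembered in TREE_WITH_VARS so that their references
   to decls can be redirected once the prevailing decls are known; the
   intended use is roughly the fragment below.  */
#if 0
  if (mentions_vars_p (t))
    vec_safe_push (tree_with_vars, t);
#endif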
981
982 /* Return the resolution for the decl with index INDEX from DATA_IN. */
983
984 static enum ld_plugin_symbol_resolution
985 get_resolution (struct data_in *data_in, unsigned index)
986 {
987 if (data_in->globals_resolution.exists ())
988 {
989 ld_plugin_symbol_resolution_t ret;
990 /* We can have references to functions that are not emitted, in
991 DECL_FUNCTION_PERSONALITY at least, so we can and indeed have
992 to return LDPR_UNKNOWN in some cases. */
993 if (data_in->globals_resolution.length () <= index)
994 return LDPR_UNKNOWN;
995 ret = data_in->globals_resolution[index];
996 return ret;
997 }
998 else
999 /* Delay resolution finding until decl merging. */
1000 return LDPR_UNKNOWN;
1001 }
1002
1003 /* We need to record resolutions until symbol table is read. */
1004 static void
1005 register_resolution (struct lto_file_decl_data *file_data, tree decl,
1006 enum ld_plugin_symbol_resolution resolution)
1007 {
1008 if (resolution == LDPR_UNKNOWN)
1009 return;
1010 if (!file_data->resolution_map)
1011 file_data->resolution_map = pointer_map_create ();
1012 *pointer_map_insert (file_data->resolution_map, decl) = (void *)(size_t)resolution;
1013 }
1014
1015 /* Register DECL with the global symbol table and change its
1016 name if necessary to avoid name clashes for static globals across
1017 different files. */
1018
1019 static void
1020 lto_register_var_decl_in_symtab (struct data_in *data_in, tree decl,
1021 unsigned ix)
1022 {
1023 tree context;
1024
1025 /* Variable has file scope, not local. */
1026 if (!TREE_PUBLIC (decl)
1027 && !((context = decl_function_context (decl))
1028 && auto_var_in_fn_p (decl, context)))
1029 rest_of_decl_compilation (decl, 1, 0);
1030
1031 /* If this variable has already been declared, queue the
1032 declaration for merging. */
1033 if (TREE_PUBLIC (decl))
1034 register_resolution (data_in->file_data,
1035 decl, get_resolution (data_in, ix));
1036 }
1037
1038
1039 /* Register DECL with the global symbol table and change its
1040 name if necessary to avoid name clashes for static globals across
1041 different files. DATA_IN contains descriptors and tables for the
1042 file being read. */
1043
1044 static void
1045 lto_register_function_decl_in_symtab (struct data_in *data_in, tree decl,
1046 unsigned ix)
1047 {
1048 /* If this function has already been declared, queue the
1049 declaration for merging. */
1050 if (TREE_PUBLIC (decl) && !DECL_ABSTRACT (decl))
1051 register_resolution (data_in->file_data,
1052 decl, get_resolution (data_in, ix));
1053 }
1054
1055
1056 /* Re-materialize the type T in the type variant list and in
1057 the pointer/reference-to chains. */
1058
1059 static void
1060 lto_fixup_prevailing_type (tree t)
1061 {
1062 /* The following re-creates proper variant lists while fixing up
1063 the variant leaders. We do not stream TYPE_NEXT_VARIANT so the
1064 variant list state before fixup is broken. */
1065
1066 /* If we are not our own variant leader link us into our new leaders
1067 variant list. */
1068 if (TYPE_MAIN_VARIANT (t) != t)
1069 {
1070 tree mv = TYPE_MAIN_VARIANT (t);
1071 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
1072 TYPE_NEXT_VARIANT (mv) = t;
1073 }
1074
1075 /* The following reconstructs the pointer chains
1076 of the new pointed-to type if we are a main variant. We do
1077 not stream those so they are broken before fixup. */
1078 if (TREE_CODE (t) == POINTER_TYPE
1079 && TYPE_MAIN_VARIANT (t) == t)
1080 {
1081 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (TREE_TYPE (t));
1082 TYPE_POINTER_TO (TREE_TYPE (t)) = t;
1083 }
1084 else if (TREE_CODE (t) == REFERENCE_TYPE
1085 && TYPE_MAIN_VARIANT (t) == t)
1086 {
1087 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (TREE_TYPE (t));
1088 TYPE_REFERENCE_TO (TREE_TYPE (t)) = t;
1089 }
1090 }
1091
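/* Illustrative sketch: the invariants lto_fixup_prevailing_type restores,
   written out as checks.  The helper below is hypothetical, not part of
   the GCC API.  */
#if 0
static void
example_check_fixed_up_type (tree t)
{
  /* A non-main variant is reachable from its main variant's list.  */
  if (TYPE_MAIN_VARIANT (t) != t)
    {
      tree v = TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t));
      while (v && v != t)
        v = TYPE_NEXT_VARIANT (v);
      gcc_checking_assert (v == t);
    }
  /* A main-variant pointer type sits on the pointed-to type's
     TYPE_POINTER_TO chain.  */
  else if (TREE_CODE (t) == POINTER_TYPE)
    {
      tree p = TYPE_POINTER_TO (TREE_TYPE (t));
      while (p && p != t)
        p = TYPE_NEXT_PTR_TO (p);
      gcc_checking_assert (p == t);
    }
}
#endif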
1092
1093 /* We keep prevailing tree SCCs in a hashtable with manual collision
1094 handling (in case all hashes compare the same) and keep the colliding
1095 entries in the tree_scc->next chain. */
1096
1097 struct tree_scc
1098 {
1099 tree_scc *next;
1100 /* Hash of the whole SCC. */
1101 hashval_t hash;
1102 /* Number of trees in the SCC. */
1103 unsigned len;
1104 /* Number of possible entries into the SCC (tree nodes [0..entry_len-1]
1105 which share the same individual tree hash). */
1106 unsigned entry_len;
1107 /* The members of the SCC.
1108 We only need to remember the first entry node candidate for prevailing
1109 SCCs (but of course have access to all entries for SCCs we are
1110 processing).
1111 ??? For prevailing SCCs we really only need hash and the first
1112 entry candidate, but that's too awkward to implement. */
1113 tree entries[1];
1114 };
1115
1116 struct tree_scc_hasher : typed_noop_remove <tree_scc>
1117 {
1118 typedef tree_scc value_type;
1119 typedef tree_scc compare_type;
1120 static inline hashval_t hash (const value_type *);
1121 static inline bool equal (const value_type *, const compare_type *);
1122 };
1123
1124 hashval_t
1125 tree_scc_hasher::hash (const value_type *scc)
1126 {
1127 return scc->hash;
1128 }
1129
1130 bool
1131 tree_scc_hasher::equal (const value_type *scc1, const compare_type *scc2)
1132 {
1133 if (scc1->hash != scc2->hash
1134 || scc1->len != scc2->len
1135 || scc1->entry_len != scc2->entry_len)
1136 return false;
1137 return true;
1138 }
1139
1140 static hash_table <tree_scc_hasher> tree_scc_hash;
1141 static struct obstack tree_scc_hash_obstack;
1142
1143 static unsigned long num_merged_types;
1144 static unsigned long num_prevailing_types;
1145 static unsigned long num_type_scc_trees;
1146 static unsigned long total_scc_size;
1147 static unsigned long num_sccs_read;
1148 static unsigned long total_scc_size_merged;
1149 static unsigned long num_sccs_merged;
1150 static unsigned long num_scc_compares;
1151 static unsigned long num_scc_compare_collisions;
1152
1153
1154 /* Compare the two entries T1 and T2 of two SCCs that are possibly equal,
1155 recursing through in-SCC tree edges. Returns true if the SCCs entered
1156 through T1 and T2 are equal and fills in *MAP with the pairs of
1157 SCC entries we visited, starting with (*MAP)[0] = T1 and (*MAP)[1] = T2. */
1158
1159 static bool
1160 compare_tree_sccs_1 (tree t1, tree t2, tree **map)
1161 {
1162 enum tree_code code;
1163
1164 /* Mark already visited nodes. */
1165 TREE_ASM_WRITTEN (t2) = 1;
1166
1167 /* Push the pair onto map. */
1168 (*map)[0] = t1;
1169 (*map)[1] = t2;
1170 *map = *map + 2;
1171
1172 /* Compare value-fields. */
1173 #define compare_values(X) \
1174 do { \
1175 if (X(t1) != X(t2)) \
1176 return false; \
1177 } while (0)
1178
1179 compare_values (TREE_CODE);
1180 code = TREE_CODE (t1);
1181
1182 if (!TYPE_P (t1))
1183 {
1184 compare_values (TREE_SIDE_EFFECTS);
1185 compare_values (TREE_CONSTANT);
1186 compare_values (TREE_READONLY);
1187 compare_values (TREE_PUBLIC);
1188 }
1189 compare_values (TREE_ADDRESSABLE);
1190 compare_values (TREE_THIS_VOLATILE);
1191 if (DECL_P (t1))
1192 compare_values (DECL_UNSIGNED);
1193 else if (TYPE_P (t1))
1194 compare_values (TYPE_UNSIGNED);
1195 if (TYPE_P (t1))
1196 compare_values (TYPE_ARTIFICIAL);
1197 else
1198 compare_values (TREE_NO_WARNING);
1199 compare_values (TREE_NOTHROW);
1200 compare_values (TREE_STATIC);
1201 if (code != TREE_BINFO)
1202 compare_values (TREE_PRIVATE);
1203 compare_values (TREE_PROTECTED);
1204 compare_values (TREE_DEPRECATED);
1205 if (TYPE_P (t1))
1206 {
1207 compare_values (TYPE_SATURATING);
1208 compare_values (TYPE_ADDR_SPACE);
1209 }
1210 else if (code == SSA_NAME)
1211 compare_values (SSA_NAME_IS_DEFAULT_DEF);
1212
1213 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1214 {
1215 compare_values (TREE_INT_CST_LOW);
1216 compare_values (TREE_INT_CST_HIGH);
1217 }
1218
1219 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1220 {
1221 /* ??? No suitable compare routine available. */
1222 REAL_VALUE_TYPE r1 = TREE_REAL_CST (t1);
1223 REAL_VALUE_TYPE r2 = TREE_REAL_CST (t2);
1224 if (r1.cl != r2.cl
1225 || r1.decimal != r2.decimal
1226 || r1.sign != r2.sign
1227 || r1.signalling != r2.signalling
1228 || r1.canonical != r2.canonical
1229 || r1.uexp != r2.uexp)
1230 return false;
1231 for (unsigned i = 0; i < SIGSZ; ++i)
1232 if (r1.sig[i] != r2.sig[i])
1233 return false;
1234 }
1235
1236 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1237 if (!fixed_compare (EQ_EXPR,
1238 TREE_FIXED_CST_PTR (t1), TREE_FIXED_CST_PTR (t2)))
1239 return false;
1240
1241
1242 /* We don't want to compare locations, so there is nothing to compare
1243 for TS_DECL_MINIMAL. */
1244
1245 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1246 {
1247 compare_values (DECL_MODE);
1248 compare_values (DECL_NONLOCAL);
1249 compare_values (DECL_VIRTUAL_P);
1250 compare_values (DECL_IGNORED_P);
1251 compare_values (DECL_ABSTRACT);
1252 compare_values (DECL_ARTIFICIAL);
1253 compare_values (DECL_USER_ALIGN);
1254 compare_values (DECL_PRESERVE_P);
1255 compare_values (DECL_EXTERNAL);
1256 compare_values (DECL_GIMPLE_REG_P);
1257 compare_values (DECL_ALIGN);
1258 if (code == LABEL_DECL)
1259 {
1260 compare_values (EH_LANDING_PAD_NR);
1261 compare_values (LABEL_DECL_UID);
1262 }
1263 else if (code == FIELD_DECL)
1264 {
1265 compare_values (DECL_PACKED);
1266 compare_values (DECL_NONADDRESSABLE_P);
1267 compare_values (DECL_OFFSET_ALIGN);
1268 }
1269 else if (code == VAR_DECL)
1270 {
1271 compare_values (DECL_HAS_DEBUG_EXPR_P);
1272 compare_values (DECL_NONLOCAL_FRAME);
1273 }
1274 if (code == RESULT_DECL
1275 || code == PARM_DECL
1276 || code == VAR_DECL)
1277 {
1278 compare_values (DECL_BY_REFERENCE);
1279 if (code == VAR_DECL
1280 || code == PARM_DECL)
1281 compare_values (DECL_HAS_VALUE_EXPR_P);
1282 }
1283 }
1284
1285 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1286 compare_values (DECL_REGISTER);
1287
1288 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1289 {
1290 compare_values (DECL_COMMON);
1291 compare_values (DECL_DLLIMPORT_P);
1292 compare_values (DECL_WEAK);
1293 compare_values (DECL_SEEN_IN_BIND_EXPR_P);
1294 compare_values (DECL_COMDAT);
1295 compare_values (DECL_VISIBILITY);
1296 compare_values (DECL_VISIBILITY_SPECIFIED);
1297 if (code == VAR_DECL)
1298 {
1299 compare_values (DECL_HARD_REGISTER);
1300 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1301 compare_values (DECL_IN_CONSTANT_POOL);
1302 compare_values (DECL_TLS_MODEL);
1303 }
1304 if (VAR_OR_FUNCTION_DECL_P (t1))
1305 compare_values (DECL_INIT_PRIORITY);
1306 }
1307
1308 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1309 {
1310 compare_values (DECL_BUILT_IN_CLASS);
1311 compare_values (DECL_STATIC_CONSTRUCTOR);
1312 compare_values (DECL_STATIC_DESTRUCTOR);
1313 compare_values (DECL_UNINLINABLE);
1314 compare_values (DECL_POSSIBLY_INLINED);
1315 compare_values (DECL_IS_NOVOPS);
1316 compare_values (DECL_IS_RETURNS_TWICE);
1317 compare_values (DECL_IS_MALLOC);
1318 compare_values (DECL_IS_OPERATOR_NEW);
1319 compare_values (DECL_DECLARED_INLINE_P);
1320 compare_values (DECL_STATIC_CHAIN);
1321 compare_values (DECL_NO_INLINE_WARNING_P);
1322 compare_values (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT);
1323 compare_values (DECL_NO_LIMIT_STACK);
1324 compare_values (DECL_DISREGARD_INLINE_LIMITS);
1325 compare_values (DECL_PURE_P);
1326 compare_values (DECL_LOOPING_CONST_OR_PURE_P);
1327 compare_values (DECL_FINAL_P);
1328 compare_values (DECL_CXX_CONSTRUCTOR_P);
1329 compare_values (DECL_CXX_DESTRUCTOR_P);
1330 if (DECL_BUILT_IN_CLASS (t1) != NOT_BUILT_IN)
1331 compare_values (DECL_FUNCTION_CODE);
1332 if (DECL_STATIC_DESTRUCTOR (t1))
1333 compare_values (DECL_FINI_PRIORITY);
1334 }
1335
1336 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1337 {
1338 compare_values (TYPE_MODE);
1339 compare_values (TYPE_STRING_FLAG);
1340 compare_values (TYPE_NO_FORCE_BLK);
1341 compare_values (TYPE_NEEDS_CONSTRUCTING);
1342 if (RECORD_OR_UNION_TYPE_P (t1))
1343 {
1344 compare_values (TYPE_TRANSPARENT_AGGR);
1345 compare_values (TYPE_FINAL_P);
1346 }
1347 else if (code == ARRAY_TYPE)
1348 compare_values (TYPE_NONALIASED_COMPONENT);
1349 compare_values (TYPE_PACKED);
1350 compare_values (TYPE_RESTRICT);
1351 compare_values (TYPE_USER_ALIGN);
1352 compare_values (TYPE_READONLY);
1353 compare_values (TYPE_PRECISION);
1354 compare_values (TYPE_ALIGN);
1355 compare_values (TYPE_ALIAS_SET);
1356 }
1357
1358 /* We don't want to compare locations, so there is nothing to compare
1359 for TS_EXP. */
1360
1361 /* BLOCKs are function local and we don't merge anything there, so
1362 simply refuse to merge. */
1363 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
1364 return false;
1365
1366 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1367 if (strcmp (TRANSLATION_UNIT_LANGUAGE (t1),
1368 TRANSLATION_UNIT_LANGUAGE (t2)) != 0)
1369 return false;
1370
1371 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
1372 gcc_unreachable ();
1373
1374 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1375 if (memcmp (TREE_OPTIMIZATION (t1), TREE_OPTIMIZATION (t2),
1376 sizeof (struct cl_optimization)) != 0)
1377 return false;
1378
1379 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1380 if (vec_safe_length (BINFO_BASE_ACCESSES (t1))
1381 != vec_safe_length (BINFO_BASE_ACCESSES (t2)))
1382 return false;
1383
1384 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1385 compare_values (CONSTRUCTOR_NELTS);
1386
1387 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1388 if (IDENTIFIER_LENGTH (t1) != IDENTIFIER_LENGTH (t2)
1389 || memcmp (IDENTIFIER_POINTER (t1), IDENTIFIER_POINTER (t2),
1390 IDENTIFIER_LENGTH (t1)) != 0)
1391 return false;
1392
1393 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1394 if (TREE_STRING_LENGTH (t1) != TREE_STRING_LENGTH (t2)
1395 || memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
1396 TREE_STRING_LENGTH (t1)) != 0)
1397 return false;
1398
1399 if (code == OMP_CLAUSE)
1400 {
1401 compare_values (OMP_CLAUSE_CODE);
1402 switch (OMP_CLAUSE_CODE (t1))
1403 {
1404 case OMP_CLAUSE_DEFAULT:
1405 compare_values (OMP_CLAUSE_DEFAULT_KIND);
1406 break;
1407 case OMP_CLAUSE_SCHEDULE:
1408 compare_values (OMP_CLAUSE_SCHEDULE_KIND);
1409 break;
1410 case OMP_CLAUSE_DEPEND:
1411 compare_values (OMP_CLAUSE_DEPEND_KIND);
1412 break;
1413 case OMP_CLAUSE_MAP:
1414 compare_values (OMP_CLAUSE_MAP_KIND);
1415 break;
1416 case OMP_CLAUSE_PROC_BIND:
1417 compare_values (OMP_CLAUSE_PROC_BIND_KIND);
1418 break;
1419 case OMP_CLAUSE_REDUCTION:
1420 compare_values (OMP_CLAUSE_REDUCTION_CODE);
1421 compare_values (OMP_CLAUSE_REDUCTION_GIMPLE_INIT);
1422 compare_values (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE);
1423 break;
1424 default:
1425 break;
1426 }
1427 }
1428
1429 #undef compare_values
1430
1431
1432 /* Compare pointer fields. */
1433
1434 /* Recurse. Search & Replaced from DFS_write_tree_body.
1435 Folding the early checks into the compare_tree_edges recursion
1436 macro makes debugging way quicker as you are able to break on
1437 compare_tree_sccs_1 and simply finish until a call returns false
1438 to spot the SCC members with the difference. */
1439 #define compare_tree_edges(E1, E2) \
1440 do { \
1441 tree t1_ = (E1), t2_ = (E2); \
1442 if (t1_ != t2_ \
1443 && (!t1_ || !t2_ \
1444 || !TREE_VISITED (t2_) \
1445 || (!TREE_ASM_WRITTEN (t2_) \
1446 && !compare_tree_sccs_1 (t1_, t2_, map)))) \
1447 return false; \
1448 /* Only non-NULL trees outside of the SCC may compare equal. */ \
1449 gcc_checking_assert (t1_ != t2_ || (!t2_ || !TREE_VISITED (t2_))); \
1450 } while (0)
1451
1452 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1453 {
1454 if (code != IDENTIFIER_NODE)
1455 compare_tree_edges (TREE_TYPE (t1), TREE_TYPE (t2));
1456 }
1457
1458 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1459 {
1460 unsigned i;
1461 /* Note that the number of elements for EXPR has already been emitted
1462 in EXPR's header (see streamer_write_tree_header). */
1463 for (i = 0; i < VECTOR_CST_NELTS (t1); ++i)
1464 compare_tree_edges (VECTOR_CST_ELT (t1, i), VECTOR_CST_ELT (t2, i));
1465 }
1466
1467 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1468 {
1469 compare_tree_edges (TREE_REALPART (t1), TREE_REALPART (t2));
1470 compare_tree_edges (TREE_IMAGPART (t1), TREE_IMAGPART (t2));
1471 }
1472
1473 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1474 {
1475 compare_tree_edges (DECL_NAME (t1), DECL_NAME (t2));
1476 /* ??? Global decls from different TUs have non-matching
1477 TRANSLATION_UNIT_DECLs. Only consider a small set of
1478 decls equivalent, we should not end up merging others. */
1479 if ((code == TYPE_DECL
1480 || code == NAMESPACE_DECL
1481 || code == IMPORTED_DECL
1482 || code == CONST_DECL
1483 || (VAR_OR_FUNCTION_DECL_P (t1)
1484 && (TREE_PUBLIC (t1) || DECL_EXTERNAL (t1))))
1485 && DECL_FILE_SCOPE_P (t1) && DECL_FILE_SCOPE_P (t2))
1486 ;
1487 else
1488 compare_tree_edges (DECL_CONTEXT (t1), DECL_CONTEXT (t2));
1489 }
1490
1491 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1492 {
1493 compare_tree_edges (DECL_SIZE (t1), DECL_SIZE (t2));
1494 compare_tree_edges (DECL_SIZE_UNIT (t1), DECL_SIZE_UNIT (t2));
1495 compare_tree_edges (DECL_ATTRIBUTES (t1), DECL_ATTRIBUTES (t2));
1496 if ((code == VAR_DECL
1497 || code == PARM_DECL)
1498 && DECL_HAS_VALUE_EXPR_P (t1))
1499 compare_tree_edges (DECL_VALUE_EXPR (t1), DECL_VALUE_EXPR (t2));
1500 if (code == VAR_DECL
1501 && DECL_HAS_DEBUG_EXPR_P (t1))
1502 compare_tree_edges (DECL_DEBUG_EXPR (t1), DECL_DEBUG_EXPR (t2));
1503 /* LTO specific edges. */
1504 if (code != FUNCTION_DECL
1505 && code != TRANSLATION_UNIT_DECL)
1506 compare_tree_edges (DECL_INITIAL (t1), DECL_INITIAL (t2));
1507 }
1508
1509 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1510 {
1511 if (code == FUNCTION_DECL)
1512 {
1513 tree a1, a2;
1514 for (a1 = DECL_ARGUMENTS (t1), a2 = DECL_ARGUMENTS (t2);
1515 a1 || a2;
1516 a1 = TREE_CHAIN (a1), a2 = TREE_CHAIN (a2))
1517 compare_tree_edges (a1, a2);
1518 compare_tree_edges (DECL_RESULT (t1), DECL_RESULT (t2));
1519 }
1520 else if (code == TYPE_DECL)
1521 compare_tree_edges (DECL_ORIGINAL_TYPE (t1), DECL_ORIGINAL_TYPE (t2));
1522 compare_tree_edges (DECL_VINDEX (t1), DECL_VINDEX (t2));
1523 }
1524
1525 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1526 {
1527 /* Make sure we don't inadvertently set the assembler name. */
1528 if (DECL_ASSEMBLER_NAME_SET_P (t1))
1529 compare_tree_edges (DECL_ASSEMBLER_NAME (t1),
1530 DECL_ASSEMBLER_NAME (t2));
1531 compare_tree_edges (DECL_SECTION_NAME (t1), DECL_SECTION_NAME (t2));
1532 compare_tree_edges (DECL_COMDAT_GROUP (t1), DECL_COMDAT_GROUP (t2));
1533 }
1534
1535 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1536 {
1537 compare_tree_edges (DECL_FIELD_OFFSET (t1), DECL_FIELD_OFFSET (t2));
1538 compare_tree_edges (DECL_BIT_FIELD_TYPE (t1), DECL_BIT_FIELD_TYPE (t2));
1539 compare_tree_edges (DECL_BIT_FIELD_REPRESENTATIVE (t1),
1540 DECL_BIT_FIELD_REPRESENTATIVE (t2));
1541 compare_tree_edges (DECL_FIELD_BIT_OFFSET (t1),
1542 DECL_FIELD_BIT_OFFSET (t2));
1543 compare_tree_edges (DECL_FCONTEXT (t1), DECL_FCONTEXT (t2));
1544 }
1545
1546 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1547 {
1548 compare_tree_edges (DECL_FUNCTION_PERSONALITY (t1),
1549 DECL_FUNCTION_PERSONALITY (t2));
1550 /* DECL_FUNCTION_SPECIFIC_TARGET is not yet created. We compare
1551 the attribute list instead. */
1552 compare_tree_edges (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t1),
1553 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t2));
1554 }
1555
1556 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1557 {
1558 compare_tree_edges (TYPE_SIZE (t1), TYPE_SIZE (t2));
1559 compare_tree_edges (TYPE_SIZE_UNIT (t1), TYPE_SIZE_UNIT (t2));
1560 compare_tree_edges (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2));
1561 compare_tree_edges (TYPE_NAME (t1), TYPE_NAME (t2));
1562 /* Do not compare TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
1563 reconstructed during fixup. */
1564 /* Do not compare TYPE_NEXT_VARIANT, we reconstruct the variant lists
1565 during fixup. */
1566 compare_tree_edges (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2));
1567 /* ??? Global types from different TUs have non-matching
1568 TRANSLATION_UNIT_DECLs. Still merge them if they are otherwise
1569 equal. */
1570 if (TYPE_FILE_SCOPE_P (t1) && TYPE_FILE_SCOPE_P (t2))
1571 ;
1572 else
1573 compare_tree_edges (TYPE_CONTEXT (t1), TYPE_CONTEXT (t2));
1574 /* TYPE_CANONICAL is re-computed during type merging, so do not
1575 compare it here. */
1576 compare_tree_edges (TYPE_STUB_DECL (t1), TYPE_STUB_DECL (t2));
1577 }
1578
1579 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1580 {
1581 if (code == ENUMERAL_TYPE)
1582 compare_tree_edges (TYPE_VALUES (t1), TYPE_VALUES (t2));
1583 else if (code == ARRAY_TYPE)
1584 compare_tree_edges (TYPE_DOMAIN (t1), TYPE_DOMAIN (t2));
1585 else if (RECORD_OR_UNION_TYPE_P (t1))
1586 {
1587 tree f1, f2;
1588 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
1589 f1 || f2;
1590 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1591 compare_tree_edges (f1, f2);
1592 compare_tree_edges (TYPE_BINFO (t1), TYPE_BINFO (t2));
1593 }
1594 else if (code == FUNCTION_TYPE
1595 || code == METHOD_TYPE)
1596 compare_tree_edges (TYPE_ARG_TYPES (t1), TYPE_ARG_TYPES (t2));
1597 if (!POINTER_TYPE_P (t1))
1598 compare_tree_edges (TYPE_MINVAL (t1), TYPE_MINVAL (t2));
1599 compare_tree_edges (TYPE_MAXVAL (t1), TYPE_MAXVAL (t2));
1600 }
1601
1602 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1603 {
1604 compare_tree_edges (TREE_PURPOSE (t1), TREE_PURPOSE (t2));
1605 compare_tree_edges (TREE_VALUE (t1), TREE_VALUE (t2));
1606 compare_tree_edges (TREE_CHAIN (t1), TREE_CHAIN (t2));
1607 }
1608
1609 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1610 for (int i = 0; i < TREE_VEC_LENGTH (t1); i++)
1611 compare_tree_edges (TREE_VEC_ELT (t1, i), TREE_VEC_ELT (t2, i));
1612
1613 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1614 {
1615 for (int i = 0; i < TREE_OPERAND_LENGTH (t1); i++)
1616 compare_tree_edges (TREE_OPERAND (t1, i),
1617 TREE_OPERAND (t2, i));
1618
1619 /* BLOCKs are function local and we don't merge anything there. */
1620 if (TREE_BLOCK (t1) || TREE_BLOCK (t2))
1621 return false;
1622 }
1623
1624 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1625 {
1626 unsigned i;
1627 tree t;
1628 /* Lengths have already been compared above. */
1629 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t1), i, t)
1630 compare_tree_edges (t, BINFO_BASE_BINFO (t2, i));
1631 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t1), i, t)
1632 compare_tree_edges (t, BINFO_BASE_ACCESS (t2, i));
1633 compare_tree_edges (BINFO_OFFSET (t1), BINFO_OFFSET (t2));
1634 compare_tree_edges (BINFO_VTABLE (t1), BINFO_VTABLE (t2));
1635 compare_tree_edges (BINFO_VPTR_FIELD (t1), BINFO_VPTR_FIELD (t2));
1636 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1637 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1638 }
1639
1640 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1641 {
1642 unsigned i;
1643 tree index, value;
1644 /* Lengths have already been compared above. */
1645 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t1), i, index, value)
1646 {
1647 compare_tree_edges (index, CONSTRUCTOR_ELT (t2, i)->index);
1648 compare_tree_edges (value, CONSTRUCTOR_ELT (t2, i)->value);
1649 }
1650 }
1651
1652 if (code == OMP_CLAUSE)
1653 {
1654 int i;
1655
1656 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t1)]; i++)
1657 compare_tree_edges (OMP_CLAUSE_OPERAND (t1, i),
1658 OMP_CLAUSE_OPERAND (t2, i));
1659 compare_tree_edges (OMP_CLAUSE_CHAIN (t1), OMP_CLAUSE_CHAIN (t2));
1660 }
1661
1662 #undef compare_tree_edges
1663
1664 return true;
1665 }
1666
1667 /* Compare the tree scc SCC to the prevailing candidate PSCC, filling
1668 out MAP if they are equal. */
1669
1670 static bool
1671 compare_tree_sccs (tree_scc *pscc, tree_scc *scc,
1672 tree *map)
1673 {
1674 /* Assume SCC entry hashes are sorted by their cardinality, which
1675 means we can simply take the first n-tuple of equal hashes
1676 (which is recorded as entry_len) and do n SCC entry candidate
1677 comparisons. */
1678 for (unsigned i = 0; i < pscc->entry_len; ++i)
1679 {
1680 tree *mapp = map;
1681 num_scc_compare_collisions++;
1682 if (compare_tree_sccs_1 (pscc->entries[0], scc->entries[i], &mapp))
1683 {
1684 /* Equal - no need to reset TREE_VISITED or TREE_ASM_WRITTEN
1685 on the scc as all trees will be freed. */
1686 return true;
1687 }
1688 /* Reset TREE_ASM_WRITTEN on scc for the next compare or in case
1689 the SCC prevails. */
1690 for (unsigned j = 0; j < scc->len; ++j)
1691 TREE_ASM_WRITTEN (scc->entries[j]) = 0;
1692 }
1693
1694 return false;
1695 }
1696
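/* Illustrative sketch: the flag protocol behind the comparison above.
   unify_scc (below) sets TREE_VISITED on every member of the SCC being read
   and expects TREE_ASM_WRITTEN to be clear; compare_tree_sccs_1 then sets
   TREE_ASM_WRITTEN on members it has already paired, and compare_tree_sccs
   clears it again after each failed candidate.  A hypothetical precondition
   check, not part of the GCC API:  */
#if 0
static void
example_check_compare_preconditions (tree_scc *scc)
{
  for (unsigned i = 0; i < scc->len; ++i)
    gcc_checking_assert (TREE_VISITED (scc->entries[i])
                         && !TREE_ASM_WRITTEN (scc->entries[i]));
}
#endif
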
1697 /* qsort comparison function ordering an array of pointer pairs by the
1698 2nd pointer; unify_scc uses it to align two such arrays sharing their 2nd elements. */
1699
1700 static int
1701 cmp_tree (const void *p1_, const void *p2_)
1702 {
1703 tree *p1 = (tree *)(const_cast<void *>(p1_));
1704 tree *p2 = (tree *)(const_cast<void *>(p2_));
1705 if (p1[1] == p2[1])
1706 return 0;
1707 return ((uintptr_t)p1[1] < (uintptr_t)p2[1]) ? -1 : 1;
1708 }
1709
1710 /* Try to unify the SCC with nodes FROM to FROM + LEN in CACHE and
1711 hash value SCC_HASH with an already recorded SCC. Return true if
1712 that was successful, otherwise return false. */
1713
1714 static bool
1715 unify_scc (struct streamer_tree_cache_d *cache, unsigned from,
1716 unsigned len, unsigned scc_entry_len, hashval_t scc_hash)
1717 {
1718 bool unified_p = false;
1719 tree_scc *scc
1720 = (tree_scc *) alloca (sizeof (tree_scc) + (len - 1) * sizeof (tree));
1721 scc->next = NULL;
1722 scc->hash = scc_hash;
1723 scc->len = len;
1724 scc->entry_len = scc_entry_len;
1725 for (unsigned i = 0; i < len; ++i)
1726 {
1727 tree t = streamer_tree_cache_get_tree (cache, from + i);
1728 scc->entries[i] = t;
1729 /* Do not merge SCCs with local entities inside them. Also do
1730 not merge TRANSLATION_UNIT_DECLs. */
1731 if (TREE_CODE (t) == TRANSLATION_UNIT_DECL
1732 || (VAR_OR_FUNCTION_DECL_P (t)
1733 && !(TREE_PUBLIC (t) || DECL_EXTERNAL (t)))
1734 || TREE_CODE (t) == LABEL_DECL)
1735 {
1736 /* Avoid doing any work for these cases and do not bother
1737 recording the SCCs for further merging. */
1738 return false;
1739 }
1740 }
1741
1742 /* Look for the list of candidate SCCs to compare against. */
1743 tree_scc **slot;
1744 slot = tree_scc_hash.find_slot_with_hash (scc, scc_hash, INSERT);
1745 if (*slot)
1746 {
1747 /* Try unifying against each candidate. */
1748 num_scc_compares++;
1749
1750 /* Set TREE_VISITED on the scc so we can easily identify tree nodes
1751 outside of the scc when following tree edges. Make sure
1752 that TREE_ASM_WRITTEN is unset so we can use it as 2nd bit
1753 to track whether we visited the SCC member during the compare.
1754 We cannot use TREE_VISITED on the pscc members as the extended
1755 scc and pscc can overlap. */
1756 for (unsigned i = 0; i < scc->len; ++i)
1757 {
1758 TREE_VISITED (scc->entries[i]) = 1;
1759 gcc_checking_assert (!TREE_ASM_WRITTEN (scc->entries[i]));
1760 }
1761
1762 tree *map = XALLOCAVEC (tree, 2 * len);
1763 for (tree_scc *pscc = *slot; pscc; pscc = pscc->next)
1764 {
1765 if (!compare_tree_sccs (pscc, scc, map))
1766 continue;
1767
1768 /* Found an equal SCC. */
1769 unified_p = true;
1770 num_scc_compare_collisions--;
1771 num_sccs_merged++;
1772 total_scc_size_merged += len;
1773
1774 #ifdef ENABLE_CHECKING
1775 for (unsigned i = 0; i < len; ++i)
1776 {
1777 tree t = map[2*i+1];
1778 enum tree_code code = TREE_CODE (t);
1779 /* IDENTIFIER_NODEs should be singletons and are merged by the
1780 streamer. The others should be singletons, too, and we
1781 should not merge them in any way. */
1782 gcc_assert (code != TRANSLATION_UNIT_DECL
1783 && code != IDENTIFIER_NODE
1784 && !streamer_handle_as_builtin_p (t));
1785 }
1786 #endif
1787
1788 /* Fixup the streamer cache with the prevailing nodes according
1789 to the tree node mapping computed by compare_tree_sccs. */
1790 if (len == 1)
1791 streamer_tree_cache_replace_tree (cache, pscc->entries[0], from);
1792 else
1793 {
1794 tree *map2 = XALLOCAVEC (tree, 2 * len);
1795 for (unsigned i = 0; i < len; ++i)
1796 {
1797 map2[i*2] = (tree)(uintptr_t)(from + i);
1798 map2[i*2+1] = scc->entries[i];
1799 }
1800 qsort (map2, len, 2 * sizeof (tree), cmp_tree);
1801 qsort (map, len, 2 * sizeof (tree), cmp_tree);
1802 for (unsigned i = 0; i < len; ++i)
1803 streamer_tree_cache_replace_tree (cache, map[2*i],
1804 (uintptr_t)map2[2*i]);
1805 }
1806
1807 /* Free the tree nodes from the read SCC. */
1808 for (unsigned i = 0; i < len; ++i)
1809 {
1810 enum tree_code code;
1811 if (TYPE_P (scc->entries[i]))
1812 num_merged_types++;
1813 code = TREE_CODE (scc->entries[i]);
1814 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1815 vec_free (CONSTRUCTOR_ELTS (scc->entries[i]));
1816 ggc_free (scc->entries[i]);
1817 }
1818
1819 break;
1820 }
1821
1822 /* Reset TREE_VISITED if we didn't unify the SCC with another. */
1823 if (!unified_p)
1824 for (unsigned i = 0; i < scc->len; ++i)
1825 TREE_VISITED (scc->entries[i]) = 0;
1826 }
1827
1828 /* If we didn't unify it to any candidate duplicate the relevant
1829 pieces to permanent storage and link it into the chain. */
1830 if (!unified_p)
1831 {
1832 tree_scc *pscc
1833 = XOBNEWVAR (&tree_scc_hash_obstack, tree_scc, sizeof (tree_scc));
1834 memcpy (pscc, scc, sizeof (tree_scc));
1835 pscc->next = (*slot);
1836 *slot = pscc;
1837 }
1838 return unified_p;
1839 }
1840
1841
1842 /* Read all the symbols from buffer DATA, using descriptors in DECL_DATA.
1843 RESOLUTIONS is the set of symbols picked by the linker (read from the
1844 resolution file when the linker plugin is being used). */
1845
1846 static void
1847 lto_read_decls (struct lto_file_decl_data *decl_data, const void *data,
1848 vec<ld_plugin_symbol_resolution_t> resolutions)
1849 {
1850 const struct lto_decl_header *header = (const struct lto_decl_header *) data;
1851 const int decl_offset = sizeof (struct lto_decl_header);
1852 const int main_offset = decl_offset + header->decl_state_size;
1853 const int string_offset = main_offset + header->main_size;
1854 struct lto_input_block ib_main;
1855 struct data_in *data_in;
1856 unsigned int i;
1857 const uint32_t *data_ptr, *data_end;
1858 uint32_t num_decl_states;
1859
1860 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
1861 header->main_size);
1862
1863 data_in = lto_data_in_create (decl_data, (const char *) data + string_offset,
1864 header->string_size, resolutions);
1865
1866 /* We do not uniquify the pre-loaded cache entries; those are middle-end
1867 internal types that should not be merged. */
1868
1869 /* Read the global declarations and types. */
1870 while (ib_main.p < ib_main.len)
1871 {
1872 tree t;
1873 unsigned from = data_in->reader_cache->nodes.length ();
1874 /* Read and uniquify SCCs as in the input stream. */
1875 enum LTO_tags tag = streamer_read_record_start (&ib_main);
1876 if (tag == LTO_tree_scc)
1877 {
1878 unsigned len_;
1879 unsigned scc_entry_len;
1880 hashval_t scc_hash = lto_input_scc (&ib_main, data_in, &len_,
1881 &scc_entry_len);
1882 unsigned len = data_in->reader_cache->nodes.length () - from;
1883 gcc_assert (len == len_);
1884
1885 total_scc_size += len;
1886 num_sccs_read++;
1887
1888 /* We have the special case of size-1 SCCs that are pre-merged
1889 by means of identifier and string sharing for example.
1890 ??? Maybe we should avoid streaming those as SCCs. */
1891 tree first = streamer_tree_cache_get_tree (data_in->reader_cache,
1892 from);
1893 if (len == 1
1894 && (TREE_CODE (first) == IDENTIFIER_NODE
1895 || TREE_CODE (first) == INTEGER_CST
1896 || TREE_CODE (first) == TRANSLATION_UNIT_DECL
1897 || streamer_handle_as_builtin_p (first)))
1898 continue;
1899
1900 /* Try to unify the SCC with already existing ones. */
1901 if (!flag_ltrans
1902 && unify_scc (data_in->reader_cache, from,
1903 len, scc_entry_len, scc_hash))
1904 continue;
1905
1906 /* Do remaining fixup tasks for prevailing nodes. */
1907 bool seen_type = false;
1908 for (unsigned i = 0; i < len; ++i)
1909 {
1910 tree t = streamer_tree_cache_get_tree (data_in->reader_cache,
1911 from + i);
1912 /* Reconstruct the type variant and pointer-to/reference-to
1913 chains. */
1914 if (TYPE_P (t))
1915 {
1916 seen_type = true;
1917 num_prevailing_types++;
1918 lto_fixup_prevailing_type (t);
1919 }
1920 /* Compute the canonical type of all types.
1921 ??? Should be able to assert that !TYPE_CANONICAL. */
1922 if (TYPE_P (t) && !TYPE_CANONICAL (t))
1923 gimple_register_canonical_type (t);
1924 /* Link shared INTEGER_CSTs into the TYPE_CACHED_VALUES of their
1925 type, which is also a member of this SCC. */
1926 if (TREE_CODE (t) == INTEGER_CST
1927 && !TREE_OVERFLOW (t))
1928 cache_integer_cst (t);
1929 /* Re-build DECL_FUNCTION_SPECIFIC_TARGET; we need that
1930 for both the WPA and LTRANS stages. */
1931 if (TREE_CODE (t) == FUNCTION_DECL)
1932 {
1933 tree attr = lookup_attribute ("target", DECL_ATTRIBUTES (t));
1934 if (attr)
1935 targetm.target_option.valid_attribute_p
1936 (t, NULL_TREE, TREE_VALUE (attr), 0);
1937 }
1938 /* Register TYPE_DECLs with the debuginfo machinery. */
1939 if (!flag_wpa
1940 && TREE_CODE (t) == TYPE_DECL)
1941 debug_hooks->type_decl (t, !DECL_FILE_SCOPE_P (t));
1942 if (!flag_ltrans)
1943 {
1944 /* Register variables and functions with the
1945 symbol table. */
1946 if (TREE_CODE (t) == VAR_DECL)
1947 lto_register_var_decl_in_symtab (data_in, t, from + i);
1948 else if (TREE_CODE (t) == FUNCTION_DECL
1949 && !DECL_BUILT_IN (t))
1950 lto_register_function_decl_in_symtab (data_in, t, from + i);
1951 /* Scan the tree for references to global functions or
1952 variables and record those for later fixup. */
1953 if (mentions_vars_p (t))
1954 vec_safe_push (tree_with_vars, t);
1955 }
1956 }
1957 if (seen_type)
1958 num_type_scc_trees += len;
1959 }
1960 else
1961 {
1962 /* Pickle stray references. */
1963 t = lto_input_tree_1 (&ib_main, data_in, tag, 0);
1964 gcc_assert (t && data_in->reader_cache->nodes.length () == from);
1965 }
1966 }
1967
1968 /* Read in lto_in_decl_state objects. */
1969 data_ptr = (const uint32_t *) ((const char*) data + decl_offset);
1970 data_end =
1971 (const uint32_t *) ((const char*) data_ptr + header->decl_state_size);
1972 num_decl_states = *data_ptr++;
1973
1974 gcc_assert (num_decl_states > 0);
1975 decl_data->global_decl_state = lto_new_in_decl_state ();
1976 data_ptr = lto_read_in_decl_state (data_in, data_ptr,
1977 decl_data->global_decl_state);
1978
1979 /* Read in per-function decl states and enter them in hash table. */
1980 decl_data->function_decl_states =
1981 htab_create_ggc (37, lto_hash_in_decl_state, lto_eq_in_decl_state, NULL);
1982
1983 for (i = 1; i < num_decl_states; i++)
1984 {
1985 struct lto_in_decl_state *state = lto_new_in_decl_state ();
1986 void **slot;
1987
1988 data_ptr = lto_read_in_decl_state (data_in, data_ptr, state);
1989 slot = htab_find_slot (decl_data->function_decl_states, state, INSERT);
1990 gcc_assert (*slot == NULL);
1991 *slot = state;
1992 }
1993
1994 if (data_ptr != data_end)
1995 internal_error ("bytecode stream: garbage at the end of symbols section");
1996
1997 /* Set the current decl state to be the global state. */
1998 decl_data->current_decl_state = decl_data->global_decl_state;
1999
2000 lto_data_in_delete (data_in);
2001 }
2002
2003 /* Parse the hex number P. A custom version of strtoll, which is not portable. */
2004
2005 static HOST_WIDEST_INT
2006 lto_parse_hex (const char *p)
2007 {
2008 HOST_WIDEST_INT ret = 0;
2009
2010 for (; *p != '\0'; ++p)
2011 {
2012 char c = *p;
2013 unsigned char part;
2014 ret <<= 4;
2015 if (c >= '0' && c <= '9')
2016 part = c - '0';
2017 else if (c >= 'a' && c <= 'f')
2018 part = c - 'a' + 10;
2019 else if (c >= 'A' && c <= 'F')
2020 part = c - 'A' + 10;
2021 else
2022 internal_error ("could not parse hex number");
2023 ret |= part;
2024 }
2025
2026 return ret;
2027 }
2028
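/* A worked example of the loop above: for the input "1a2b" the accumulator
   goes 0x1 -> 0x1a -> 0x1a2 -> 0x1a2b, i.e. the same value that
   strtoll ("1a2b", NULL, 16) would return where strtoll is available.  */
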
2029 /* Read the linker resolution for FILE from RESOLUTION and record it in the
2030 file data found via FILE_IDS. */
2031
2032 static void
2033 lto_resolution_read (splay_tree file_ids, FILE *resolution, lto_file *file)
2034 {
2035 /* We require that objects in the resolution file are in the same
2036 order as the lto1 command line. */
2037 unsigned int name_len;
2038 char *obj_name;
2039 unsigned int num_symbols;
2040 unsigned int i;
2041 struct lto_file_decl_data *file_data;
2042 splay_tree_node nd = NULL;
2043
2044 if (!resolution)
2045 return;
2046
2047 name_len = strlen (file->filename);
2048 obj_name = XNEWVEC (char, name_len + 1);
2049 fscanf (resolution, " "); /* Read white space. */
2050
2051 fread (obj_name, sizeof (char), name_len, resolution);
2052 obj_name[name_len] = '\0';
2053 if (filename_cmp (obj_name, file->filename) != 0)
2054 internal_error ("unexpected file name %s in linker resolution file. "
2055 "Expected %s", obj_name, file->filename);
2056 if (file->offset != 0)
2057 {
2058 int t;
2059 char offset_p[17];
2060 HOST_WIDEST_INT offset;
2061 t = fscanf (resolution, "@0x%16s", offset_p);
2062 if (t != 1)
2063 internal_error ("could not parse file offset");
2064 offset = lto_parse_hex (offset_p);
2065 if (offset != file->offset)
2066 internal_error ("unexpected offset");
2067 }
2068
2069 free (obj_name);
2070
2071 fscanf (resolution, "%u", &num_symbols);
2072
2073 for (i = 0; i < num_symbols; i++)
2074 {
2075 int t;
2076 unsigned index;
2077 unsigned HOST_WIDE_INT id;
2078 char r_str[27];
2079 enum ld_plugin_symbol_resolution r = (enum ld_plugin_symbol_resolution) 0;
2080 unsigned int j;
2081 unsigned int lto_resolution_str_len =
2082 sizeof (lto_resolution_str) / sizeof (char *);
2083 res_pair rp;
2084
2085 t = fscanf (resolution, "%u " HOST_WIDE_INT_PRINT_HEX_PURE " %26s %*[^\n]\n",
2086 &index, &id, r_str);
2087 if (t != 3)
2088 internal_error ("invalid line in the resolution file");
2089
2090 for (j = 0; j < lto_resolution_str_len; j++)
2091 {
2092 if (strcmp (lto_resolution_str[j], r_str) == 0)
2093 {
2094 r = (enum ld_plugin_symbol_resolution) j;
2095 break;
2096 }
2097 }
2098 if (j == lto_resolution_str_len)
2099 internal_error ("invalid resolution in the resolution file");
2100
2101 if (!(nd && lto_splay_tree_id_equal_p (nd->key, id)))
2102 {
2103 nd = lto_splay_tree_lookup (file_ids, id);
2104 if (nd == NULL)
2105 internal_error ("resolution sub id %wx not in object file", id);
2106 }
2107
2108 file_data = (struct lto_file_decl_data *)nd->value;
2109 /* The indexes are very sparse. To save memory save them in a compact
2110 format that is only unpacked later when the subfile is processed. */
2111 rp.res = r;
2112 rp.index = index;
2113 file_data->respairs.safe_push (rp);
2114 if (file_data->max_index < index)
2115 file_data->max_index = index;
2116 }
2117 }
2118
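/* As a purely illustrative example (the names and numbers below are made up;
   only the shape follows from the fscanf calls above), the fragment of a
   resolution file parsed for one object might look like:

     /tmp/ccABCDEF.o@0x234
     2
     5 1a2b PREVAILING_DEF foo
     9 1a2b RESOLVED_IR bar

   i.e. the object name (with "@0x<offset>" appended when the object lives
   inside an archive), the number of symbols, and then one line per symbol
   giving its index, the sub-file id in hex, the resolution string matched
   against lto_resolution_str, and the rest of the line, which is skipped.  */
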
2119 /* List of file_decl_datas */
2120 struct file_data_list
2121 {
2122 struct lto_file_decl_data *first, *last;
2123 };
2124
2125 /* Is NAME the name of an id'ed LTO section? If so, store the id in *ID. */
2126
2127 static int
2128 lto_section_with_id (const char *name, unsigned HOST_WIDE_INT *id)
2129 {
2130 const char *s;
2131
2132 if (strncmp (name, LTO_SECTION_NAME_PREFIX, strlen (LTO_SECTION_NAME_PREFIX)))
2133 return 0;
2134 s = strrchr (name, '.');
2135 return s && sscanf (s, "." HOST_WIDE_INT_PRINT_HEX_PURE, id) == 1;
2136 }
2137
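/* For example, assuming the usual ".gnu.lto_" value of LTO_SECTION_NAME_PREFIX,
   a section named ".gnu.lto_.decls.1a2b" (a made-up name) passes the check in
   lto_section_with_id above and yields *ID == 0x1a2b, the hex digits after the
   last dot; a name without the prefix or without a trailing hex id makes the
   function return 0 and create_subid_section_table below skip the section.  */
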
2138 /* Create or look up the file_data for the sub-file id of section LS and add the section to it, keeping LIST in linker order. */
2139
2140 static int
2141 create_subid_section_table (struct lto_section_slot *ls, splay_tree file_ids,
2142 struct file_data_list *list)
2143 {
2144 struct lto_section_slot s_slot, *new_slot;
2145 unsigned HOST_WIDE_INT id;
2146 splay_tree_node nd;
2147 void **hash_slot;
2148 char *new_name;
2149 struct lto_file_decl_data *file_data;
2150
2151 if (!lto_section_with_id (ls->name, &id))
2152 return 1;
2153
2154 /* Find hash table of sub module id */
2155 nd = lto_splay_tree_lookup (file_ids, id);
2156 if (nd != NULL)
2157 {
2158 file_data = (struct lto_file_decl_data *)nd->value;
2159 }
2160 else
2161 {
2162 file_data = ggc_alloc_lto_file_decl_data ();
2163 memset(file_data, 0, sizeof (struct lto_file_decl_data));
2164 file_data->id = id;
2165 file_data->section_hash_table = lto_obj_create_section_hash_table ();
2166 lto_splay_tree_insert (file_ids, id, file_data);
2167
2168 /* Maintain list in linker order */
2169 if (!list->first)
2170 list->first = file_data;
2171 if (list->last)
2172 list->last->next = file_data;
2173 list->last = file_data;
2174 }
2175
2176 /* Copy section into sub module hash table */
2177 new_name = XDUPVEC (char, ls->name, strlen (ls->name) + 1);
2178 s_slot.name = new_name;
2179 hash_slot = htab_find_slot (file_data->section_hash_table, &s_slot, INSERT);
2180 gcc_assert (*hash_slot == NULL);
2181
2182 new_slot = XDUP (struct lto_section_slot, ls);
2183 new_slot->name = new_name;
2184 *hash_slot = new_slot;
2185 return 1;
2186 }
2187
2188 /* Read declarations and other initializations for a FILE_DATA. */
2189
2190 static void
2191 lto_file_finalize (struct lto_file_decl_data *file_data, lto_file *file)
2192 {
2193 const char *data;
2194 size_t len;
2195 vec<ld_plugin_symbol_resolution_t>
2196 resolutions = vNULL;
2197 int i;
2198 res_pair *rp;
2199
2200 /* Create vector for fast access of resolution. We do this lazily
2201 to save memory. */
2202 resolutions.safe_grow_cleared (file_data->max_index + 1);
2203 for (i = 0; file_data->respairs.iterate (i, &rp); i++)
2204 resolutions[rp->index] = rp->res;
2205 file_data->respairs.release ();
2206
2207 file_data->renaming_hash_table = lto_create_renaming_table ();
2208 file_data->file_name = file->filename;
2209 data = lto_get_section_data (file_data, LTO_section_decls, NULL, &len);
2210 if (data == NULL)
2211 {
2212 internal_error ("cannot read LTO decls from %s", file_data->file_name);
2213 return;
2214 }
2215 /* Frees resolutions */
2216 lto_read_decls (file_data, data, resolutions);
2217 lto_free_section_data (file_data, LTO_section_decls, NULL, data, len);
2218 }
2219
2220 /* Finalize FILE_DATA in FILE and increase COUNT. */
2221
2222 static int
2223 lto_create_files_from_ids (lto_file *file, struct lto_file_decl_data *file_data,
2224 int *count)
2225 {
2226 lto_file_finalize (file_data, file);
2227 if (cgraph_dump_file)
2228 fprintf (cgraph_dump_file, "Creating file %s with sub id " HOST_WIDE_INT_PRINT_HEX "\n",
2229 file_data->file_name, file_data->id);
2230 (*count)++;
2231 return 0;
2232 }
2233
2234 /* Generate a TREE representation for all types and external decl
2235 entities in FILE.
2236
2237 Read all of the globals out of the file. Then read the cgraph
2238 and process the .o index into the cgraph nodes so that it can open
2239 the .o file to load the functions and ipa information. */
2240
2241 static struct lto_file_decl_data *
2242 lto_file_read (lto_file *file, FILE *resolution_file, int *count)
2243 {
2244 struct lto_file_decl_data *file_data = NULL;
2245 splay_tree file_ids;
2246 htab_t section_hash_table;
2247 struct lto_section_slot *section;
2248 struct file_data_list file_list;
2249 struct lto_section_list section_list;
2250
2251 memset (&section_list, 0, sizeof (struct lto_section_list));
2252 section_hash_table = lto_obj_build_section_table (file, &section_list);
2253
2254 /* Find all sub modules in the object and put their sections into new hash
2255 tables in a splay tree. */
2256 file_ids = lto_splay_tree_new ();
2257 memset (&file_list, 0, sizeof (struct file_data_list));
2258 for (section = section_list.first; section != NULL; section = section->next)
2259 create_subid_section_table (section, file_ids, &file_list);
2260
2261 /* Add resolutions to file ids */
2262 lto_resolution_read (file_ids, resolution_file, file);
2263
2264 /* Finalize each lto file for each submodule in the merged object */
2265 for (file_data = file_list.first; file_data != NULL; file_data = file_data->next)
2266 lto_create_files_from_ids (file, file_data, count);
2267
2268 splay_tree_delete (file_ids);
2269 htab_delete (section_hash_table);
2270
2271 return file_list.first;
2272 }
2273
2274 #if HAVE_MMAP_FILE && HAVE_SYSCONF && defined _SC_PAGE_SIZE
2275 #define LTO_MMAP_IO 1
2276 #endif
2277
2278 #if LTO_MMAP_IO
2279 /* Page size of machine is used for mmap and munmap calls. */
2280 static size_t page_mask;
2281 #endif
2282
2283 /* Get the section data of length LEN from the file behind FILE_DATA,
2284 starting at OFFSET. The data segment must be freed by the caller
2285 when the caller is finished. Returns NULL if all was not well. */
2286
2287 static char *
2288 lto_read_section_data (struct lto_file_decl_data *file_data,
2289 intptr_t offset, size_t len)
2290 {
2291 char *result;
2292 static int fd = -1;
2293 static char *fd_name;
2294 #if LTO_MMAP_IO
2295 intptr_t computed_len;
2296 intptr_t computed_offset;
2297 intptr_t diff;
2298 #endif
2299
2300 /* Keep a single-entry file-descriptor cache. The last file we
2301 touched will get closed at exit.
2302 ??? Eventually we want to add a more sophisticated larger cache
2303 or rather fix function body streaming to not stream them in
2304 practically random order. */
2305 if (fd != -1
2306 && filename_cmp (fd_name, file_data->file_name) != 0)
2307 {
2308 free (fd_name);
2309 close (fd);
2310 fd = -1;
2311 }
2312 if (fd == -1)
2313 {
2314 fd = open (file_data->file_name, O_RDONLY|O_BINARY);
2315 if (fd == -1)
2316 {
2317 fatal_error ("Cannot open %s", file_data->file_name);
2318 return NULL;
2319 }
2320 fd_name = xstrdup (file_data->file_name);
2321 }
2322
2323 #if LTO_MMAP_IO
2324 if (!page_mask)
2325 {
2326 size_t page_size = sysconf (_SC_PAGE_SIZE);
2327 page_mask = ~(page_size - 1);
2328 }
2329
2330 computed_offset = offset & page_mask;
2331 diff = offset - computed_offset;
2332 computed_len = len + diff;
2333
2334 result = (char *) mmap (NULL, computed_len, PROT_READ, MAP_PRIVATE,
2335 fd, computed_offset);
2336 if (result == MAP_FAILED)
2337 {
2338 fatal_error ("Cannot map %s", file_data->file_name);
2339 return NULL;
2340 }
2341
2342 return result + diff;
2343 #else
2344 result = (char *) xmalloc (len);
2345 if (lseek (fd, offset, SEEK_SET) != offset
2346 || read (fd, result, len) != (ssize_t) len)
2347 {
2348 free (result);
2349 fatal_error ("Cannot read %s", file_data->file_name);
2350 result = NULL;
2351 }
2352 #ifdef __MINGW32__
2353 /* Native Windows doesn't support delayed unlink of an opened file, so
2354 we close the file here again. This produces a higher I/O load, but at
2355 least it prevents dangling file handles from blocking the unlink. */
2356 free (fd_name);
2357 fd_name = NULL;
2358 close (fd);
2359 fd = -1;
2360 #endif
2361 return result;
2362 #endif
2363 }
2364
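/* A worked example of the page rounding above, assuming a 4096-byte page:
   page_mask is ~0xfff, so a request for len == 0x100 bytes at offset 0x1234
   maps computed_len == 0x334 bytes starting at computed_offset == 0x1000 and
   returns the mapping pointer advanced by diff == 0x234.  free_section_data
   below redoes the same arithmetic so that munmap sees the page-aligned
   address and length again.  */
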
2365
2366 /* Get the section data from FILE_DATA of SECTION_TYPE with NAME.
2367 NAME will be NULL unless the section type is for a function
2368 body. */
2369
2370 static const char *
2371 get_section_data (struct lto_file_decl_data *file_data,
2372 enum lto_section_type section_type,
2373 const char *name,
2374 size_t *len)
2375 {
2376 htab_t section_hash_table = file_data->section_hash_table;
2377 struct lto_section_slot *f_slot;
2378 struct lto_section_slot s_slot;
2379 const char *section_name = lto_get_section_name (section_type, name, file_data);
2380 char *data = NULL;
2381
2382 *len = 0;
2383 s_slot.name = section_name;
2384 f_slot = (struct lto_section_slot *) htab_find (section_hash_table, &s_slot);
2385 if (f_slot)
2386 {
2387 data = lto_read_section_data (file_data, f_slot->start, f_slot->len);
2388 *len = f_slot->len;
2389 }
2390
2391 free (CONST_CAST (char *, section_name));
2392 return data;
2393 }
2394
2395
2396 /* Free the section data from FILE_DATA of SECTION_TYPE with NAME that
2397 starts at OFFSET and has LEN bytes. */
2398
2399 static void
2400 free_section_data (struct lto_file_decl_data *file_data ATTRIBUTE_UNUSED,
2401 enum lto_section_type section_type ATTRIBUTE_UNUSED,
2402 const char *name ATTRIBUTE_UNUSED,
2403 const char *offset, size_t len ATTRIBUTE_UNUSED)
2404 {
2405 #if LTO_MMAP_IO
2406 intptr_t computed_len;
2407 intptr_t computed_offset;
2408 intptr_t diff;
2409 #endif
2410
2411 #if LTO_MMAP_IO
2412 computed_offset = ((intptr_t) offset) & page_mask;
2413 diff = (intptr_t) offset - computed_offset;
2414 computed_len = len + diff;
2415
2416 munmap ((caddr_t) computed_offset, computed_len);
2417 #else
2418 free (CONST_CAST(char *, offset));
2419 #endif
2420 }
2421
2422 static lto_file *current_lto_file;
2423
2424 /* Helper for qsort; order partitions by size, largest first.
2425 We sort from greatest to smallest so a parallel build doesn't stall on
2426 the longest compilation being started too late. */
2427
2428 static int
2429 cmp_partitions_size (const void *a, const void *b)
2430 {
2431 const struct ltrans_partition_def *pa
2432 = *(struct ltrans_partition_def *const *)a;
2433 const struct ltrans_partition_def *pb
2434 = *(struct ltrans_partition_def *const *)b;
2435 return pb->insns - pa->insns;
2436 }
2437
2438 /* Helper for qsort; compare partitions by the symtab order of their first symbol. */
2439
2440 static int
2441 cmp_partitions_order (const void *a, const void *b)
2442 {
2443 const struct ltrans_partition_def *pa
2444 = *(struct ltrans_partition_def *const *)a;
2445 const struct ltrans_partition_def *pb
2446 = *(struct ltrans_partition_def *const *)b;
2447 int ordera = -1, orderb = -1;
2448
2449 if (lto_symtab_encoder_size (pa->encoder))
2450 ordera = lto_symtab_encoder_deref (pa->encoder, 0)->order;
2451 if (lto_symtab_encoder_size (pb->encoder))
2452 orderb = lto_symtab_encoder_deref (pb->encoder, 0)->order;
2453 return orderb - ordera;
2454 }
2455
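/* A small worked example of the comparators above: for three partitions with
   insns == { 10, 40, 20 }, cmp_partitions_size returns a positive value
   exactly when its second argument has more instructions, so qsort orders
   them as { 40, 20, 10 } and the largest partition is streamed out (and thus
   handed to the parallel build) first.  */
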
2456 /* Actually stream out ENCODER into TEMP_FILENAME. */
2457
2458 static void
2459 do_stream_out (char *temp_filename, lto_symtab_encoder_t encoder)
2460 {
2461 lto_file *file = lto_obj_file_open (temp_filename, true);
2462 if (!file)
2463 fatal_error ("lto_obj_file_open() failed");
2464 lto_set_current_out_file (file);
2465
2466 ipa_write_optimization_summaries (encoder);
2467
2468 lto_set_current_out_file (NULL);
2469 lto_obj_file_close (file);
2470 free (file);
2471 }
2472
2473 /* Wait for forked process and signal errors. */
2474 #ifdef HAVE_WORKING_FORK
2475 static void
2476 wait_for_child ()
2477 {
2478 int status;
2479 do
2480 {
2481 #ifndef WCONTINUED
2482 #define WCONTINUED 0
2483 #endif
2484 int w = waitpid (0, &status, WUNTRACED | WCONTINUED);
2485 if (w == -1)
2486 fatal_error ("waitpid failed");
2487
2488 if (WIFEXITED (status) && WEXITSTATUS (status))
2489 fatal_error ("streaming subprocess failed");
2490 else if (WIFSIGNALED (status))
2491 fatal_error ("streaming subprocess was killed by signal");
2492 }
2493 while (!WIFEXITED (status) && !WIFSIGNALED (status));
2494 }
2495 #endif
2496
2497 /* Stream out ENCODER into TEMP_FILENAME.
2498 Fork if that seems to help. */
2499
2500 static void
2501 stream_out (char *temp_filename, lto_symtab_encoder_t encoder, bool last)
2502 {
2503 #ifdef HAVE_WORKING_FORK
2504 static int nruns;
2505
2506 if (lto_parallelism <= 1)
2507 {
2508 do_stream_out (temp_filename, encoder);
2509 return;
2510 }
2511
2512 /* Do not run more than LTO_PARALLELISM streaming processes at once.
2513 FIXME: we ignore limits imposed by the jobserver. */
2514 if (lto_parallelism > 0 && nruns >= lto_parallelism)
2515 {
2516 wait_for_child ();
2517 nruns --;
2518 }
2519 /* If this is not the last parallel partition, execute a new
2520 streaming process. */
2521 if (!last)
2522 {
2523 pid_t cpid = fork ();
2524
2525 if (!cpid)
2526 {
2527 setproctitle ("lto1-wpa-streaming");
2528 do_stream_out (temp_filename, encoder);
2529 exit (0);
2530 }
2531 /* Fork failed; let's do the job ourselves. */
2532 else if (cpid == -1)
2533 do_stream_out (temp_filename, encoder);
2534 else
2535 nruns++;
2536 }
2537 /* Last partition; stream it and wait for all children to die. */
2538 else
2539 {
2540 int i;
2541 do_stream_out (temp_filename, encoder);
2542 for (i = 0; i < nruns; i++)
2543 wait_for_child ();
2544 }
2545 asm_nodes_output = true;
2546 #else
2547 do_stream_out (temp_filename, encoder);
2548 #endif
2549 }
2550
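/* An illustrative, standalone sketch (not GCC code, kept out of the build
   with #if 0) of the bounded fork/wait pattern stream_out uses above: fork
   at most a fixed number of workers, reap one before starting another once
   the limit is hit, fall back to doing the work inline when fork fails, and
   wait for all remaining children at the end.  The work () function and the
   MAX_WORKERS limit are made up; stream_out additionally handles the last
   partition in the parent itself.  */
#if 0
#include <sys/types.h>
#include <sys/wait.h>
#include <unistd.h>

#define MAX_WORKERS 4

/* Stand-in for streaming one partition.  */
static void
work (int i)
{
  (void) i;
}

int
main (void)
{
  int nruns = 0;
  for (int i = 0; i < 16; i++)
    {
      int status;
      /* Once the limit is reached, reap one child before forking again.  */
      if (nruns >= MAX_WORKERS && waitpid (-1, &status, 0) > 0)
        nruns--;
      pid_t pid = fork ();
      if (pid == 0)
        {
          work (i);             /* Child: do one unit of work and exit.  */
          _exit (0);
        }
      else if (pid == -1)
        work (i);               /* Fork failed: do the work inline.  */
      else
        nruns++;
    }
  while (nruns-- > 0)           /* Wait for the remaining children.  */
    wait (NULL);
  return 0;
}
#endif
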
2551 /* Write all output files in WPA mode and the file with the list of
2552 LTRANS units. */
2553
2554 static void
2555 lto_wpa_write_files (void)
2556 {
2557 unsigned i, n_sets;
2558 ltrans_partition part;
2559 FILE *ltrans_output_list_stream;
2560 char *temp_filename;
2561 vec <char *>temp_filenames = vNULL;
2562 size_t blen;
2563
2564 /* Open the LTRANS output list. */
2565 if (!ltrans_output_list)
2566 fatal_error ("no LTRANS output list filename provided");
2567
2568 timevar_push (TV_WHOPR_WPA);
2569
2570 FOR_EACH_VEC_ELT (ltrans_partitions, i, part)
2571 lto_stats.num_output_symtab_nodes += lto_symtab_encoder_size (part->encoder);
2572
2573 timevar_pop (TV_WHOPR_WPA);
2574
2575 timevar_push (TV_WHOPR_WPA_IO);
2576
2577 /* Generate a prefix for the LTRANS unit files. */
2578 blen = strlen (ltrans_output_list);
2579 temp_filename = (char *) xmalloc (blen + sizeof ("2147483648.o"));
2580 strcpy (temp_filename, ltrans_output_list);
2581 if (blen > sizeof (".out")
2582 && strcmp (temp_filename + blen - sizeof (".out") + 1,
2583 ".out") == 0)
2584 temp_filename[blen - sizeof (".out") + 1] = '\0';
2585 blen = strlen (temp_filename);
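  /* For example, with an LTRANS output list named "foo.ltrans.out" (a made-up
     name) the prefix computed above is "foo.ltrans", so the loop below writes
     the partitions to "foo.ltrans0.o", "foo.ltrans1.o", ... and those are the
     names recorded in the output list file.  */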
2586
2587 n_sets = ltrans_partitions.length ();
2588
2589 /* Sort partitions by size so small ones are compiled last.
2590 FIXME: Even when not reordering we may want to output one list for parallel make
2591 and another for the final link command. */
2592
2593 if (!flag_profile_reorder_functions || !flag_profile_use)
2594 ltrans_partitions.qsort (flag_toplevel_reorder
2595 ? cmp_partitions_size
2596 : cmp_partitions_order);
2597
2598 for (i = 0; i < n_sets; i++)
2599 {
2600 ltrans_partition part = ltrans_partitions[i];
2601
2602 /* Write all the nodes in SET. */
2603 sprintf (temp_filename + blen, "%u.o", i);
2604
2605 if (!quiet_flag)
2606 fprintf (stderr, " %s (%s %i insns)", temp_filename, part->name, part->insns);
2607 if (cgraph_dump_file)
2608 {
2609 lto_symtab_encoder_iterator lsei;
2610
2611 fprintf (cgraph_dump_file, "Writing partition %s to file %s, %i insns\n",
2612 part->name, temp_filename, part->insns);
2613 fprintf (cgraph_dump_file, " Symbols in partition: ");
2614 for (lsei = lsei_start_in_partition (part->encoder); !lsei_end_p (lsei);
2615 lsei_next_in_partition (&lsei))
2616 {
2617 symtab_node *node = lsei_node (lsei);
2618 fprintf (cgraph_dump_file, "%s ", node->asm_name ());
2619 }
2620 fprintf (cgraph_dump_file, "\n Symbols in boundary: ");
2621 for (lsei = lsei_start (part->encoder); !lsei_end_p (lsei);
2622 lsei_next (&lsei))
2623 {
2624 symtab_node *node = lsei_node (lsei);
2625 if (!lto_symtab_encoder_in_partition_p (part->encoder, node))
2626 {
2627 fprintf (cgraph_dump_file, "%s ", node->asm_name ());
2628 cgraph_node *cnode = dyn_cast <cgraph_node> (node);
2629 if (cnode
2630 && lto_symtab_encoder_encode_body_p (part->encoder, cnode))
2631 fprintf (cgraph_dump_file, "(body included)");
2632 else
2633 {
2634 varpool_node *vnode = dyn_cast <varpool_node> (node);
2635 if (vnode
2636 && lto_symtab_encoder_encode_initializer_p (part->encoder, vnode))
2637 fprintf (cgraph_dump_file, "(initializer included)");
2638 }
2639 }
2640 }
2641 fprintf (cgraph_dump_file, "\n");
2642 }
2643 gcc_checking_assert (lto_symtab_encoder_size (part->encoder) || !i);
2644
2645 stream_out (temp_filename, part->encoder, i == n_sets - 1);
2646
2647 part->encoder = NULL;
2648
2649 temp_filenames.safe_push (xstrdup (temp_filename));
2650 }
2651 ltrans_output_list_stream = fopen (ltrans_output_list, "w");
2652 if (ltrans_output_list_stream == NULL)
2653 fatal_error ("opening LTRANS output list %s: %m", ltrans_output_list);
2654 for (i = 0; i < n_sets; i++)
2655 {
2656 unsigned int len = strlen (temp_filenames[i]);
2657 if (fwrite (temp_filenames[i], 1, len, ltrans_output_list_stream) < len
2658 || fwrite ("\n", 1, 1, ltrans_output_list_stream) < 1)
2659 fatal_error ("writing to LTRANS output list %s: %m",
2660 ltrans_output_list);
2661 free (temp_filenames[i]);
2662 }
2663 temp_filenames.release();
2664
2665 lto_stats.num_output_files += n_sets;
2666
2667 /* Close the LTRANS output list. */
2668 if (fclose (ltrans_output_list_stream))
2669 fatal_error ("closing LTRANS output list %s: %m", ltrans_output_list);
2670
2671 free_ltrans_partitions();
2672 free (temp_filename);
2673
2674 timevar_pop (TV_WHOPR_WPA_IO);
2675 }
2676
2677
2678 /* If TT is a variable or function decl replace it with its
2679 prevailing variant. */
2680 #define LTO_SET_PREVAIL(tt) \
2681 do {\
2682 if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \
2683 && (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \
2684 { \
2685 tt = lto_symtab_prevailing_decl (tt); \
2686 fixed = true; \
2687 } \
2688 } while (0)
2689
2690 /* Ensure that TT isn't a replaceable var or function decl. */
2691 #define LTO_NO_PREVAIL(tt) \
2692 gcc_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
2693
2694 /* Given a tree T replace all fields referring to variables or functions
2695 with their prevailing variant. */
2696 static void
2697 lto_fixup_prevailing_decls (tree t)
2698 {
2699 enum tree_code code = TREE_CODE (t);
2700 bool fixed = false;
2701
2702 gcc_checking_assert (code != TREE_BINFO);
2703 LTO_NO_PREVAIL (TREE_TYPE (t));
2704 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
2705 LTO_NO_PREVAIL (TREE_CHAIN (t));
2706 if (DECL_P (t))
2707 {
2708 LTO_NO_PREVAIL (DECL_NAME (t));
2709 LTO_SET_PREVAIL (DECL_CONTEXT (t));
2710 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
2711 {
2712 LTO_SET_PREVAIL (DECL_SIZE (t));
2713 LTO_SET_PREVAIL (DECL_SIZE_UNIT (t));
2714 LTO_SET_PREVAIL (DECL_INITIAL (t));
2715 LTO_NO_PREVAIL (DECL_ATTRIBUTES (t));
2716 LTO_SET_PREVAIL (DECL_ABSTRACT_ORIGIN (t));
2717 }
2718 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
2719 {
2720 LTO_NO_PREVAIL (t->decl_with_vis.assembler_name);
2721 LTO_NO_PREVAIL (DECL_SECTION_NAME (t));
2722 }
2723 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
2724 {
2725 LTO_NO_PREVAIL (DECL_ARGUMENT_FLD (t));
2726 LTO_NO_PREVAIL (DECL_RESULT_FLD (t));
2727 LTO_NO_PREVAIL (DECL_VINDEX (t));
2728 }
2729 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
2730 LTO_SET_PREVAIL (DECL_FUNCTION_PERSONALITY (t));
2731 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
2732 {
2733 LTO_SET_PREVAIL (DECL_FIELD_OFFSET (t));
2734 LTO_NO_PREVAIL (DECL_BIT_FIELD_TYPE (t));
2735 LTO_NO_PREVAIL (DECL_QUALIFIER (t));
2736 LTO_NO_PREVAIL (DECL_FIELD_BIT_OFFSET (t));
2737 LTO_NO_PREVAIL (DECL_FCONTEXT (t));
2738 }
2739 }
2740 else if (TYPE_P (t))
2741 {
2742 LTO_NO_PREVAIL (TYPE_CACHED_VALUES (t));
2743 LTO_SET_PREVAIL (TYPE_SIZE (t));
2744 LTO_SET_PREVAIL (TYPE_SIZE_UNIT (t));
2745 LTO_NO_PREVAIL (TYPE_ATTRIBUTES (t));
2746 LTO_NO_PREVAIL (TYPE_NAME (t));
2747
2748 LTO_SET_PREVAIL (TYPE_MINVAL (t));
2749 LTO_SET_PREVAIL (TYPE_MAXVAL (t));
2750 LTO_NO_PREVAIL (t->type_non_common.binfo);
2751
2752 LTO_SET_PREVAIL (TYPE_CONTEXT (t));
2753
2754 LTO_NO_PREVAIL (TYPE_CANONICAL (t));
2755 LTO_NO_PREVAIL (TYPE_MAIN_VARIANT (t));
2756 LTO_NO_PREVAIL (TYPE_NEXT_VARIANT (t));
2757 }
2758 else if (EXPR_P (t))
2759 {
2760 int i;
2761 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
2762 LTO_SET_PREVAIL (TREE_OPERAND (t, i));
2763 }
2764 else if (TREE_CODE (t) == CONSTRUCTOR)
2765 {
2766 unsigned i;
2767 tree val;
2768 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
2769 LTO_SET_PREVAIL (val);
2770 }
2771 else
2772 {
2773 switch (code)
2774 {
2775 case TREE_LIST:
2776 LTO_SET_PREVAIL (TREE_VALUE (t));
2777 LTO_SET_PREVAIL (TREE_PURPOSE (t));
2778 LTO_NO_PREVAIL (TREE_PURPOSE (t));
2779 break;
2780 default:
2781 gcc_unreachable ();
2782 }
2783 }
2784 /* If we fixed nothing, then we missed something seen by
2785 mentions_vars_p. */
2786 gcc_checking_assert (fixed);
2787 }
2788 #undef LTO_SET_PREVAIL
2789 #undef LTO_NO_PREVAIL
2790
2791 /* Helper function of lto_fixup_decls. Walks the var and fn streams in STATE,
2792 replaces var and function decls with the corresponding prevailing def. */
2793
2794 static void
2795 lto_fixup_state (struct lto_in_decl_state *state)
2796 {
2797 unsigned i, si;
2798 struct lto_tree_ref_table *table;
2799
2800 /* Although we only want to replace FUNCTION_DECLs and VAR_DECLs,
2801 we still need to walk from all DECLs to find the reachable
2802 FUNCTION_DECLs and VAR_DECLs. */
2803 for (si = 0; si < LTO_N_DECL_STREAMS; si++)
2804 {
2805 table = &state->streams[si];
2806 for (i = 0; i < table->size; i++)
2807 {
2808 tree *tp = table->trees + i;
2809 if (VAR_OR_FUNCTION_DECL_P (*tp)
2810 && (TREE_PUBLIC (*tp) || DECL_EXTERNAL (*tp)))
2811 *tp = lto_symtab_prevailing_decl (*tp);
2812 }
2813 }
2814 }
2815
2816 /* A callback of htab_traverse. Just extracts a state from SLOT
2817 and calls lto_fixup_state. */
2818
2819 static int
2820 lto_fixup_state_aux (void **slot, void *aux ATTRIBUTE_UNUSED)
2821 {
2822 struct lto_in_decl_state *state = (struct lto_in_decl_state *) *slot;
2823 lto_fixup_state (state);
2824 return 1;
2825 }
2826
2827 /* Fix the decls from all FILES. Replaces each decl with the corresponding
2828 prevailing one. */
2829
2830 static void
2831 lto_fixup_decls (struct lto_file_decl_data **files)
2832 {
2833 unsigned int i;
2834 tree t;
2835
2836 if (tree_with_vars)
2837 FOR_EACH_VEC_ELT ((*tree_with_vars), i, t)
2838 lto_fixup_prevailing_decls (t);
2839
2840 for (i = 0; files[i]; i++)
2841 {
2842 struct lto_file_decl_data *file = files[i];
2843 struct lto_in_decl_state *state = file->global_decl_state;
2844 lto_fixup_state (state);
2845
2846 htab_traverse (file->function_decl_states, lto_fixup_state_aux, NULL);
2847 }
2848 }
2849
2850 static GTY((length ("lto_stats.num_input_files + 1"))) struct lto_file_decl_data **all_file_decl_data;
2851
2852 /* Turn the file data for sub-files into a single array, so that they look
2853 like separate files to further passes. */
2854
2855 static void
2856 lto_flatten_files (struct lto_file_decl_data **orig, int count, int last_file_ix)
2857 {
2858 struct lto_file_decl_data *n, *next;
2859 int i, k;
2860
2861 lto_stats.num_input_files = count;
2862 all_file_decl_data
2863 = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (count + 1);
2864 /* Set the hooks so that all of the ipa passes can read in their data. */
2865 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
2866 for (i = 0, k = 0; i < last_file_ix; i++)
2867 {
2868 for (n = orig[i]; n != NULL; n = next)
2869 {
2870 all_file_decl_data[k++] = n;
2871 next = n->next;
2872 n->next = NULL;
2873 }
2874 }
2875 all_file_decl_data[k] = NULL;
2876 gcc_assert (k == count);
2877 }
2878
2879 /* Input file data before flattening (i.e. splitting them into subfiles to
2880 support incremental linking). */
2881 static int real_file_count;
2882 static GTY((length ("real_file_count + 1"))) struct lto_file_decl_data **real_file_decl_data;
2883
2884 static void print_lto_report_1 (void);
2885
2886 /* Read all the symbols from the input files FNAMES. NFILES is the
2887 number of files requested in the command line. Instantiate a
2888 global call graph by aggregating all the sub-graphs found in each
2889 file. */
2890
2891 static void
2892 read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
2893 {
2894 unsigned int i, last_file_ix;
2895 FILE *resolution;
2896 int count = 0;
2897 struct lto_file_decl_data **decl_data;
2898 void **res;
2899 symtab_node *snode;
2900
2901 init_cgraph ();
2902
2903 timevar_push (TV_IPA_LTO_DECL_IN);
2904
2905 real_file_decl_data
2906 = decl_data = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (nfiles + 1);
2907 real_file_count = nfiles;
2908
2909 /* Read the resolution file. */
2910 resolution = NULL;
2911 if (resolution_file_name)
2912 {
2913 int t;
2914 unsigned num_objects;
2915
2916 resolution = fopen (resolution_file_name, "r");
2917 if (resolution == NULL)
2918 fatal_error ("could not open symbol resolution file: %m");
2919
2920 t = fscanf (resolution, "%u", &num_objects);
2921 gcc_assert (t == 1);
2922
2923 /* True, since the plugin splits the archives. */
2924 gcc_assert (num_objects == nfiles);
2925 }
2926 cgraph_state = CGRAPH_LTO_STREAMING;
2927
2928 canonical_type_hash_cache = new pointer_map <hashval_t>;
2929 gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
2930 gimple_canonical_type_eq, 0);
2931 gcc_obstack_init (&tree_scc_hash_obstack);
2932 tree_scc_hash.create (4096);
2933
2934 /* Register the common node types with the canonical type machinery so
2935 we properly share alias-sets across languages and TUs. Do not
2936 expose the common nodes as type merge targets - those that should be
2937 are already exposed by pre-loading the LTO streamer caches.
2938 Do two passes - first clear TYPE_CANONICAL and then re-compute it. */
2939 for (i = 0; i < itk_none; ++i)
2940 lto_register_canonical_types (integer_types[i], true);
2941 for (i = 0; i < stk_type_kind_last; ++i)
2942 lto_register_canonical_types (sizetype_tab[i], true);
2943 for (i = 0; i < TI_MAX; ++i)
2944 lto_register_canonical_types (global_trees[i], true);
2945 for (i = 0; i < itk_none; ++i)
2946 lto_register_canonical_types (integer_types[i], false);
2947 for (i = 0; i < stk_type_kind_last; ++i)
2948 lto_register_canonical_types (sizetype_tab[i], false);
2949 for (i = 0; i < TI_MAX; ++i)
2950 lto_register_canonical_types (global_trees[i], false);
2951
2952 if (!quiet_flag)
2953 fprintf (stderr, "Reading object files:");
2954
2955 /* Read all of the object files specified on the command line. */
2956 for (i = 0, last_file_ix = 0; i < nfiles; ++i)
2957 {
2958 struct lto_file_decl_data *file_data = NULL;
2959 if (!quiet_flag)
2960 {
2961 fprintf (stderr, " %s", fnames[i]);
2962 fflush (stderr);
2963 }
2964
2965 current_lto_file = lto_obj_file_open (fnames[i], false);
2966 if (!current_lto_file)
2967 break;
2968
2969 file_data = lto_file_read (current_lto_file, resolution, &count);
2970 if (!file_data)
2971 {
2972 lto_obj_file_close (current_lto_file);
2973 free (current_lto_file);
2974 current_lto_file = NULL;
2975 break;
2976 }
2977
2978 decl_data[last_file_ix++] = file_data;
2979
2980 lto_obj_file_close (current_lto_file);
2981 free (current_lto_file);
2982 current_lto_file = NULL;
2983 }
2984
2985 lto_flatten_files (decl_data, count, last_file_ix);
2986 lto_stats.num_input_files = count;
2987 ggc_free(decl_data);
2988 real_file_decl_data = NULL;
2989
2990 if (resolution_file_name)
2991 fclose (resolution);
2992
2993 /* Show the LTO report before launching LTRANS. */
2994 if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
2995 print_lto_report_1 ();
2996
2997 /* Free gimple type merging datastructures. */
2998 tree_scc_hash.dispose ();
2999 obstack_free (&tree_scc_hash_obstack, NULL);
3000 htab_delete (gimple_canonical_types);
3001 gimple_canonical_types = NULL;
3002 delete canonical_type_hash_cache;
3003 canonical_type_hash_cache = NULL;
3004 ggc_collect ();
3005
3006 /* Set the hooks so that all of the ipa passes can read in their data. */
3007 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
3008
3009 timevar_pop (TV_IPA_LTO_DECL_IN);
3010
3011 if (!quiet_flag)
3012 fprintf (stderr, "\nReading the callgraph\n");
3013
3014 timevar_push (TV_IPA_LTO_CGRAPH_IO);
3015 /* Read the symtab. */
3016 input_symtab ();
3017
3018 /* Store resolutions into the symbol table. */
3019
3020 FOR_EACH_SYMBOL (snode)
3021 if (symtab_real_symbol_p (snode)
3022 && snode->lto_file_data
3023 && snode->lto_file_data->resolution_map
3024 && (res = pointer_map_contains (snode->lto_file_data->resolution_map,
3025 snode->decl)))
3026 snode->resolution
3027 = (enum ld_plugin_symbol_resolution)(size_t)*res;
3028 for (i = 0; all_file_decl_data[i]; i++)
3029 if (all_file_decl_data[i]->resolution_map)
3030 {
3031 pointer_map_destroy (all_file_decl_data[i]->resolution_map);
3032 all_file_decl_data[i]->resolution_map = NULL;
3033 }
3034
3035 timevar_pop (TV_IPA_LTO_CGRAPH_IO);
3036
3037 if (!quiet_flag)
3038 fprintf (stderr, "Merging declarations\n");
3039
3040 timevar_push (TV_IPA_LTO_DECL_MERGE);
3041 /* Merge global decls. In LTRANS mode we read the merged cgraph, so we do
3042 not need to care about resolving symbols again; we only need to replace
3043 duplicated declarations read from the callgraph and from function
3044 sections. */
3045 if (!flag_ltrans)
3046 {
3047 lto_symtab_merge_decls ();
3048
3049 /* If there were errors during symbol merging, bail out; we have no
3050 good way to recover here. */
3051 if (seen_error ())
3052 fatal_error ("errors during merging of translation units");
3053
3054 /* Fixup all decls. */
3055 lto_fixup_decls (all_file_decl_data);
3056 }
3057 if (tree_with_vars)
3058 ggc_free (tree_with_vars);
3059 tree_with_vars = NULL;
3060 ggc_collect ();
3061
3062 timevar_pop (TV_IPA_LTO_DECL_MERGE);
3063 /* Each pass will set the appropriate timer. */
3064
3065 if (!quiet_flag)
3066 fprintf (stderr, "Reading summaries\n");
3067
3068 /* Read the IPA summary data. */
3069 if (flag_ltrans)
3070 ipa_read_optimization_summaries ();
3071 else
3072 ipa_read_summaries ();
3073
3074 for (i = 0; all_file_decl_data[i]; i++)
3075 {
3076 gcc_assert (all_file_decl_data[i]->symtab_node_encoder);
3077 lto_symtab_encoder_delete (all_file_decl_data[i]->symtab_node_encoder);
3078 all_file_decl_data[i]->symtab_node_encoder = NULL;
3079 lto_free_function_in_decl_state (all_file_decl_data[i]->global_decl_state);
3080 all_file_decl_data[i]->global_decl_state = NULL;
3081 all_file_decl_data[i]->current_decl_state = NULL;
3082 }
3083
3084 /* Finally merge the cgraph according to the decl merging decisions. */
3085 timevar_push (TV_IPA_LTO_CGRAPH_MERGE);
3086 if (cgraph_dump_file)
3087 {
3088 fprintf (cgraph_dump_file, "Before merging:\n");
3089 dump_symtab (cgraph_dump_file);
3090 }
3091 lto_symtab_merge_symbols ();
3092 ggc_collect ();
3093 cgraph_state = CGRAPH_STATE_IPA_SSA;
3094
3095 timevar_pop (TV_IPA_LTO_CGRAPH_MERGE);
3096
3097 timevar_push (TV_IPA_LTO_DECL_INIT_IO);
3098
3099 /* Indicate that the cgraph is built and ready. */
3100 cgraph_function_flags_ready = true;
3101
3102 timevar_pop (TV_IPA_LTO_DECL_INIT_IO);
3103 ggc_free (all_file_decl_data);
3104 all_file_decl_data = NULL;
3105 }
3106
3107
3108 /* Materialize all the bodies for all the nodes in the callgraph. */
3109
3110 static void
3111 materialize_cgraph (void)
3112 {
3113 struct cgraph_node *node;
3114 timevar_id_t lto_timer;
3115
3116 if (!quiet_flag)
3117 fprintf (stderr,
3118 flag_wpa ? "Materializing decls:" : "Reading function bodies:");
3119
3120 /* Now that we have input the cgraph, we need to clear all of the aux
3121 nodes and read the functions if we are not running in WPA mode. */
3122 timevar_push (TV_IPA_LTO_GIMPLE_IN);
3123
3124 FOR_EACH_FUNCTION (node)
3125 {
3126 if (node->lto_file_data)
3127 {
3128 lto_materialize_function (node);
3129 lto_stats.num_input_cgraph_nodes++;
3130 }
3131 }
3132
3133 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
3134
3135 /* Start the appropriate timer depending on the mode that we are
3136 operating in. */
3137 lto_timer = (flag_wpa) ? TV_WHOPR_WPA
3138 : (flag_ltrans) ? TV_WHOPR_LTRANS
3139 : TV_LTO;
3140 timevar_push (lto_timer);
3141
3142 current_function_decl = NULL;
3143 set_cfun (NULL);
3144
3145 if (!quiet_flag)
3146 fprintf (stderr, "\n");
3147
3148 timevar_pop (lto_timer);
3149 }
3150
3151
3152 /* Show various memory usage statistics related to LTO. */
3153 static void
3154 print_lto_report_1 (void)
3155 {
3156 const char *pfx = (flag_lto) ? "LTO" : (flag_wpa) ? "WPA" : "LTRANS";
3157 fprintf (stderr, "%s statistics\n", pfx);
3158
3159 fprintf (stderr, "[%s] read %lu SCCs of average size %f\n",
3160 pfx, num_sccs_read, total_scc_size / (double)num_sccs_read);
3161 fprintf (stderr, "[%s] %lu tree bodies read in total\n", pfx, total_scc_size);
3162 if (flag_wpa && tree_scc_hash.is_created ())
3163 {
3164 fprintf (stderr, "[%s] tree SCC table: size %ld, %ld elements, "
3165 "collision ratio: %f\n", pfx,
3166 (long) tree_scc_hash.size (),
3167 (long) tree_scc_hash.elements (),
3168 tree_scc_hash.collisions ());
3169 hash_table<tree_scc_hasher>::iterator hiter;
3170 tree_scc *scc, *max_scc = NULL;
3171 unsigned max_length = 0;
3172 FOR_EACH_HASH_TABLE_ELEMENT (tree_scc_hash, scc, x, hiter)
3173 {
3174 unsigned length = 0;
3175 tree_scc *s = scc;
3176 for (; s; s = s->next)
3177 length++;
3178 if (length > max_length)
3179 {
3180 max_length = length;
3181 max_scc = scc;
3182 }
3183 }
3184 fprintf (stderr, "[%s] tree SCC max chain length %u (size %u)\n",
3185 pfx, max_length, max_scc->len);
3186 fprintf (stderr, "[%s] Compared %lu SCCs, %lu collisions (%f)\n", pfx,
3187 num_scc_compares, num_scc_compare_collisions,
3188 num_scc_compare_collisions / (double) num_scc_compares);
3189 fprintf (stderr, "[%s] Merged %lu SCCs\n", pfx, num_sccs_merged);
3190 fprintf (stderr, "[%s] Merged %lu tree bodies\n", pfx,
3191 total_scc_size_merged);
3192 fprintf (stderr, "[%s] Merged %lu types\n", pfx, num_merged_types);
3193 fprintf (stderr, "[%s] %lu types prevailed (%lu associated trees)\n",
3194 pfx, num_prevailing_types, num_type_scc_trees);
3195 fprintf (stderr, "[%s] GIMPLE canonical type table: size %ld, "
3196 "%ld elements, %ld searches, %ld collisions (ratio: %f)\n", pfx,
3197 (long) htab_size (gimple_canonical_types),
3198 (long) htab_elements (gimple_canonical_types),
3199 (long) gimple_canonical_types->searches,
3200 (long) gimple_canonical_types->collisions,
3201 htab_collisions (gimple_canonical_types));
3202 fprintf (stderr, "[%s] GIMPLE canonical type pointer-map: "
3203 "%lu elements, %ld searches\n", pfx,
3204 num_canonical_type_hash_entries,
3205 num_canonical_type_hash_queries);
3206 }
3207
3208 print_lto_report (pfx);
3209 }
3210
3211 /* Perform whole program analysis (WPA) on the callgraph and write out the
3212 optimization plan. */
3213
3214 static void
3215 do_whole_program_analysis (void)
3216 {
3217 symtab_node *node;
3218
3219 lto_parallelism = 1;
3220
3221 /* TODO: jobserver communication is not supported yet. */
3222 if (!strcmp (flag_wpa, "jobserver"))
3223 lto_parallelism = -1;
3224 else
3225 {
3226 lto_parallelism = atoi (flag_wpa);
3227 if (lto_parallelism <= 0)
3228 lto_parallelism = 0;
3229 }
3230
3231 timevar_start (TV_PHASE_OPT_GEN);
3232
3233 /* Note that since we are in WPA mode, materialize_cgraph will not
3234 actually read in all the function bodies. It only materializes
3235 the decls and cgraph nodes so that analysis can be performed. */
3236 materialize_cgraph ();
3237
3238 /* Reading in the cgraph uses different timers, start timing WPA now. */
3239 timevar_push (TV_WHOPR_WPA);
3240
3241 if (pre_ipa_mem_report)
3242 {
3243 fprintf (stderr, "Memory consumption before IPA\n");
3244 dump_memory_report (false);
3245 }
3246
3247 cgraph_function_flags_ready = true;
3248
3249 if (cgraph_dump_file)
3250 dump_symtab (cgraph_dump_file);
3251 bitmap_obstack_initialize (NULL);
3252 cgraph_state = CGRAPH_STATE_IPA_SSA;
3253
3254 execute_ipa_pass_list (g->get_passes ()->all_regular_ipa_passes);
3255 symtab_remove_unreachable_nodes (false, dump_file);
3256
3257 if (cgraph_dump_file)
3258 {
3259 fprintf (cgraph_dump_file, "Optimized ");
3260 dump_symtab (cgraph_dump_file);
3261 }
3262 #ifdef ENABLE_CHECKING
3263 verify_cgraph ();
3264 #endif
3265 bitmap_obstack_release (NULL);
3266
3267 /* We are about to launch the final LTRANS phase, stop the WPA timer. */
3268 timevar_pop (TV_WHOPR_WPA);
3269
3270 timevar_push (TV_WHOPR_PARTITIONING);
3271 if (flag_lto_partition == LTO_PARTITION_1TO1)
3272 lto_1_to_1_map ();
3273 else if (flag_lto_partition == LTO_PARTITION_MAX)
3274 lto_max_map ();
3275 else if (flag_lto_partition == LTO_PARTITION_ONE)
3276 lto_balanced_map (1);
3277 else if (flag_lto_partition == LTO_PARTITION_BALANCED)
3278 lto_balanced_map (PARAM_VALUE (PARAM_LTO_PARTITIONS));
3279 else
3280 gcc_unreachable ();
3281
3282 /* Inline summaries are needed for balanced partitioning. Free them now so
3283 the memory can be used for streamer caches. */
3284 inline_free_summary ();
3285
3286 /* AUX pointers are used by the partitioning code to keep track of the number
3287 of partitions a symbol is in. This is no longer needed. */
3288 FOR_EACH_SYMBOL (node)
3289 node->aux = NULL;
3290
3291 lto_stats.num_cgraph_partitions += ltrans_partitions.length ();
3292
3293 /* Find out statics that need to be promoted
3294 to globals with hidden visibility because they are accessed from multiple
3295 partitions. */
3296 lto_promote_cross_file_statics ();
3297 timevar_pop (TV_WHOPR_PARTITIONING);
3298
3299 timevar_stop (TV_PHASE_OPT_GEN);
3300
3301 /* Collect one last time - in lto_wpa_write_files we may end up forking
3302 with the idea that this doesn't increase memory usage. So we
3303 absolutely do not want to collect after that. */
3304 ggc_collect ();
3305
3306 timevar_start (TV_PHASE_STREAM_OUT);
3307 if (!quiet_flag)
3308 {
3309 fprintf (stderr, "\nStreaming out");
3310 fflush (stderr);
3311 }
3312 lto_wpa_write_files ();
3313 if (!quiet_flag)
3314 fprintf (stderr, "\n");
3315 timevar_stop (TV_PHASE_STREAM_OUT);
3316
3317 if (post_ipa_mem_report)
3318 {
3319 fprintf (stderr, "Memory consumption after IPA\n");
3320 dump_memory_report (false);
3321 }
3322
3323 /* Show the LTO report before launching LTRANS. */
3324 if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
3325 print_lto_report_1 ();
3326 if (mem_report_wpa)
3327 dump_memory_report (true);
3328 }
3329
3330
3331 static GTY(()) tree lto_eh_personality_decl;
3332
3333 /* Return the LTO personality function decl. */
3334
3335 tree
3336 lto_eh_personality (void)
3337 {
3338 if (!lto_eh_personality_decl)
3339 {
3340 /* Use the first personality DECL for our personality if we don't
3341 support multiple ones. This ensures that we don't artificially
3342 create the need for them in a single-language program. */
3343 if (first_personality_decl && !dwarf2out_do_cfi_asm ())
3344 lto_eh_personality_decl = first_personality_decl;
3345 else
3346 lto_eh_personality_decl = lhd_gcc_personality ();
3347 }
3348
3349 return lto_eh_personality_decl;
3350 }
3351
3352 /* Set the process name based on the LTO mode. */
3353
3354 static void
3355 lto_process_name (void)
3356 {
3357 if (flag_lto)
3358 setproctitle ("lto1-lto");
3359 if (flag_wpa)
3360 setproctitle ("lto1-wpa");
3361 if (flag_ltrans)
3362 setproctitle ("lto1-ltrans");
3363 }
3364
3365
3366 /* Initialize the LTO front end. */
3367
3368 static void
3369 lto_init (void)
3370 {
3371 lto_process_name ();
3372 lto_streamer_hooks_init ();
3373 lto_reader_init ();
3374 lto_set_in_hooks (NULL, get_section_data, free_section_data);
3375 memset (&lto_stats, 0, sizeof (lto_stats));
3376 bitmap_obstack_initialize (NULL);
3377 gimple_register_cfg_hooks ();
3378 }
3379
3380
3381 /* Main entry point for the GIMPLE front end. This front end has
3382 three main personalities:
3383
3384 - LTO (-flto). All the object files on the command line are
3385 loaded in memory and processed as a single translation unit.
3386 This is the traditional link-time optimization behavior.
3387
3388 - WPA (-fwpa). Only the callgraph and summary information for
3389 files in the command file are loaded. A single callgraph
3390 (without function bodies) is instantiated for the whole set of
3391 files. IPA passes are only allowed to analyze the call graph
3392 and make transformation decisions. The callgraph is
3393 partitioned, each partition is written to a new object file
3394 together with the transformation decisions.
3395
3396 - LTRANS (-fltrans). Similar to -flto but it prevents the IPA
3397 summary passes from running again. Since WPA computed summary
3398 information and decided what transformations to apply, LTRANS
3399 simply applies them. */
3400
3401 void
3402 lto_main (void)
3403 {
3404 /* LTO is called as a front end, even though it is not a front end.
3405 Because it is called as a front end, TV_PHASE_PARSING and
3406 TV_PARSE_GLOBAL are active, and we need to turn them off while
3407 doing LTO. Later we turn them back on so they are active up in
3408 toplev.c. */
3409 timevar_pop (TV_PARSE_GLOBAL);
3410 timevar_stop (TV_PHASE_PARSING);
3411
3412 timevar_start (TV_PHASE_SETUP);
3413
3414 /* Initialize the LTO front end. */
3415 lto_init ();
3416
3417 timevar_stop (TV_PHASE_SETUP);
3418 timevar_start (TV_PHASE_STREAM_IN);
3419
3420 /* Read all the symbols and call graph from all the files in the
3421 command line. */
3422 read_cgraph_and_symbols (num_in_fnames, in_fnames);
3423
3424 timevar_stop (TV_PHASE_STREAM_IN);
3425
3426 if (!seen_error ())
3427 {
3428 /* If WPA is enabled analyze the whole call graph and create an
3429 optimization plan. Otherwise, read in all the function
3430 bodies and continue with optimization. */
3431 if (flag_wpa)
3432 do_whole_program_analysis ();
3433 else
3434 {
3435 timevar_start (TV_PHASE_OPT_GEN);
3436
3437 materialize_cgraph ();
3438 if (!flag_ltrans)
3439 lto_promote_statics_nonwpa ();
3440
3441 /* Let the middle end know that we have read and merged all of
3442 the input files. */
3443 compile ();
3444
3445 timevar_stop (TV_PHASE_OPT_GEN);
3446
3447 /* FIXME lto, if the processes spawned by WPA fail, we miss
3448 the chance to print WPA's report, so WPA will call
3449 print_lto_report before launching LTRANS. If LTRANS was
3450 launched directly by the driver we would not need to do
3451 this. */
3452 if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
3453 print_lto_report_1 ();
3454 }
3455 }
3456
3457 /* Here we make LTO pretend to be a parser. */
3458 timevar_start (TV_PHASE_PARSING);
3459 timevar_push (TV_PARSE_GLOBAL);
3460 }
3461
3462 #include "gt-lto-lto.h"