1 /* Top-level LTO routines.
2 Copyright (C) 2009-2014 Free Software Foundation, Inc.
3 Contributed by CodeSourcery, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "opts.h"
25 #include "toplev.h"
26 #include "tree.h"
27 #include "stor-layout.h"
28 #include "diagnostic-core.h"
29 #include "tm.h"
30 #include "cgraph.h"
31 #include "tree-ssa-operands.h"
32 #include "tree-pass.h"
33 #include "langhooks.h"
34 #include "bitmap.h"
35 #include "hash-map.h"
36 #include "ipa-prop.h"
37 #include "common.h"
38 #include "debug.h"
39 #include "tree-ssa-alias.h"
40 #include "internal-fn.h"
41 #include "gimple-expr.h"
42 #include "is-a.h"
43 #include "gimple.h"
44 #include "lto.h"
45 #include "lto-tree.h"
46 #include "lto-streamer.h"
47 #include "lto-section-names.h"
48 #include "tree-streamer.h"
49 #include "splay-tree.h"
50 #include "lto-partition.h"
51 #include "data-streamer.h"
52 #include "context.h"
53 #include "pass_manager.h"
54 #include "ipa-inline.h"
55 #include "params.h"
56
57
58 /* Number of parallel tasks to run, -1 if we want to use the GNU Make jobserver. */
59 static int lto_parallelism;
60
61 static GTY(()) tree first_personality_decl;
62
63 /* Returns a hash code for P. */
64
65 static hashval_t
66 hash_name (const void *p)
67 {
68 const struct lto_section_slot *ds = (const struct lto_section_slot *) p;
69 return (hashval_t) htab_hash_string (ds->name);
70 }
71
72
73 /* Returns nonzero if P1 and P2 are equal. */
74
75 static int
76 eq_name (const void *p1, const void *p2)
77 {
78 const struct lto_section_slot *s1 =
79 (const struct lto_section_slot *) p1;
80 const struct lto_section_slot *s2 =
81 (const struct lto_section_slot *) p2;
82
83 return strcmp (s1->name, s2->name) == 0;
84 }
85
86 /* Free an lto_section_slot. */
87
88 static void
89 free_with_string (void *arg)
90 {
91 struct lto_section_slot *s = (struct lto_section_slot *)arg;
92
93 free (CONST_CAST (char *, s->name));
94 free (arg);
95 }
96
97 /* Create the section hash table. */
98
99 htab_t
100 lto_obj_create_section_hash_table (void)
101 {
102 return htab_create (37, hash_name, eq_name, free_with_string);
103 }
104
105 /* Delete an allocated integer KEY in the splay tree. */
106
107 static void
108 lto_splay_tree_delete_id (splay_tree_key key)
109 {
110 free ((void *) key);
111 }
112
113 /* Compare splay tree node ids A and B. */
114
115 static int
116 lto_splay_tree_compare_ids (splay_tree_key a, splay_tree_key b)
117 {
118 unsigned HOST_WIDE_INT ai;
119 unsigned HOST_WIDE_INT bi;
120
121 ai = *(unsigned HOST_WIDE_INT *) a;
122 bi = *(unsigned HOST_WIDE_INT *) b;
123
124 if (ai < bi)
125 return -1;
126 else if (ai > bi)
127 return 1;
128 return 0;
129 }
130
131 /* Look up splay tree node by ID in splay tree T. */
132
133 static splay_tree_node
134 lto_splay_tree_lookup (splay_tree t, unsigned HOST_WIDE_INT id)
135 {
136 return splay_tree_lookup (t, (splay_tree_key) &id);
137 }
138
139 /* Check if KEY has ID. */
140
141 static bool
142 lto_splay_tree_id_equal_p (splay_tree_key key, unsigned HOST_WIDE_INT id)
143 {
144 return *(unsigned HOST_WIDE_INT *) key == id;
145 }
146
147 /* Insert a splay tree node into tree T with ID as key and FILE_DATA as value.
148 The ID is allocated separately because we need HOST_WIDE_INTs which may
149 be wider than a splay_tree_key. */
150
151 static void
152 lto_splay_tree_insert (splay_tree t, unsigned HOST_WIDE_INT id,
153 struct lto_file_decl_data *file_data)
154 {
155 unsigned HOST_WIDE_INT *idp = XCNEW (unsigned HOST_WIDE_INT);
156 *idp = id;
157 splay_tree_insert (t, (splay_tree_key) idp, (splay_tree_value) file_data);
158 }
159
160 /* Create a splay tree. */
161
162 static splay_tree
163 lto_splay_tree_new (void)
164 {
165 return splay_tree_new (lto_splay_tree_compare_ids,
166 lto_splay_tree_delete_id,
167 NULL);
168 }
169
170 /* Return true when NODE has a clone that is analyzed (i.e. we need
171 to load its body even if the node itself is not needed). */
172
173 static bool
174 has_analyzed_clone_p (struct cgraph_node *node)
175 {
176 struct cgraph_node *orig = node;
177 node = node->clones;
178 if (node)
179 while (node != orig)
180 {
181 if (node->analyzed)
182 return true;
183 if (node->clones)
184 node = node->clones;
185 else if (node->next_sibling_clone)
186 node = node->next_sibling_clone;
187 else
188 {
189 while (node != orig && !node->next_sibling_clone)
190 node = node->clone_of;
191 if (node != orig)
192 node = node->next_sibling_clone;
193 }
194 }
195 return false;
196 }
197
198 /* Read the function body for the function associated with NODE. */
199
200 static void
201 lto_materialize_function (struct cgraph_node *node)
202 {
203 tree decl;
204
205 decl = node->decl;
206 /* Read in functions with body (analyzed nodes)
207 and also functions that are needed to produce virtual clones. */
208 if ((cgraph_function_with_gimple_body_p (node) && node->analyzed)
209 || node->used_as_abstract_origin
210 || has_analyzed_clone_p (node))
211 {
212 /* Clones don't need to be read. */
213 if (node->clone_of)
214 return;
215 if (DECL_FUNCTION_PERSONALITY (decl) && !first_personality_decl)
216 first_personality_decl = DECL_FUNCTION_PERSONALITY (decl);
217 }
218
219 /* Let the middle end know about the function. */
220 rest_of_decl_compilation (decl, 1, 0);
221 }
222
223
224 /* Decode the content of memory pointed to by DATA in the in decl
225 state object STATE. DATA_IN points to a data_in structure for
226 decoding. Return the address after the decoded object in the
227 input. */
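/* As read below, the encoding is: one cache index for the function decl
   (or for void_type_node when the state describes the global decl state),
   followed by LTO_N_DECL_STREAMS blocks consisting of a length word and
   that many cache indices. */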
228
229 static const uint32_t *
230 lto_read_in_decl_state (struct data_in *data_in, const uint32_t *data,
231 struct lto_in_decl_state *state)
232 {
233 uint32_t ix;
234 tree decl;
235 uint32_t i, j;
236
237 ix = *data++;
238 decl = streamer_tree_cache_get_tree (data_in->reader_cache, ix);
239 if (TREE_CODE (decl) != FUNCTION_DECL)
240 {
241 gcc_assert (decl == void_type_node);
242 decl = NULL_TREE;
243 }
244 state->fn_decl = decl;
245
246 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
247 {
248 uint32_t size = *data++;
249 tree *decls = ggc_vec_alloc<tree> (size);
250
251 for (j = 0; j < size; j++)
252 decls[j] = streamer_tree_cache_get_tree (data_in->reader_cache, data[j]);
253
254 state->streams[i].size = size;
255 state->streams[i].trees = decls;
256 data += size;
257 }
258
259 return data;
260 }
261
262
263 /* Global canonical type table. */
264 static htab_t gimple_canonical_types;
265 static hash_map<const_tree, hashval_t> *canonical_type_hash_cache;
266 static unsigned long num_canonical_type_hash_entries;
267 static unsigned long num_canonical_type_hash_queries;
268
269 static hashval_t iterative_hash_canonical_type (tree type, hashval_t val);
270 static hashval_t gimple_canonical_type_hash (const void *p);
271 static void gimple_register_canonical_type_1 (tree t, hashval_t hash);
272
273 /* Return a hash value for gimple type TYPE.
274
275 The hash value returned is equal for types considered compatible
276 by gimple_canonical_types_compatible_p. */
277
278 static hashval_t
279 hash_canonical_type (tree type)
280 {
281 hashval_t v;
282
283 /* Combine a few common features of types so that types are grouped into
284 smaller sets; when searching for existing matching types to merge,
285 only existing types having the same features as the new type will be
286 checked. */
287 v = iterative_hash_hashval_t (TREE_CODE (type), 0);
288 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
289
290 /* Incorporate common features of numerical types. */
291 if (INTEGRAL_TYPE_P (type)
292 || SCALAR_FLOAT_TYPE_P (type)
293 || FIXED_POINT_TYPE_P (type)
294 || TREE_CODE (type) == OFFSET_TYPE
295 || POINTER_TYPE_P (type))
296 {
297 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
298 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
299 }
300
301 if (VECTOR_TYPE_P (type))
302 {
303 v = iterative_hash_hashval_t (TYPE_VECTOR_SUBPARTS (type), v);
304 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
305 }
306
307 if (TREE_CODE (type) == COMPLEX_TYPE)
308 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
309
310 /* For pointer and reference types, fold in information about the type
311 pointed to but do not recurse to the pointed-to type. */
312 if (POINTER_TYPE_P (type))
313 {
314 v = iterative_hash_hashval_t (TYPE_ADDR_SPACE (TREE_TYPE (type)), v);
315 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
316 }
317
318 /* For integer types hash only the string flag. */
319 if (TREE_CODE (type) == INTEGER_TYPE)
320 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
321
322 /* For array types hash the domain bounds and the string flag. */
323 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
324 {
325 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
326 /* OMP lowering can introduce error_mark_node in place of
327 random local decls in types. */
328 if (TYPE_MIN_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
329 v = iterative_hash_expr (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), v);
330 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
331 v = iterative_hash_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), v);
332 }
333
334 /* Recurse for aggregates with a single element type. */
335 if (TREE_CODE (type) == ARRAY_TYPE
336 || TREE_CODE (type) == COMPLEX_TYPE
337 || TREE_CODE (type) == VECTOR_TYPE)
338 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
339
340 /* Incorporate function return and argument types. */
341 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
342 {
343 unsigned na;
344 tree p;
345
346 /* For method types also incorporate their parent class. */
347 if (TREE_CODE (type) == METHOD_TYPE)
348 v = iterative_hash_canonical_type (TYPE_METHOD_BASETYPE (type), v);
349
350 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
351
352 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
353 {
354 v = iterative_hash_canonical_type (TREE_VALUE (p), v);
355 na++;
356 }
357
358 v = iterative_hash_hashval_t (na, v);
359 }
360
361 if (RECORD_OR_UNION_TYPE_P (type))
362 {
363 unsigned nf;
364 tree f;
365
366 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
367 if (TREE_CODE (f) == FIELD_DECL)
368 {
369 v = iterative_hash_canonical_type (TREE_TYPE (f), v);
370 nf++;
371 }
372
373 v = iterative_hash_hashval_t (nf, v);
374 }
375
376 return v;
377 }
378
379 /* Return a hash value for gimple type TYPE combined with VAL. */
380
381 static hashval_t
382 iterative_hash_canonical_type (tree type, hashval_t val)
383 {
384 hashval_t v;
385 /* An already processed type. */
386 if (TYPE_CANONICAL (type))
387 {
388 type = TYPE_CANONICAL (type);
389 v = gimple_canonical_type_hash (type);
390 }
391 else
392 {
393 /* Canonical types should not be able to form SCCs by design, this
394 recursion is just because we do not register canonical types in
395 optimal order. To avoid quadratic behavior also register the
396 type here. */
397 v = hash_canonical_type (type);
398 gimple_register_canonical_type_1 (type, v);
399 }
400 return iterative_hash_hashval_t (v, val);
401 }
402
403 /* Returns the hash for a canonical type P. */
404
405 static hashval_t
406 gimple_canonical_type_hash (const void *p)
407 {
408 num_canonical_type_hash_queries++;
409 hashval_t *slot = canonical_type_hash_cache->get ((const_tree) p);
410 gcc_assert (slot != NULL);
411 return *slot;
412 }
413
414
415 /* The TYPE_CANONICAL merging machinery. It should closely resemble
416 the middle-end types_compatible_p function. It needs to avoid
417 claiming types are different for types that should be treated
418 the same with respect to TBAA. Canonical types are also used
419 for IL consistency checks via the useless_type_conversion_p
420 predicate which does not handle all type kinds itself but falls
421 back to pointer-comparison of TYPE_CANONICAL for aggregates
422 for example. */
423
424 /* Return true iff T1 and T2 are structurally identical as far as
425 TBAA is concerned. */
426
427 static bool
428 gimple_canonical_types_compatible_p (tree t1, tree t2)
429 {
430 /* Before starting to set up the SCC machinery handle simple cases. */
431
432 /* Check first for the obvious case of pointer identity. */
433 if (t1 == t2)
434 return true;
435
436 /* Check that we have two types to compare. */
437 if (t1 == NULL_TREE || t2 == NULL_TREE)
438 return false;
439
440 /* If the types have been previously registered and found equal
441 they still are. */
442 if (TYPE_CANONICAL (t1)
443 && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
444 return true;
445
446 /* Can't be the same type if the types don't have the same code. */
447 if (TREE_CODE (t1) != TREE_CODE (t2))
448 return false;
449
450 /* Qualifiers do not matter for canonical type comparison purposes. */
451
452 /* Void types and nullptr types are always the same. */
453 if (TREE_CODE (t1) == VOID_TYPE
454 || TREE_CODE (t1) == NULLPTR_TYPE)
455 return true;
456
457 /* Can't be the same type if they have different modes. */
458 if (TYPE_MODE (t1) != TYPE_MODE (t2))
459 return false;
460
461 /* Non-aggregate types can be handled cheaply. */
462 if (INTEGRAL_TYPE_P (t1)
463 || SCALAR_FLOAT_TYPE_P (t1)
464 || FIXED_POINT_TYPE_P (t1)
465 || TREE_CODE (t1) == VECTOR_TYPE
466 || TREE_CODE (t1) == COMPLEX_TYPE
467 || TREE_CODE (t1) == OFFSET_TYPE
468 || POINTER_TYPE_P (t1))
469 {
470 /* Can't be the same type if they have different sign or precision. */
471 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
472 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
473 return false;
474
475 if (TREE_CODE (t1) == INTEGER_TYPE
476 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
477 return false;
478
479 /* For canonical type comparisons we do not want to build SCCs
480 so we cannot compare pointed-to types. But we can, for now,
481 require the same pointed-to type kind and match what
482 useless_type_conversion_p would do. */
483 if (POINTER_TYPE_P (t1))
484 {
485 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
486 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
487 return false;
488
489 if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
490 return false;
491 }
492
493 /* Tail-recurse to components. */
494 if (TREE_CODE (t1) == VECTOR_TYPE
495 || TREE_CODE (t1) == COMPLEX_TYPE)
496 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
497 TREE_TYPE (t2));
498
499 return true;
500 }
501
502 /* Do type-specific comparisons. */
503 switch (TREE_CODE (t1))
504 {
505 case ARRAY_TYPE:
506 /* Array types are the same if the element types are the same and
507 the number of elements is the same. */
508 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
509 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
510 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
511 return false;
512 else
513 {
514 tree i1 = TYPE_DOMAIN (t1);
515 tree i2 = TYPE_DOMAIN (t2);
516
517 /* For an incomplete external array, the type domain can be
518 NULL_TREE. Check this condition also. */
519 if (i1 == NULL_TREE && i2 == NULL_TREE)
520 return true;
521 else if (i1 == NULL_TREE || i2 == NULL_TREE)
522 return false;
523 else
524 {
525 tree min1 = TYPE_MIN_VALUE (i1);
526 tree min2 = TYPE_MIN_VALUE (i2);
527 tree max1 = TYPE_MAX_VALUE (i1);
528 tree max2 = TYPE_MAX_VALUE (i2);
529
530 /* The minimum/maximum values have to be the same. */
531 if ((min1 == min2
532 || (min1 && min2
533 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
534 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
535 || operand_equal_p (min1, min2, 0))))
536 && (max1 == max2
537 || (max1 && max2
538 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
539 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
540 || operand_equal_p (max1, max2, 0)))))
541 return true;
542 else
543 return false;
544 }
545 }
546
547 case METHOD_TYPE:
548 case FUNCTION_TYPE:
549 /* Function types are the same if the return type and argument types
550 are the same. */
551 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
552 return false;
553
554 if (!comp_type_attributes (t1, t2))
555 return false;
556
557 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
558 return true;
559 else
560 {
561 tree parms1, parms2;
562
563 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
564 parms1 && parms2;
565 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
566 {
567 if (!gimple_canonical_types_compatible_p
568 (TREE_VALUE (parms1), TREE_VALUE (parms2)))
569 return false;
570 }
571
572 if (parms1 || parms2)
573 return false;
574
575 return true;
576 }
577
578 case RECORD_TYPE:
579 case UNION_TYPE:
580 case QUAL_UNION_TYPE:
581 {
582 tree f1, f2;
583
584 /* For aggregate types, all the fields must be the same. */
585 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
586 f1 || f2;
587 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
588 {
589 /* Skip non-fields. */
590 while (f1 && TREE_CODE (f1) != FIELD_DECL)
591 f1 = TREE_CHAIN (f1);
592 while (f2 && TREE_CODE (f2) != FIELD_DECL)
593 f2 = TREE_CHAIN (f2);
594 if (!f1 || !f2)
595 break;
596 /* The fields must have the same name, offset and type. */
597 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
598 || !gimple_compare_field_offset (f1, f2)
599 || !gimple_canonical_types_compatible_p
600 (TREE_TYPE (f1), TREE_TYPE (f2)))
601 return false;
602 }
603
604 /* If one aggregate has more fields than the other, they
605 are not the same. */
606 if (f1 || f2)
607 return false;
608
609 return true;
610 }
611
612 default:
613 gcc_unreachable ();
614 }
615 }
616
617
618 /* Returns nonzero if P1 and P2 are equal. */
619
620 static int
621 gimple_canonical_type_eq (const void *p1, const void *p2)
622 {
623 const_tree t1 = (const_tree) p1;
624 const_tree t2 = (const_tree) p2;
625 return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
626 CONST_CAST_TREE (t2));
627 }
628
629 /* Main worker for gimple_register_canonical_type. */
630
631 static void
632 gimple_register_canonical_type_1 (tree t, hashval_t hash)
633 {
634 void **slot;
635
636 gcc_checking_assert (TYPE_P (t) && !TYPE_CANONICAL (t));
637
638 slot = htab_find_slot_with_hash (gimple_canonical_types, t, hash, INSERT);
639 if (*slot)
640 {
641 tree new_type = (tree)(*slot);
642 gcc_checking_assert (new_type != t);
643 TYPE_CANONICAL (t) = new_type;
644 }
645 else
646 {
647 TYPE_CANONICAL (t) = t;
648 *slot = (void *) t;
649 /* Cache the just computed hash value. */
650 num_canonical_type_hash_entries++;
651 bool existed_p = canonical_type_hash_cache->put (t, hash);
652 gcc_assert (!existed_p);
653 }
654 }
655
656 /* Register type T in the global type table gimple_types and set
657 TYPE_CANONICAL of T accordingly.
658 This is used by LTO to merge structurally equivalent types for
659 type-based aliasing purposes across different TUs and languages.
660
661 ??? This merging does not exactly match how the tree.c middle-end
662 functions will assign TYPE_CANONICAL when new types are created
663 during optimization (which at least happens for pointer and array
664 types). */
665
666 static void
667 gimple_register_canonical_type (tree t)
668 {
669 if (TYPE_CANONICAL (t))
670 return;
671
672 gimple_register_canonical_type_1 (t, hash_canonical_type (t));
673 }
674
675 /* Re-compute TYPE_CANONICAL for NODE and related types. */
676
677 static void
678 lto_register_canonical_types (tree node, bool first_p)
679 {
680 if (!node
681 || !TYPE_P (node))
682 return;
683
684 if (first_p)
685 TYPE_CANONICAL (node) = NULL_TREE;
686
687 if (POINTER_TYPE_P (node)
688 || TREE_CODE (node) == COMPLEX_TYPE
689 || TREE_CODE (node) == ARRAY_TYPE)
690 lto_register_canonical_types (TREE_TYPE (node), first_p);
691
692 if (!first_p)
693 gimple_register_canonical_type (node);
694 }
695
696
697 /* Remember trees that contain references to declarations. */
698 static GTY(()) vec <tree, va_gc> *tree_with_vars;
699
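/* Helper macros for the mentions_vars_p_* routines below.  CHECK_VAR makes
   the containing predicate return true when TT is a public or external
   VAR_DECL or FUNCTION_DECL, i.e. a reference that may need fixing up
   after decl merging; CHECK_NO_VAR only asserts that the field never
   points to a VAR_DECL or FUNCTION_DECL at all. */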
700 #define CHECK_VAR(tt) \
701 do \
702 { \
703 if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \
704 && (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \
705 return true; \
706 } while (0)
707
708 #define CHECK_NO_VAR(tt) \
709 gcc_checking_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
710
711 /* Check presence of pointers to decls in fields of a tree_typed T. */
712
713 static inline bool
714 mentions_vars_p_typed (tree t)
715 {
716 CHECK_NO_VAR (TREE_TYPE (t));
717 return false;
718 }
719
720 /* Check presence of pointers to decls in fields of a tree_common T. */
721
722 static inline bool
723 mentions_vars_p_common (tree t)
724 {
725 if (mentions_vars_p_typed (t))
726 return true;
727 CHECK_NO_VAR (TREE_CHAIN (t));
728 return false;
729 }
730
731 /* Check presence of pointers to decls in fields of a decl_minimal T. */
732
733 static inline bool
734 mentions_vars_p_decl_minimal (tree t)
735 {
736 if (mentions_vars_p_common (t))
737 return true;
738 CHECK_NO_VAR (DECL_NAME (t));
739 CHECK_VAR (DECL_CONTEXT (t));
740 return false;
741 }
742
743 /* Check presence of pointers to decls in fields of a decl_common T. */
744
745 static inline bool
746 mentions_vars_p_decl_common (tree t)
747 {
748 if (mentions_vars_p_decl_minimal (t))
749 return true;
750 CHECK_VAR (DECL_SIZE (t));
751 CHECK_VAR (DECL_SIZE_UNIT (t));
752 CHECK_VAR (DECL_INITIAL (t));
753 CHECK_NO_VAR (DECL_ATTRIBUTES (t));
754 CHECK_VAR (DECL_ABSTRACT_ORIGIN (t));
755 return false;
756 }
757
758 /* Check presence of pointers to decls in fields of a decl_with_vis T. */
759
760 static inline bool
761 mentions_vars_p_decl_with_vis (tree t)
762 {
763 if (mentions_vars_p_decl_common (t))
764 return true;
765
766 /* Accessor macro has side-effects, use field-name here. */
767 CHECK_NO_VAR (t->decl_with_vis.assembler_name);
768 return false;
769 }
770
771 /* Check presence of pointers to decls in fields of a decl_non_common T. */
772
773 static inline bool
774 mentions_vars_p_decl_non_common (tree t)
775 {
776 if (mentions_vars_p_decl_with_vis (t))
777 return true;
778 CHECK_NO_VAR (DECL_ARGUMENT_FLD (t));
779 CHECK_NO_VAR (DECL_RESULT_FLD (t));
780 CHECK_NO_VAR (DECL_VINDEX (t));
781 return false;
782 }
783
784 /* Check presence of pointers to decls in fields of a FUNCTION_DECL T. */
785
786 static bool
787 mentions_vars_p_function (tree t)
788 {
789 if (mentions_vars_p_decl_non_common (t))
790 return true;
791 CHECK_VAR (DECL_FUNCTION_PERSONALITY (t));
792 return false;
793 }
794
795 /* Check presence of pointers to decls in fields of a field_decl T. */
796
797 static bool
798 mentions_vars_p_field_decl (tree t)
799 {
800 if (mentions_vars_p_decl_common (t))
801 return true;
802 CHECK_VAR (DECL_FIELD_OFFSET (t));
803 CHECK_NO_VAR (DECL_BIT_FIELD_TYPE (t));
804 CHECK_NO_VAR (DECL_QUALIFIER (t));
805 CHECK_NO_VAR (DECL_FIELD_BIT_OFFSET (t));
806 CHECK_NO_VAR (DECL_FCONTEXT (t));
807 return false;
808 }
809
810 /* Check presence of pointers to decls in fields of a type T. */
811
812 static bool
813 mentions_vars_p_type (tree t)
814 {
815 if (mentions_vars_p_common (t))
816 return true;
817 CHECK_NO_VAR (TYPE_CACHED_VALUES (t));
818 CHECK_VAR (TYPE_SIZE (t));
819 CHECK_VAR (TYPE_SIZE_UNIT (t));
820 CHECK_NO_VAR (TYPE_ATTRIBUTES (t));
821 CHECK_NO_VAR (TYPE_NAME (t));
822
823 CHECK_VAR (TYPE_MINVAL (t));
824 CHECK_VAR (TYPE_MAXVAL (t));
825
826 /* Accessor is for derived node types only. */
827 CHECK_NO_VAR (t->type_non_common.binfo);
828
829 CHECK_VAR (TYPE_CONTEXT (t));
830 CHECK_NO_VAR (TYPE_CANONICAL (t));
831 CHECK_NO_VAR (TYPE_MAIN_VARIANT (t));
832 CHECK_NO_VAR (TYPE_NEXT_VARIANT (t));
833 return false;
834 }
835
836 /* Check presence of pointers to decls in fields of a BINFO T. */
837
838 static bool
839 mentions_vars_p_binfo (tree t)
840 {
841 unsigned HOST_WIDE_INT i, n;
842
843 if (mentions_vars_p_common (t))
844 return true;
845 CHECK_VAR (BINFO_VTABLE (t));
846 CHECK_NO_VAR (BINFO_OFFSET (t));
847 CHECK_NO_VAR (BINFO_VIRTUALS (t));
848 CHECK_NO_VAR (BINFO_VPTR_FIELD (t));
849 n = vec_safe_length (BINFO_BASE_ACCESSES (t));
850 for (i = 0; i < n; i++)
851 CHECK_NO_VAR (BINFO_BASE_ACCESS (t, i));
852 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
853 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
854 n = BINFO_N_BASE_BINFOS (t);
855 for (i = 0; i < n; i++)
856 CHECK_NO_VAR (BINFO_BASE_BINFO (t, i));
857 return false;
858 }
859
860 /* Check presence of pointers to decls in fields of a CONSTRUCTOR T. */
861
862 static bool
863 mentions_vars_p_constructor (tree t)
864 {
865 unsigned HOST_WIDE_INT idx;
866 constructor_elt *ce;
867
868 if (mentions_vars_p_typed (t))
869 return true;
870
871 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
872 {
873 CHECK_NO_VAR (ce->index);
874 CHECK_VAR (ce->value);
875 }
876 return false;
877 }
878
879 /* Check presence of pointers to decls in fields of an expression tree T. */
880
881 static bool
882 mentions_vars_p_expr (tree t)
883 {
884 int i;
885 if (mentions_vars_p_typed (t))
886 return true;
887 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
888 CHECK_VAR (TREE_OPERAND (t, i));
889 return false;
890 }
891
892 /* Check presence of pointers to decls in fields of an OMP_CLAUSE T. */
893
894 static bool
895 mentions_vars_p_omp_clause (tree t)
896 {
897 int i;
898 if (mentions_vars_p_common (t))
899 return true;
900 for (i = omp_clause_num_ops[OMP_CLAUSE_CODE (t)] - 1; i >= 0; --i)
901 CHECK_VAR (OMP_CLAUSE_OPERAND (t, i));
902 return false;
903 }
904
905 /* Check presence of pointers to decls that need later fixup in T. */
906
907 static bool
908 mentions_vars_p (tree t)
909 {
910 switch (TREE_CODE (t))
911 {
912 case IDENTIFIER_NODE:
913 break;
914
915 case TREE_LIST:
916 CHECK_VAR (TREE_VALUE (t));
917 CHECK_VAR (TREE_PURPOSE (t));
918 CHECK_NO_VAR (TREE_CHAIN (t));
919 break;
920
921 case FIELD_DECL:
922 return mentions_vars_p_field_decl (t);
923
924 case LABEL_DECL:
925 case CONST_DECL:
926 case PARM_DECL:
927 case RESULT_DECL:
928 case IMPORTED_DECL:
929 case NAMESPACE_DECL:
930 case NAMELIST_DECL:
931 return mentions_vars_p_decl_common (t);
932
933 case VAR_DECL:
934 return mentions_vars_p_decl_with_vis (t);
935
936 case TYPE_DECL:
937 return mentions_vars_p_decl_non_common (t);
938
939 case FUNCTION_DECL:
940 return mentions_vars_p_function (t);
941
942 case TREE_BINFO:
943 return mentions_vars_p_binfo (t);
944
945 case PLACEHOLDER_EXPR:
946 return mentions_vars_p_common (t);
947
948 case BLOCK:
949 case TRANSLATION_UNIT_DECL:
950 case OPTIMIZATION_NODE:
951 case TARGET_OPTION_NODE:
952 break;
953
954 case CONSTRUCTOR:
955 return mentions_vars_p_constructor (t);
956
957 case OMP_CLAUSE:
958 return mentions_vars_p_omp_clause (t);
959
960 default:
961 if (TYPE_P (t))
962 {
963 if (mentions_vars_p_type (t))
964 return true;
965 }
966 else if (EXPR_P (t))
967 {
968 if (mentions_vars_p_expr (t))
969 return true;
970 }
971 else if (CONSTANT_CLASS_P (t))
972 CHECK_NO_VAR (TREE_TYPE (t));
973 else
974 gcc_unreachable ();
975 }
976 return false;
977 }
978
979
980 /* Return the resolution for the decl with index INDEX from DATA_IN. */
981
982 static enum ld_plugin_symbol_resolution
983 get_resolution (struct data_in *data_in, unsigned index)
984 {
985 if (data_in->globals_resolution.exists ())
986 {
987 ld_plugin_symbol_resolution_t ret;
988 /* We can have references to functions that were not emitted, in
989 DECL_FUNCTION_PERSONALITY at least, so we can and indeed have
990 to return LDPR_UNKNOWN in some cases. */
991 if (data_in->globals_resolution.length () <= index)
992 return LDPR_UNKNOWN;
993 ret = data_in->globals_resolution[index];
994 return ret;
995 }
996 else
997 /* Delay resolution finding until decl merging. */
998 return LDPR_UNKNOWN;
999 }
1000
1001 /* We need to record resolutions until the symbol table is read. */
1002 static void
1003 register_resolution (struct lto_file_decl_data *file_data, tree decl,
1004 enum ld_plugin_symbol_resolution resolution)
1005 {
1006 if (resolution == LDPR_UNKNOWN)
1007 return;
1008 if (!file_data->resolution_map)
1009 file_data->resolution_map = pointer_map_create ();
1010 *pointer_map_insert (file_data->resolution_map, decl) = (void *)(size_t)resolution;
1011 }
1012
1013 /* Register DECL with the global symbol table and change its
1014 name if necessary to avoid name clashes for static globals across
1015 different files. */
1016
1017 static void
1018 lto_register_var_decl_in_symtab (struct data_in *data_in, tree decl,
1019 unsigned ix)
1020 {
1021 tree context;
1022
1023 /* Variable has file scope, not local. */
1024 if (!TREE_PUBLIC (decl)
1025 && !((context = decl_function_context (decl))
1026 && auto_var_in_fn_p (decl, context)))
1027 rest_of_decl_compilation (decl, 1, 0);
1028
1029 /* If this variable has already been declared, queue the
1030 declaration for merging. */
1031 if (TREE_PUBLIC (decl))
1032 register_resolution (data_in->file_data,
1033 decl, get_resolution (data_in, ix));
1034 }
1035
1036
1037 /* Register DECL with the global symbol table and change its
1038 name if necessary to avoid name clashes for static globals across
1039 different files. DATA_IN contains descriptors and tables for the
1040 file being read. */
1041
1042 static void
1043 lto_register_function_decl_in_symtab (struct data_in *data_in, tree decl,
1044 unsigned ix)
1045 {
1046 /* If this function has already been declared, queue the
1047 declaration for merging. */
1048 if (TREE_PUBLIC (decl) && !DECL_ABSTRACT (decl))
1049 register_resolution (data_in->file_data,
1050 decl, get_resolution (data_in, ix));
1051 }
1052
1053
1054 /* Re-materialize the type T in the type variant list and
1055 the pointer/reference-to chains. */
1056
1057 static void
1058 lto_fixup_prevailing_type (tree t)
1059 {
1060 /* The following re-creates proper variant lists while fixing up
1061 the variant leaders. We do not stream TYPE_NEXT_VARIANT so the
1062 variant list state before fixup is broken. */
1063
1064 /* If we are not our own variant leader link us into our new leaders
1065 variant list. */
1066 if (TYPE_MAIN_VARIANT (t) != t)
1067 {
1068 tree mv = TYPE_MAIN_VARIANT (t);
1069 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
1070 TYPE_NEXT_VARIANT (mv) = t;
1071 }
1072
1073 /* The following reconstructs the pointer chains
1074 of the new pointed-to type if we are a main variant. We do
1075 not stream those so they are broken before fixup. */
1076 if (TREE_CODE (t) == POINTER_TYPE
1077 && TYPE_MAIN_VARIANT (t) == t)
1078 {
1079 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (TREE_TYPE (t));
1080 TYPE_POINTER_TO (TREE_TYPE (t)) = t;
1081 }
1082 else if (TREE_CODE (t) == REFERENCE_TYPE
1083 && TYPE_MAIN_VARIANT (t) == t)
1084 {
1085 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (TREE_TYPE (t));
1086 TYPE_REFERENCE_TO (TREE_TYPE (t)) = t;
1087 }
1088 }
1089
1090
1091 /* We keep prevailing tree SCCs in a hashtable with manual collision
1092 handling (in case all hashes compare the same) and keep the colliding
1093 entries in the tree_scc->next chain. */
1094
1095 struct tree_scc
1096 {
1097 tree_scc *next;
1098 /* Hash of the whole SCC. */
1099 hashval_t hash;
1100 /* Number of trees in the SCC. */
1101 unsigned len;
1102 /* Number of possible entries into the SCC (tree nodes [0..entry_len-1]
1103 which share the same individual tree hash). */
1104 unsigned entry_len;
1105 /* The members of the SCC.
1106 We only need to remember the first entry node candidate for prevailing
1107 SCCs (but of course have access to all entries for SCCs we are
1108 processing).
1109 ??? For prevailing SCCs we really only need hash and the first
1110 entry candidate, but that's too awkward to implement. */
1111 tree entries[1];
1112 };
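/* Note that ENTRIES above is a trailing array: unify_scc allocates
   sizeof (tree_scc) + (len - 1) * sizeof (tree) bytes so that all LEN
   members fit, while prevailing SCCs copied to the obstack keep only the
   first entry (see the ??? note above). */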
1113
1114 struct tree_scc_hasher : typed_noop_remove <tree_scc>
1115 {
1116 typedef tree_scc value_type;
1117 typedef tree_scc compare_type;
1118 static inline hashval_t hash (const value_type *);
1119 static inline bool equal (const value_type *, const compare_type *);
1120 };
1121
1122 hashval_t
1123 tree_scc_hasher::hash (const value_type *scc)
1124 {
1125 return scc->hash;
1126 }
1127
1128 bool
1129 tree_scc_hasher::equal (const value_type *scc1, const compare_type *scc2)
1130 {
1131 if (scc1->hash != scc2->hash
1132 || scc1->len != scc2->len
1133 || scc1->entry_len != scc2->entry_len)
1134 return false;
1135 return true;
1136 }
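/* Note that the hasher's equal function above is only a coarse filter on
   hash and lengths; the real structural comparison is done by
   compare_tree_sccs, with colliding SCCs chained through tree_scc::next. */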
1137
1138 static hash_table<tree_scc_hasher> *tree_scc_hash;
1139 static struct obstack tree_scc_hash_obstack;
1140
1141 static unsigned long num_merged_types;
1142 static unsigned long num_prevailing_types;
1143 static unsigned long num_type_scc_trees;
1144 static unsigned long total_scc_size;
1145 static unsigned long num_sccs_read;
1146 static unsigned long total_scc_size_merged;
1147 static unsigned long num_sccs_merged;
1148 static unsigned long num_scc_compares;
1149 static unsigned long num_scc_compare_collisions;
1150
1151
1152 /* Compare the two entries T1 and T2 of two SCCs that are possibly equal,
1153 recursing through in-SCC tree edges. Returns true if the SCCs entered
1154 through T1 and T2 are equal and fills in *MAP with the pairs of
1155 SCC entries we visited, starting with (*MAP)[0] = T1 and (*MAP)[1] = T2. */
1156
1157 static bool
1158 compare_tree_sccs_1 (tree t1, tree t2, tree **map)
1159 {
1160 enum tree_code code;
1161
1162 /* Mark already visited nodes. */
1163 TREE_ASM_WRITTEN (t2) = 1;
1164
1165 /* Push the pair onto map. */
1166 (*map)[0] = t1;
1167 (*map)[1] = t2;
1168 *map = *map + 2;
1169
1170 /* Compare value-fields. */
1171 #define compare_values(X) \
1172 do { \
1173 if (X(t1) != X(t2)) \
1174 return false; \
1175 } while (0)
1176
1177 compare_values (TREE_CODE);
1178 code = TREE_CODE (t1);
1179
1180 if (!TYPE_P (t1))
1181 {
1182 compare_values (TREE_SIDE_EFFECTS);
1183 compare_values (TREE_CONSTANT);
1184 compare_values (TREE_READONLY);
1185 compare_values (TREE_PUBLIC);
1186 }
1187 compare_values (TREE_ADDRESSABLE);
1188 compare_values (TREE_THIS_VOLATILE);
1189 if (DECL_P (t1))
1190 compare_values (DECL_UNSIGNED);
1191 else if (TYPE_P (t1))
1192 compare_values (TYPE_UNSIGNED);
1193 if (TYPE_P (t1))
1194 compare_values (TYPE_ARTIFICIAL);
1195 else
1196 compare_values (TREE_NO_WARNING);
1197 compare_values (TREE_NOTHROW);
1198 compare_values (TREE_STATIC);
1199 if (code != TREE_BINFO)
1200 compare_values (TREE_PRIVATE);
1201 compare_values (TREE_PROTECTED);
1202 compare_values (TREE_DEPRECATED);
1203 if (TYPE_P (t1))
1204 {
1205 compare_values (TYPE_SATURATING);
1206 compare_values (TYPE_ADDR_SPACE);
1207 }
1208 else if (code == SSA_NAME)
1209 compare_values (SSA_NAME_IS_DEFAULT_DEF);
1210
1211 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1212 {
1213 if (!wi::eq_p (t1, t2))
1214 return false;
1215 }
1216
1217 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1218 {
1219 /* ??? No suitable compare routine available. */
1220 REAL_VALUE_TYPE r1 = TREE_REAL_CST (t1);
1221 REAL_VALUE_TYPE r2 = TREE_REAL_CST (t2);
1222 if (r1.cl != r2.cl
1223 || r1.decimal != r2.decimal
1224 || r1.sign != r2.sign
1225 || r1.signalling != r2.signalling
1226 || r1.canonical != r2.canonical
1227 || r1.uexp != r2.uexp)
1228 return false;
1229 for (unsigned i = 0; i < SIGSZ; ++i)
1230 if (r1.sig[i] != r2.sig[i])
1231 return false;
1232 }
1233
1234 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1235 if (!fixed_compare (EQ_EXPR,
1236 TREE_FIXED_CST_PTR (t1), TREE_FIXED_CST_PTR (t2)))
1237 return false;
1238
1239
1240 /* We don't want to compare locations, so there is nothing to compare
1241 for TS_DECL_MINIMAL. */
1242
1243 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1244 {
1245 compare_values (DECL_MODE);
1246 compare_values (DECL_NONLOCAL);
1247 compare_values (DECL_VIRTUAL_P);
1248 compare_values (DECL_IGNORED_P);
1249 compare_values (DECL_ABSTRACT);
1250 compare_values (DECL_ARTIFICIAL);
1251 compare_values (DECL_USER_ALIGN);
1252 compare_values (DECL_PRESERVE_P);
1253 compare_values (DECL_EXTERNAL);
1254 compare_values (DECL_GIMPLE_REG_P);
1255 compare_values (DECL_ALIGN);
1256 if (code == LABEL_DECL)
1257 {
1258 compare_values (EH_LANDING_PAD_NR);
1259 compare_values (LABEL_DECL_UID);
1260 }
1261 else if (code == FIELD_DECL)
1262 {
1263 compare_values (DECL_PACKED);
1264 compare_values (DECL_NONADDRESSABLE_P);
1265 compare_values (DECL_OFFSET_ALIGN);
1266 }
1267 else if (code == VAR_DECL)
1268 {
1269 compare_values (DECL_HAS_DEBUG_EXPR_P);
1270 compare_values (DECL_NONLOCAL_FRAME);
1271 }
1272 if (code == RESULT_DECL
1273 || code == PARM_DECL
1274 || code == VAR_DECL)
1275 {
1276 compare_values (DECL_BY_REFERENCE);
1277 if (code == VAR_DECL
1278 || code == PARM_DECL)
1279 compare_values (DECL_HAS_VALUE_EXPR_P);
1280 }
1281 }
1282
1283 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1284 compare_values (DECL_REGISTER);
1285
1286 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1287 {
1288 compare_values (DECL_COMMON);
1289 compare_values (DECL_DLLIMPORT_P);
1290 compare_values (DECL_WEAK);
1291 compare_values (DECL_SEEN_IN_BIND_EXPR_P);
1292 compare_values (DECL_COMDAT);
1293 compare_values (DECL_VISIBILITY);
1294 compare_values (DECL_VISIBILITY_SPECIFIED);
1295 if (code == VAR_DECL)
1296 {
1297 compare_values (DECL_HARD_REGISTER);
1298 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1299 compare_values (DECL_IN_CONSTANT_POOL);
1300 }
1301 }
1302
1303 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1304 {
1305 compare_values (DECL_BUILT_IN_CLASS);
1306 compare_values (DECL_STATIC_CONSTRUCTOR);
1307 compare_values (DECL_STATIC_DESTRUCTOR);
1308 compare_values (DECL_UNINLINABLE);
1309 compare_values (DECL_POSSIBLY_INLINED);
1310 compare_values (DECL_IS_NOVOPS);
1311 compare_values (DECL_IS_RETURNS_TWICE);
1312 compare_values (DECL_IS_MALLOC);
1313 compare_values (DECL_IS_OPERATOR_NEW);
1314 compare_values (DECL_DECLARED_INLINE_P);
1315 compare_values (DECL_STATIC_CHAIN);
1316 compare_values (DECL_NO_INLINE_WARNING_P);
1317 compare_values (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT);
1318 compare_values (DECL_NO_LIMIT_STACK);
1319 compare_values (DECL_DISREGARD_INLINE_LIMITS);
1320 compare_values (DECL_PURE_P);
1321 compare_values (DECL_LOOPING_CONST_OR_PURE_P);
1322 compare_values (DECL_FINAL_P);
1323 compare_values (DECL_CXX_CONSTRUCTOR_P);
1324 compare_values (DECL_CXX_DESTRUCTOR_P);
1325 if (DECL_BUILT_IN_CLASS (t1) != NOT_BUILT_IN)
1326 compare_values (DECL_FUNCTION_CODE);
1327 }
1328
1329 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1330 {
1331 compare_values (TYPE_MODE);
1332 compare_values (TYPE_STRING_FLAG);
1333 compare_values (TYPE_NO_FORCE_BLK);
1334 compare_values (TYPE_NEEDS_CONSTRUCTING);
1335 if (RECORD_OR_UNION_TYPE_P (t1))
1336 {
1337 compare_values (TYPE_TRANSPARENT_AGGR);
1338 compare_values (TYPE_FINAL_P);
1339 }
1340 else if (code == ARRAY_TYPE)
1341 compare_values (TYPE_NONALIASED_COMPONENT);
1342 compare_values (TYPE_PACKED);
1343 compare_values (TYPE_RESTRICT);
1344 compare_values (TYPE_USER_ALIGN);
1345 compare_values (TYPE_READONLY);
1346 compare_values (TYPE_PRECISION);
1347 compare_values (TYPE_ALIGN);
1348 compare_values (TYPE_ALIAS_SET);
1349 }
1350
1351 /* We don't want to compare locations, so there is nothing to compare
1352 for TS_EXP. */
1353
1354 /* BLOCKs are function local and we don't merge anything there, so
1355 simply refuse to merge. */
1356 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
1357 return false;
1358
1359 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1360 if (strcmp (TRANSLATION_UNIT_LANGUAGE (t1),
1361 TRANSLATION_UNIT_LANGUAGE (t2)) != 0)
1362 return false;
1363
1364 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
1365 gcc_unreachable ();
1366
1367 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1368 if (memcmp (TREE_OPTIMIZATION (t1), TREE_OPTIMIZATION (t2),
1369 sizeof (struct cl_optimization)) != 0)
1370 return false;
1371
1372 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1373 if (vec_safe_length (BINFO_BASE_ACCESSES (t1))
1374 != vec_safe_length (BINFO_BASE_ACCESSES (t2)))
1375 return false;
1376
1377 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1378 compare_values (CONSTRUCTOR_NELTS);
1379
1380 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1381 if (IDENTIFIER_LENGTH (t1) != IDENTIFIER_LENGTH (t2)
1382 || memcmp (IDENTIFIER_POINTER (t1), IDENTIFIER_POINTER (t2),
1383 IDENTIFIER_LENGTH (t1)) != 0)
1384 return false;
1385
1386 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1387 if (TREE_STRING_LENGTH (t1) != TREE_STRING_LENGTH (t2)
1388 || memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
1389 TREE_STRING_LENGTH (t1)) != 0)
1390 return false;
1391
1392 if (code == OMP_CLAUSE)
1393 {
1394 compare_values (OMP_CLAUSE_CODE);
1395 switch (OMP_CLAUSE_CODE (t1))
1396 {
1397 case OMP_CLAUSE_DEFAULT:
1398 compare_values (OMP_CLAUSE_DEFAULT_KIND);
1399 break;
1400 case OMP_CLAUSE_SCHEDULE:
1401 compare_values (OMP_CLAUSE_SCHEDULE_KIND);
1402 break;
1403 case OMP_CLAUSE_DEPEND:
1404 compare_values (OMP_CLAUSE_DEPEND_KIND);
1405 break;
1406 case OMP_CLAUSE_MAP:
1407 compare_values (OMP_CLAUSE_MAP_KIND);
1408 break;
1409 case OMP_CLAUSE_PROC_BIND:
1410 compare_values (OMP_CLAUSE_PROC_BIND_KIND);
1411 break;
1412 case OMP_CLAUSE_REDUCTION:
1413 compare_values (OMP_CLAUSE_REDUCTION_CODE);
1414 compare_values (OMP_CLAUSE_REDUCTION_GIMPLE_INIT);
1415 compare_values (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE);
1416 break;
1417 default:
1418 break;
1419 }
1420 }
1421
1422 #undef compare_values
1423
1424
1425 /* Compare pointer fields. */
1426
1427 /* Recurse. Search & Replaced from DFS_write_tree_body.
1428 Folding the early checks into the compare_tree_edges recursion
1429 macro makes debugging way quicker as you are able to break on
1430 compare_tree_sccs_1 and simply finish until a call returns false
1431 to spot the SCC members with the difference. */
1432 #define compare_tree_edges(E1, E2) \
1433 do { \
1434 tree t1_ = (E1), t2_ = (E2); \
1435 if (t1_ != t2_ \
1436 && (!t1_ || !t2_ \
1437 || !TREE_VISITED (t2_) \
1438 || (!TREE_ASM_WRITTEN (t2_) \
1439 && !compare_tree_sccs_1 (t1_, t2_, map)))) \
1440 return false; \
1441 /* Only non-NULL trees outside of the SCC may compare equal. */ \
1442 gcc_checking_assert (t1_ != t2_ || (!t2_ || !TREE_VISITED (t2_))); \
1443 } while (0)
1444
1445 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1446 {
1447 if (code != IDENTIFIER_NODE)
1448 compare_tree_edges (TREE_TYPE (t1), TREE_TYPE (t2));
1449 }
1450
1451 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1452 {
1453 unsigned i;
1454 /* Note that the number of elements for EXPR has already been emitted
1455 in EXPR's header (see streamer_write_tree_header). */
1456 for (i = 0; i < VECTOR_CST_NELTS (t1); ++i)
1457 compare_tree_edges (VECTOR_CST_ELT (t1, i), VECTOR_CST_ELT (t2, i));
1458 }
1459
1460 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1461 {
1462 compare_tree_edges (TREE_REALPART (t1), TREE_REALPART (t2));
1463 compare_tree_edges (TREE_IMAGPART (t1), TREE_IMAGPART (t2));
1464 }
1465
1466 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1467 {
1468 compare_tree_edges (DECL_NAME (t1), DECL_NAME (t2));
1469 /* ??? Global decls from different TUs have non-matching
1470 TRANSLATION_UNIT_DECLs. Only consider a small set of
1471 decls equivalent, we should not end up merging others. */
1472 if ((code == TYPE_DECL
1473 || code == NAMESPACE_DECL
1474 || code == IMPORTED_DECL
1475 || code == CONST_DECL
1476 || (VAR_OR_FUNCTION_DECL_P (t1)
1477 && (TREE_PUBLIC (t1) || DECL_EXTERNAL (t1))))
1478 && DECL_FILE_SCOPE_P (t1) && DECL_FILE_SCOPE_P (t2))
1479 ;
1480 else
1481 compare_tree_edges (DECL_CONTEXT (t1), DECL_CONTEXT (t2));
1482 }
1483
1484 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1485 {
1486 compare_tree_edges (DECL_SIZE (t1), DECL_SIZE (t2));
1487 compare_tree_edges (DECL_SIZE_UNIT (t1), DECL_SIZE_UNIT (t2));
1488 compare_tree_edges (DECL_ATTRIBUTES (t1), DECL_ATTRIBUTES (t2));
1489 if ((code == VAR_DECL
1490 || code == PARM_DECL)
1491 && DECL_HAS_VALUE_EXPR_P (t1))
1492 compare_tree_edges (DECL_VALUE_EXPR (t1), DECL_VALUE_EXPR (t2));
1493 if (code == VAR_DECL
1494 && DECL_HAS_DEBUG_EXPR_P (t1))
1495 compare_tree_edges (DECL_DEBUG_EXPR (t1), DECL_DEBUG_EXPR (t2));
1496 /* LTO specific edges. */
1497 if (code != FUNCTION_DECL
1498 && code != TRANSLATION_UNIT_DECL)
1499 compare_tree_edges (DECL_INITIAL (t1), DECL_INITIAL (t2));
1500 }
1501
1502 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1503 {
1504 if (code == FUNCTION_DECL)
1505 {
1506 tree a1, a2;
1507 for (a1 = DECL_ARGUMENTS (t1), a2 = DECL_ARGUMENTS (t2);
1508 a1 || a2;
1509 a1 = TREE_CHAIN (a1), a2 = TREE_CHAIN (a2))
1510 compare_tree_edges (a1, a2);
1511 compare_tree_edges (DECL_RESULT (t1), DECL_RESULT (t2));
1512 }
1513 else if (code == TYPE_DECL)
1514 compare_tree_edges (DECL_ORIGINAL_TYPE (t1), DECL_ORIGINAL_TYPE (t2));
1515 compare_tree_edges (DECL_VINDEX (t1), DECL_VINDEX (t2));
1516 }
1517
1518 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1519 {
1520 /* Make sure we don't inadvertently set the assembler name. */
1521 if (DECL_ASSEMBLER_NAME_SET_P (t1))
1522 compare_tree_edges (DECL_ASSEMBLER_NAME (t1),
1523 DECL_ASSEMBLER_NAME (t2));
1524 }
1525
1526 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1527 {
1528 compare_tree_edges (DECL_FIELD_OFFSET (t1), DECL_FIELD_OFFSET (t2));
1529 compare_tree_edges (DECL_BIT_FIELD_TYPE (t1), DECL_BIT_FIELD_TYPE (t2));
1530 compare_tree_edges (DECL_BIT_FIELD_REPRESENTATIVE (t1),
1531 DECL_BIT_FIELD_REPRESENTATIVE (t2));
1532 compare_tree_edges (DECL_FIELD_BIT_OFFSET (t1),
1533 DECL_FIELD_BIT_OFFSET (t2));
1534 compare_tree_edges (DECL_FCONTEXT (t1), DECL_FCONTEXT (t2));
1535 }
1536
1537 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1538 {
1539 compare_tree_edges (DECL_FUNCTION_PERSONALITY (t1),
1540 DECL_FUNCTION_PERSONALITY (t2));
1541 /* DECL_FUNCTION_SPECIFIC_TARGET is not yet created. We compare
1542 the attribute list instead. */
1543 compare_tree_edges (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t1),
1544 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t2));
1545 }
1546
1547 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1548 {
1549 compare_tree_edges (TYPE_SIZE (t1), TYPE_SIZE (t2));
1550 compare_tree_edges (TYPE_SIZE_UNIT (t1), TYPE_SIZE_UNIT (t2));
1551 compare_tree_edges (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2));
1552 compare_tree_edges (TYPE_NAME (t1), TYPE_NAME (t2));
1553 /* Do not compare TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
1554 reconstructed during fixup. */
1555 /* Do not compare TYPE_NEXT_VARIANT, we reconstruct the variant lists
1556 during fixup. */
1557 compare_tree_edges (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2));
1558 /* ??? Global types from different TUs have non-matching
1559 TRANSLATION_UNIT_DECLs. Still merge them if they are otherwise
1560 equal. */
1561 if (TYPE_FILE_SCOPE_P (t1) && TYPE_FILE_SCOPE_P (t2))
1562 ;
1563 else
1564 compare_tree_edges (TYPE_CONTEXT (t1), TYPE_CONTEXT (t2));
1565 /* TYPE_CANONICAL is re-computed during type merging, so do not
1566 compare it here. */
1567 compare_tree_edges (TYPE_STUB_DECL (t1), TYPE_STUB_DECL (t2));
1568 }
1569
1570 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1571 {
1572 if (code == ENUMERAL_TYPE)
1573 compare_tree_edges (TYPE_VALUES (t1), TYPE_VALUES (t2));
1574 else if (code == ARRAY_TYPE)
1575 compare_tree_edges (TYPE_DOMAIN (t1), TYPE_DOMAIN (t2));
1576 else if (RECORD_OR_UNION_TYPE_P (t1))
1577 {
1578 tree f1, f2;
1579 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
1580 f1 || f2;
1581 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1582 compare_tree_edges (f1, f2);
1583 compare_tree_edges (TYPE_BINFO (t1), TYPE_BINFO (t2));
1584 }
1585 else if (code == FUNCTION_TYPE
1586 || code == METHOD_TYPE)
1587 compare_tree_edges (TYPE_ARG_TYPES (t1), TYPE_ARG_TYPES (t2));
1588 if (!POINTER_TYPE_P (t1))
1589 compare_tree_edges (TYPE_MINVAL (t1), TYPE_MINVAL (t2));
1590 compare_tree_edges (TYPE_MAXVAL (t1), TYPE_MAXVAL (t2));
1591 }
1592
1593 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1594 {
1595 compare_tree_edges (TREE_PURPOSE (t1), TREE_PURPOSE (t2));
1596 compare_tree_edges (TREE_VALUE (t1), TREE_VALUE (t2));
1597 compare_tree_edges (TREE_CHAIN (t1), TREE_CHAIN (t2));
1598 }
1599
1600 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1601 for (int i = 0; i < TREE_VEC_LENGTH (t1); i++)
1602 compare_tree_edges (TREE_VEC_ELT (t1, i), TREE_VEC_ELT (t2, i));
1603
1604 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1605 {
1606 for (int i = 0; i < TREE_OPERAND_LENGTH (t1); i++)
1607 compare_tree_edges (TREE_OPERAND (t1, i),
1608 TREE_OPERAND (t2, i));
1609
1610 /* BLOCKs are function local and we don't merge anything there. */
1611 if (TREE_BLOCK (t1) || TREE_BLOCK (t2))
1612 return false;
1613 }
1614
1615 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1616 {
1617 unsigned i;
1618 tree t;
1619 /* Lengths have already been compared above. */
1620 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t1), i, t)
1621 compare_tree_edges (t, BINFO_BASE_BINFO (t2, i));
1622 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t1), i, t)
1623 compare_tree_edges (t, BINFO_BASE_ACCESS (t2, i));
1624 compare_tree_edges (BINFO_OFFSET (t1), BINFO_OFFSET (t2));
1625 compare_tree_edges (BINFO_VTABLE (t1), BINFO_VTABLE (t2));
1626 compare_tree_edges (BINFO_VPTR_FIELD (t1), BINFO_VPTR_FIELD (t2));
1627 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1628 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1629 }
1630
1631 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1632 {
1633 unsigned i;
1634 tree index, value;
1635 /* Lengths have already been compared above. */
1636 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t1), i, index, value)
1637 {
1638 compare_tree_edges (index, CONSTRUCTOR_ELT (t2, i)->index);
1639 compare_tree_edges (value, CONSTRUCTOR_ELT (t2, i)->value);
1640 }
1641 }
1642
1643 if (code == OMP_CLAUSE)
1644 {
1645 int i;
1646
1647 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t1)]; i++)
1648 compare_tree_edges (OMP_CLAUSE_OPERAND (t1, i),
1649 OMP_CLAUSE_OPERAND (t2, i));
1650 compare_tree_edges (OMP_CLAUSE_CHAIN (t1), OMP_CLAUSE_CHAIN (t2));
1651 }
1652
1653 #undef compare_tree_edges
1654
1655 return true;
1656 }
1657
1658 /* Compare the tree scc SCC to the prevailing candidate PSCC, filling
1659 out MAP if they are equal. */
1660
1661 static bool
1662 compare_tree_sccs (tree_scc *pscc, tree_scc *scc,
1663 tree *map)
1664 {
1665 /* Assume SCC entry hashes are sorted by their cardinality, which
1666 means we can simply take the first n-tuple of equal hashes
1667 (which is recorded as entry_len) and do n SCC entry candidate
1668 comparisons. */
1669 for (unsigned i = 0; i < pscc->entry_len; ++i)
1670 {
1671 tree *mapp = map;
1672 num_scc_compare_collisions++;
1673 if (compare_tree_sccs_1 (pscc->entries[0], scc->entries[i], &mapp))
1674 {
1675 /* Equal - no need to reset TREE_VISITED or TREE_ASM_WRITTEN
1676 on the scc as all trees will be freed. */
1677 return true;
1678 }
1679 /* Reset TREE_ASM_WRITTEN on scc for the next compare or in case
1680 the SCC prevails. */
1681 for (unsigned j = 0; j < scc->len; ++j)
1682 TREE_ASM_WRITTEN (scc->entries[j]) = 0;
1683 }
1684
1685 return false;
1686 }
1687
1688 /* qsort comparison function to sort a map of pointer pairs by the
1689 second pointer. */
1690
1691 static int
1692 cmp_tree (const void *p1_, const void *p2_)
1693 {
1694 tree *p1 = (tree *)(const_cast<void *>(p1_));
1695 tree *p2 = (tree *)(const_cast<void *>(p2_));
1696 if (p1[1] == p2[1])
1697 return 0;
1698 return ((uintptr_t)p1[1] < (uintptr_t)p2[1]) ? -1 : 1;
1699 }
1700
1701 /* Try to unify the SCC with nodes FROM to FROM + LEN in CACHE and
1702 hash value SCC_HASH with an already recorded SCC. Return true if
1703 that was successful, otherwise return false. */
1704
1705 static bool
1706 unify_scc (struct streamer_tree_cache_d *cache, unsigned from,
1707 unsigned len, unsigned scc_entry_len, hashval_t scc_hash)
1708 {
1709 bool unified_p = false;
1710 tree_scc *scc
1711 = (tree_scc *) alloca (sizeof (tree_scc) + (len - 1) * sizeof (tree));
1712 scc->next = NULL;
1713 scc->hash = scc_hash;
1714 scc->len = len;
1715 scc->entry_len = scc_entry_len;
1716 for (unsigned i = 0; i < len; ++i)
1717 {
1718 tree t = streamer_tree_cache_get_tree (cache, from + i);
1719 scc->entries[i] = t;
1720 /* Do not merge SCCs with local entities inside them. Also do
1721 not merge TRANSLATION_UNIT_DECLs. */
1722 if (TREE_CODE (t) == TRANSLATION_UNIT_DECL
1723 || (VAR_OR_FUNCTION_DECL_P (t)
1724 && !(TREE_PUBLIC (t) || DECL_EXTERNAL (t)))
1725 || TREE_CODE (t) == LABEL_DECL)
1726 {
1727 /* Avoid doing any work for these cases and do not bother
1728 recording the SCCs for further merging. */
1729 return false;
1730 }
1731 }
1732
1733 /* Look for the list of candidate SCCs to compare against. */
1734 tree_scc **slot;
1735 slot = tree_scc_hash->find_slot_with_hash (scc, scc_hash, INSERT);
1736 if (*slot)
1737 {
1738 /* Try unifying against each candidate. */
1739 num_scc_compares++;
1740
1741 /* Set TREE_VISITED on the scc so we can easily identify tree nodes
1742 outside of the scc when following tree edges. Make sure
1743 that TREE_ASM_WRITTEN is unset so we can use it as 2nd bit
1744 to track whether we visited the SCC member during the compare.
1745 We cannot use TREE_VISITED on the pscc members as the extended
1746 scc and pscc can overlap. */
1747 for (unsigned i = 0; i < scc->len; ++i)
1748 {
1749 TREE_VISITED (scc->entries[i]) = 1;
1750 gcc_checking_assert (!TREE_ASM_WRITTEN (scc->entries[i]));
1751 }
1752
1753 tree *map = XALLOCAVEC (tree, 2 * len);
1754 for (tree_scc *pscc = *slot; pscc; pscc = pscc->next)
1755 {
1756 if (!compare_tree_sccs (pscc, scc, map))
1757 continue;
1758
1759 /* Found an equal SCC. */
1760 unified_p = true;
1761 num_scc_compare_collisions--;
1762 num_sccs_merged++;
1763 total_scc_size_merged += len;
1764
1765 #ifdef ENABLE_CHECKING
1766 for (unsigned i = 0; i < len; ++i)
1767 {
1768 tree t = map[2*i+1];
1769 enum tree_code code = TREE_CODE (t);
1770 /* IDENTIFIER_NODEs should be singletons and are merged by the
1771 streamer. The others should be singletons, too, and we
1772 should not merge them in any way. */
1773 gcc_assert (code != TRANSLATION_UNIT_DECL
1774 && code != IDENTIFIER_NODE
1775 && !streamer_handle_as_builtin_p (t));
1776 }
1777 #endif
1778
1779 /* Fixup the streamer cache with the prevailing nodes according
1780 to the tree node mapping computed by compare_tree_sccs. */
1781 if (len == 1)
1782 streamer_tree_cache_replace_tree (cache, pscc->entries[0], from);
1783 else
1784 {
1785 tree *map2 = XALLOCAVEC (tree, 2 * len);
1786 for (unsigned i = 0; i < len; ++i)
1787 {
1788 map2[i*2] = (tree)(uintptr_t)(from + i);
1789 map2[i*2+1] = scc->entries[i];
1790 }
1791 qsort (map2, len, 2 * sizeof (tree), cmp_tree);
1792 qsort (map, len, 2 * sizeof (tree), cmp_tree);
1793 for (unsigned i = 0; i < len; ++i)
1794 streamer_tree_cache_replace_tree (cache, map[2*i],
1795 (uintptr_t)map2[2*i]);
1796 }
1797
1798 /* Free the tree nodes from the read SCC. */
1799 for (unsigned i = 0; i < len; ++i)
1800 {
1801 enum tree_code code;
1802 if (TYPE_P (scc->entries[i]))
1803 num_merged_types++;
1804 code = TREE_CODE (scc->entries[i]);
1805 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1806 vec_free (CONSTRUCTOR_ELTS (scc->entries[i]));
1807 ggc_free (scc->entries[i]);
1808 }
1809
1810 break;
1811 }
1812
1813 /* Reset TREE_VISITED if we didn't unify the SCC with another. */
1814 if (!unified_p)
1815 for (unsigned i = 0; i < scc->len; ++i)
1816 TREE_VISITED (scc->entries[i]) = 0;
1817 }
1818
1819 /* If we didn't unify it to any candidate, duplicate the relevant
1820 pieces to permanent storage and link it into the chain. */
1821 if (!unified_p)
1822 {
1823 tree_scc *pscc
1824 = XOBNEWVAR (&tree_scc_hash_obstack, tree_scc, sizeof (tree_scc));
1825 memcpy (pscc, scc, sizeof (tree_scc));
1826 pscc->next = (*slot);
1827 *slot = pscc;
1828 }
1829 return unified_p;
1830 }
1831
1832
1833 /* Read all the symbols from buffer DATA, using descriptors in DECL_DATA.
1834 RESOLUTIONS is the set of symbols picked by the linker (read from the
1835 resolution file when the linker plugin is being used). */
1836
1837 static void
1838 lto_read_decls (struct lto_file_decl_data *decl_data, const void *data,
1839 vec<ld_plugin_symbol_resolution_t> resolutions)
1840 {
1841 const struct lto_decl_header *header = (const struct lto_decl_header *) data;
1842 const int decl_offset = sizeof (struct lto_decl_header);
1843 const int main_offset = decl_offset + header->decl_state_size;
1844 const int string_offset = main_offset + header->main_size;
1845 struct lto_input_block ib_main;
1846 struct data_in *data_in;
1847 unsigned int i;
1848 const uint32_t *data_ptr, *data_end;
1849 uint32_t num_decl_states;
1850
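/* Descriptive note: per the offsets computed above, the decls section is
   laid out as an lto_decl_header, followed by decl_state_size bytes of
   decl-state data, main_size bytes of the main tree stream and finally
   string_size bytes of string table.  */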
1851 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
1852 header->main_size);
1853
1854 data_in = lto_data_in_create (decl_data, (const char *) data + string_offset,
1855 header->string_size, resolutions);
1856
1857 /* We do not uniquify the pre-loaded cache entries, those are middle-end
1858 internal types that should not be merged. */
1859
1860 /* Read the global declarations and types. */
1861 while (ib_main.p < ib_main.len)
1862 {
1863 tree t;
1864 unsigned from = data_in->reader_cache->nodes.length ();
1865 /* Read and uniquify SCCs as in the input stream. */
1866 enum LTO_tags tag = streamer_read_record_start (&ib_main);
1867 if (tag == LTO_tree_scc)
1868 {
1869 unsigned len_;
1870 unsigned scc_entry_len;
1871 hashval_t scc_hash = lto_input_scc (&ib_main, data_in, &len_,
1872 &scc_entry_len);
1873 unsigned len = data_in->reader_cache->nodes.length () - from;
1874 gcc_assert (len == len_);
1875
1876 total_scc_size += len;
1877 num_sccs_read++;
1878
1879 /* We have the special case of size-1 SCCs that are pre-merged
1880 by means of identifier and string sharing for example.
1881 ??? Maybe we should avoid streaming those as SCCs. */
1882 tree first = streamer_tree_cache_get_tree (data_in->reader_cache,
1883 from);
1884 if (len == 1
1885 && (TREE_CODE (first) == IDENTIFIER_NODE
1886 || TREE_CODE (first) == INTEGER_CST
1887 || TREE_CODE (first) == TRANSLATION_UNIT_DECL
1888 || streamer_handle_as_builtin_p (first)))
1889 continue;
1890
1891 /* Try to unify the SCC with already existing ones. */
1892 if (!flag_ltrans
1893 && unify_scc (data_in->reader_cache, from,
1894 len, scc_entry_len, scc_hash))
1895 continue;
1896
1897 /* Do remaining fixup tasks for prevailing nodes. */
1898 bool seen_type = false;
1899 for (unsigned i = 0; i < len; ++i)
1900 {
1901 tree t = streamer_tree_cache_get_tree (data_in->reader_cache,
1902 from + i);
1903 /* Reconstruct the type variant and pointer-to/reference-to
1904 chains. */
1905 if (TYPE_P (t))
1906 {
1907 seen_type = true;
1908 num_prevailing_types++;
1909 lto_fixup_prevailing_type (t);
1910 }
1911 /* Compute the canonical type of all types.
1912 ??? Should be able to assert that !TYPE_CANONICAL. */
1913 if (TYPE_P (t) && !TYPE_CANONICAL (t))
1914 gimple_register_canonical_type (t);
1915 /* Link shared INTEGER_CSTs into the TYPE_CACHED_VALUES of
1916 their type, which is also a member of this SCC. */
1917 if (TREE_CODE (t) == INTEGER_CST
1918 && !TREE_OVERFLOW (t))
1919 cache_integer_cst (t);
1920 /* Re-build DECL_FUNCTION_SPECIFIC_TARGET; we need it in
1921 both the WPA and LTRANS stages. */
1922 if (TREE_CODE (t) == FUNCTION_DECL)
1923 {
1924 tree attr = lookup_attribute ("target", DECL_ATTRIBUTES (t));
1925 if (attr)
1926 targetm.target_option.valid_attribute_p
1927 (t, NULL_TREE, TREE_VALUE (attr), 0);
1928 }
1929 /* Register TYPE_DECLs with the debuginfo machinery. */
1930 if (!flag_wpa
1931 && TREE_CODE (t) == TYPE_DECL)
1932 debug_hooks->type_decl (t, !DECL_FILE_SCOPE_P (t));
1933 if (!flag_ltrans)
1934 {
1935 /* Register variables and functions with the
1936 symbol table. */
1937 if (TREE_CODE (t) == VAR_DECL)
1938 lto_register_var_decl_in_symtab (data_in, t, from + i);
1939 else if (TREE_CODE (t) == FUNCTION_DECL
1940 && !DECL_BUILT_IN (t))
1941 lto_register_function_decl_in_symtab (data_in, t, from + i);
1942 /* Scan the tree for references to global functions or
1943 variables and record those for later fixup. */
1944 if (mentions_vars_p (t))
1945 vec_safe_push (tree_with_vars, t);
1946 }
1947 }
1948 if (seen_type)
1949 num_type_scc_trees += len;
1950 }
1951 else
1952 {
1953 /* Pickle stray references. */
1954 t = lto_input_tree_1 (&ib_main, data_in, tag, 0);
1955 gcc_assert (t && data_in->reader_cache->nodes.length () == from);
1956 }
1957 }
1958
1959 /* Read in lto_in_decl_state objects. */
1960 data_ptr = (const uint32_t *) ((const char*) data + decl_offset);
1961 data_end =
1962 (const uint32_t *) ((const char*) data_ptr + header->decl_state_size);
1963 num_decl_states = *data_ptr++;
1964
1965 gcc_assert (num_decl_states > 0);
1966 decl_data->global_decl_state = lto_new_in_decl_state ();
1967 data_ptr = lto_read_in_decl_state (data_in, data_ptr,
1968 decl_data->global_decl_state);
1969
1970 /* Read in per-function decl states and enter them in hash table. */
1971 decl_data->function_decl_states =
1972 htab_create_ggc (37, lto_hash_in_decl_state, lto_eq_in_decl_state, NULL);
1973
1974 for (i = 1; i < num_decl_states; i++)
1975 {
1976 struct lto_in_decl_state *state = lto_new_in_decl_state ();
1977 void **slot;
1978
1979 data_ptr = lto_read_in_decl_state (data_in, data_ptr, state);
1980 slot = htab_find_slot (decl_data->function_decl_states, state, INSERT);
1981 gcc_assert (*slot == NULL);
1982 *slot = state;
1983 }
1984
1985 if (data_ptr != data_end)
1986 internal_error ("bytecode stream: garbage at the end of symbols section");
1987
1988 /* Set the current decl state to be the global state. */
1989 decl_data->current_decl_state = decl_data->global_decl_state;
1990
1991 lto_data_in_delete (data_in);
1992 }
1993
1994 /* Custom hex parser used instead of strtoll, which is not portable. */
1995
1996 static int64_t
1997 lto_parse_hex (const char *p)
1998 {
1999 int64_t ret = 0;
2000
2001 for (; *p != '\0'; ++p)
2002 {
2003 char c = *p;
2004 unsigned char part;
2005 ret <<= 4;
2006 if (c >= '0' && c <= '9')
2007 part = c - '0';
2008 else if (c >= 'a' && c <= 'f')
2009 part = c - 'a' + 10;
2010 else if (c >= 'A' && c <= 'F')
2011 part = c - 'A' + 10;
2012 else
2013 internal_error ("could not parse hex number");
2014 ret |= part;
2015 }
2016
2017 return ret;
2018 }
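/* For example, the parser above maps "1a2B" to 0x1a2b; the input is expected
   to be a bare hex string with no "0x" prefix (illustrative note).  */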
2019
2020 /* Read the resolution entry for FILE from RESOLUTION and record the
2021 per-symbol resolutions in the file data looked up via FILE_IDS. */
2022
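/* Illustrative sketch of one object entry as implied by the fscanf/fread
   calls below (the file itself is written by the linker plugin; the leading
   count of objects is consumed by the caller):

     /tmp/ccXXXXXX.o@0x1a40
     2
     5 89abcdef PREVAILING_DEF foo
     9 89abcdef RESOLVED_IR bar

   The "@0x<offset>" suffix appears only for objects at a non-zero offset
   (e.g. inside an archive), and anything after the resolution keyword on a
   symbol line is skipped.  */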
2023 static void
2024 lto_resolution_read (splay_tree file_ids, FILE *resolution, lto_file *file)
2025 {
2026 /* We require that objects in the resolution file are in the same
2027 order as the lto1 command line. */
2028 unsigned int name_len;
2029 char *obj_name;
2030 unsigned int num_symbols;
2031 unsigned int i;
2032 struct lto_file_decl_data *file_data;
2033 splay_tree_node nd = NULL;
2034
2035 if (!resolution)
2036 return;
2037
2038 name_len = strlen (file->filename);
2039 obj_name = XNEWVEC (char, name_len + 1);
2040 fscanf (resolution, " "); /* Read white space. */
2041
2042 fread (obj_name, sizeof (char), name_len, resolution);
2043 obj_name[name_len] = '\0';
2044 if (filename_cmp (obj_name, file->filename) != 0)
2045 internal_error ("unexpected file name %s in linker resolution file. "
2046 "Expected %s", obj_name, file->filename);
2047 if (file->offset != 0)
2048 {
2049 int t;
2050 char offset_p[17];
2051 int64_t offset;
2052 t = fscanf (resolution, "@0x%16s", offset_p);
2053 if (t != 1)
2054 internal_error ("could not parse file offset");
2055 offset = lto_parse_hex (offset_p);
2056 if (offset != file->offset)
2057 internal_error ("unexpected offset");
2058 }
2059
2060 free (obj_name);
2061
2062 fscanf (resolution, "%u", &num_symbols);
2063
2064 for (i = 0; i < num_symbols; i++)
2065 {
2066 int t;
2067 unsigned index;
2068 unsigned HOST_WIDE_INT id;
2069 char r_str[27];
2070 enum ld_plugin_symbol_resolution r = (enum ld_plugin_symbol_resolution) 0;
2071 unsigned int j;
2072 unsigned int lto_resolution_str_len =
2073 sizeof (lto_resolution_str) / sizeof (char *);
2074 res_pair rp;
2075
2076 t = fscanf (resolution, "%u " HOST_WIDE_INT_PRINT_HEX_PURE " %26s %*[^\n]\n",
2077 &index, &id, r_str);
2078 if (t != 3)
2079 internal_error ("invalid line in the resolution file");
2080
2081 for (j = 0; j < lto_resolution_str_len; j++)
2082 {
2083 if (strcmp (lto_resolution_str[j], r_str) == 0)
2084 {
2085 r = (enum ld_plugin_symbol_resolution) j;
2086 break;
2087 }
2088 }
2089 if (j == lto_resolution_str_len)
2090 internal_error ("invalid resolution in the resolution file");
2091
2092 if (!(nd && lto_splay_tree_id_equal_p (nd->key, id)))
2093 {
2094 nd = lto_splay_tree_lookup (file_ids, id);
2095 if (nd == NULL)
2096 internal_error ("resolution sub id %wx not in object file", id);
2097 }
2098
2099 file_data = (struct lto_file_decl_data *)nd->value;
2100 /* The indexes are very sparse. To save memory save them in a compact
2101 format that is only unpacked later when the subfile is processed. */
2102 rp.res = r;
2103 rp.index = index;
2104 file_data->respairs.safe_push (rp);
2105 if (file_data->max_index < index)
2106 file_data->max_index = index;
2107 }
2108 }
2109
2110 /* List of file_decl_datas */
2111 struct file_data_list
2112 {
2113 struct lto_file_decl_data *first, *last;
2114 };
2115
2116 /* Is NAME the name of an id'ed LTO section?  If so, store the id in *ID. */
2117
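/* E.g. a section whose name starts with LTO_SECTION_NAME_PREFIX and ends in
   ".2822629a" yields the sub id 0x2822629a; the id is parsed from the
   hexadecimal suffix after the last '.' (descriptive note).  */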
2118 static int
2119 lto_section_with_id (const char *name, unsigned HOST_WIDE_INT *id)
2120 {
2121 const char *s;
2122
2123 if (strncmp (name, LTO_SECTION_NAME_PREFIX, strlen (LTO_SECTION_NAME_PREFIX)))
2124 return 0;
2125 s = strrchr (name, '.');
2126 return s && sscanf (s, "." HOST_WIDE_INT_PRINT_HEX_PURE, id) == 1;
2127 }
2128
2129 /* Create file_data of each sub file id */
2130
2131 static int
2132 create_subid_section_table (struct lto_section_slot *ls, splay_tree file_ids,
2133 struct file_data_list *list)
2134 {
2135 struct lto_section_slot s_slot, *new_slot;
2136 unsigned HOST_WIDE_INT id;
2137 splay_tree_node nd;
2138 void **hash_slot;
2139 char *new_name;
2140 struct lto_file_decl_data *file_data;
2141
2142 if (!lto_section_with_id (ls->name, &id))
2143 return 1;
2144
2145 /* Find hash table of sub module id */
2146 nd = lto_splay_tree_lookup (file_ids, id);
2147 if (nd != NULL)
2148 {
2149 file_data = (struct lto_file_decl_data *)nd->value;
2150 }
2151 else
2152 {
2153 file_data = ggc_alloc<lto_file_decl_data> ();
2154 memset(file_data, 0, sizeof (struct lto_file_decl_data));
2155 file_data->id = id;
2156 file_data->section_hash_table = lto_obj_create_section_hash_table ();
2157 lto_splay_tree_insert (file_ids, id, file_data);
2158
2159 /* Maintain list in linker order */
2160 if (!list->first)
2161 list->first = file_data;
2162 if (list->last)
2163 list->last->next = file_data;
2164 list->last = file_data;
2165 }
2166
2167 /* Copy section into sub module hash table */
2168 new_name = XDUPVEC (char, ls->name, strlen (ls->name) + 1);
2169 s_slot.name = new_name;
2170 hash_slot = htab_find_slot (file_data->section_hash_table, &s_slot, INSERT);
2171 gcc_assert (*hash_slot == NULL);
2172
2173 new_slot = XDUP (struct lto_section_slot, ls);
2174 new_slot->name = new_name;
2175 *hash_slot = new_slot;
2176 return 1;
2177 }
2178
2179 /* Read declarations and other initializations for a FILE_DATA. */
2180
2181 static void
2182 lto_file_finalize (struct lto_file_decl_data *file_data, lto_file *file)
2183 {
2184 const char *data;
2185 size_t len;
2186 vec<ld_plugin_symbol_resolution_t>
2187 resolutions = vNULL;
2188 int i;
2189 res_pair *rp;
2190
2191 /* Create vector for fast access of resolution. We do this lazily
2192 to save memory. */
2193 resolutions.safe_grow_cleared (file_data->max_index + 1);
2194 for (i = 0; file_data->respairs.iterate (i, &rp); i++)
2195 resolutions[rp->index] = rp->res;
2196 file_data->respairs.release ();
2197
2198 file_data->renaming_hash_table = lto_create_renaming_table ();
2199 file_data->file_name = file->filename;
2200 data = lto_get_section_data (file_data, LTO_section_decls, NULL, &len);
2201 if (data == NULL)
2202 {
2203 internal_error ("cannot read LTO decls from %s", file_data->file_name);
2204 return;
2205 }
2206 /* Frees resolutions */
2207 lto_read_decls (file_data, data, resolutions);
2208 lto_free_section_data (file_data, LTO_section_decls, NULL, data, len);
2209 }
2210
2211 /* Finalize FILE_DATA in FILE and increase COUNT. */
2212
2213 static int
2214 lto_create_files_from_ids (lto_file *file, struct lto_file_decl_data *file_data,
2215 int *count)
2216 {
2217 lto_file_finalize (file_data, file);
2218 if (cgraph_dump_file)
2219 fprintf (cgraph_dump_file, "Creating file %s with sub id " HOST_WIDE_INT_PRINT_HEX "\n",
2220 file_data->file_name, file_data->id);
2221 (*count)++;
2222 return 0;
2223 }
2224
2225 /* Generate a TREE representation for all types and external decl
2226 entities in FILE.
2227
2228 Read all of the globals out of the file. Then read the cgraph
2229 and process the .o index into the cgraph nodes so that it can open
2230 the .o file to load the functions and ipa information. */
2231
2232 static struct lto_file_decl_data *
2233 lto_file_read (lto_file *file, FILE *resolution_file, int *count)
2234 {
2235 struct lto_file_decl_data *file_data = NULL;
2236 splay_tree file_ids;
2237 htab_t section_hash_table;
2238 struct lto_section_slot *section;
2239 struct file_data_list file_list;
2240 struct lto_section_list section_list;
2241
2242 memset (&section_list, 0, sizeof (struct lto_section_list));
2243 section_hash_table = lto_obj_build_section_table (file, &section_list);
2244
2245 /* Find all sub modules in the object and put their sections into new hash
2246 tables in a splay tree. */
2247 file_ids = lto_splay_tree_new ();
2248 memset (&file_list, 0, sizeof (struct file_data_list));
2249 for (section = section_list.first; section != NULL; section = section->next)
2250 create_subid_section_table (section, file_ids, &file_list);
2251
2252 /* Add resolutions to file ids */
2253 lto_resolution_read (file_ids, resolution_file, file);
2254
2255 /* Finalize each lto file for each submodule in the merged object */
2256 for (file_data = file_list.first; file_data != NULL; file_data = file_data->next)
2257 lto_create_files_from_ids (file, file_data, count);
2258
2259 splay_tree_delete (file_ids);
2260 htab_delete (section_hash_table);
2261
2262 return file_list.first;
2263 }
2264
2265 #if HAVE_MMAP_FILE && HAVE_SYSCONF && defined _SC_PAGE_SIZE
2266 #define LTO_MMAP_IO 1
2267 #endif
2268
2269 #if LTO_MMAP_IO
2270 /* Page size of machine is used for mmap and munmap calls. */
2271 static size_t page_mask;
2272 #endif
2273
2274 /* Get the section data of length LEN from the file behind FILE_DATA,
2275 starting at OFFSET. The data segment must be freed by the caller
2276 when the caller is finished. Returns NULL if all was not well. */
2277
2278 static char *
2279 lto_read_section_data (struct lto_file_decl_data *file_data,
2280 intptr_t offset, size_t len)
2281 {
2282 char *result;
2283 static int fd = -1;
2284 static char *fd_name;
2285 #if LTO_MMAP_IO
2286 intptr_t computed_len;
2287 intptr_t computed_offset;
2288 intptr_t diff;
2289 #endif
2290
2291 /* Keep a single-entry file-descriptor cache. The last file we
2292 touched will get closed at exit.
2293 ??? Eventually we want to add a more sophisticated larger cache
2294 or rather fix function body streaming to not stream them in
2295 practically random order. */
2296 if (fd != -1
2297 && filename_cmp (fd_name, file_data->file_name) != 0)
2298 {
2299 free (fd_name);
2300 close (fd);
2301 fd = -1;
2302 }
2303 if (fd == -1)
2304 {
2305 fd = open (file_data->file_name, O_RDONLY|O_BINARY);
2306 if (fd == -1)
2307 {
2308 fatal_error ("Cannot open %s", file_data->file_name);
2309 return NULL;
2310 }
2311 fd_name = xstrdup (file_data->file_name);
2312 }
2313
2314 #if LTO_MMAP_IO
2315 if (!page_mask)
2316 {
2317 size_t page_size = sysconf (_SC_PAGE_SIZE);
2318 page_mask = ~(page_size - 1);
2319 }
2320
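/* mmap requires a page-aligned file offset, so round OFFSET down to a page
   boundary, map DIFF extra leading bytes and return a pointer DIFF bytes
   into the mapping; free_section_data undoes the same arithmetic before
   calling munmap.  */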
2321 computed_offset = offset & page_mask;
2322 diff = offset - computed_offset;
2323 computed_len = len + diff;
2324
2325 result = (char *) mmap (NULL, computed_len, PROT_READ, MAP_PRIVATE,
2326 fd, computed_offset);
2327 if (result == MAP_FAILED)
2328 {
2329 fatal_error ("Cannot map %s", file_data->file_name);
2330 return NULL;
2331 }
2332
2333 return result + diff;
2334 #else
2335 result = (char *) xmalloc (len);
2336 if (lseek (fd, offset, SEEK_SET) != offset
2337 || read (fd, result, len) != (ssize_t) len)
2338 {
2339 free (result);
2340 fatal_error ("Cannot read %s", file_data->file_name);
2341 result = NULL;
2342 }
2343 #ifdef __MINGW32__
2344 /* Native windows doesn't support delayed unlink of open files. So we
2345 close the file here again. This produces higher I/O load, but at least
2346 it prevents dangling file handles from blocking the unlink. */
2347 free (fd_name);
2348 fd_name = NULL;
2349 close (fd);
2350 fd = -1;
2351 #endif
2352 return result;
2353 #endif
2354 }
2355
2356
2357 /* Get the section data from FILE_DATA of SECTION_TYPE with NAME.
2358 NAME will be NULL unless the section type is for a function
2359 body. */
2360
2361 static const char *
2362 get_section_data (struct lto_file_decl_data *file_data,
2363 enum lto_section_type section_type,
2364 const char *name,
2365 size_t *len)
2366 {
2367 htab_t section_hash_table = file_data->section_hash_table;
2368 struct lto_section_slot *f_slot;
2369 struct lto_section_slot s_slot;
2370 const char *section_name = lto_get_section_name (section_type, name, file_data);
2371 char *data = NULL;
2372
2373 *len = 0;
2374 s_slot.name = section_name;
2375 f_slot = (struct lto_section_slot *) htab_find (section_hash_table, &s_slot);
2376 if (f_slot)
2377 {
2378 data = lto_read_section_data (file_data, f_slot->start, f_slot->len);
2379 *len = f_slot->len;
2380 }
2381
2382 free (CONST_CAST (char *, section_name));
2383 return data;
2384 }
2385
2386
2387 /* Free the section data from FILE_DATA of SECTION_TYPE with NAME that
2388 starts at OFFSET and has LEN bytes. */
2389
2390 static void
2391 free_section_data (struct lto_file_decl_data *file_data ATTRIBUTE_UNUSED,
2392 enum lto_section_type section_type ATTRIBUTE_UNUSED,
2393 const char *name ATTRIBUTE_UNUSED,
2394 const char *offset, size_t len ATTRIBUTE_UNUSED)
2395 {
2396 #if LTO_MMAP_IO
2397 intptr_t computed_len;
2398 intptr_t computed_offset;
2399 intptr_t diff;
2400 #endif
2401
2402 #if LTO_MMAP_IO
2403 computed_offset = ((intptr_t) offset) & page_mask;
2404 diff = (intptr_t) offset - computed_offset;
2405 computed_len = len + diff;
2406
2407 munmap ((caddr_t) computed_offset, computed_len);
2408 #else
2409 free (CONST_CAST(char *, offset));
2410 #endif
2411 }
2412
2413 static lto_file *current_lto_file;
2414
2415 /* Helper for qsort; order partitions by descending size. We sort from
2416 greatest to smallest so a parallel build doesn't stall on the longest
2417 compilation being started too late. */
2418
2419 static int
2420 cmp_partitions_size (const void *a, const void *b)
2421 {
2422 const struct ltrans_partition_def *pa
2423 = *(struct ltrans_partition_def *const *)a;
2424 const struct ltrans_partition_def *pb
2425 = *(struct ltrans_partition_def *const *)b;
2426 return pb->insns - pa->insns;
2427 }
2428
2429 /* Helper for qsort; order partitions by the order of their first symbol, largest first. */
2430
2431 static int
2432 cmp_partitions_order (const void *a, const void *b)
2433 {
2434 const struct ltrans_partition_def *pa
2435 = *(struct ltrans_partition_def *const *)a;
2436 const struct ltrans_partition_def *pb
2437 = *(struct ltrans_partition_def *const *)b;
2438 int ordera = -1, orderb = -1;
2439
2440 if (lto_symtab_encoder_size (pa->encoder))
2441 ordera = lto_symtab_encoder_deref (pa->encoder, 0)->order;
2442 if (lto_symtab_encoder_size (pb->encoder))
2443 orderb = lto_symtab_encoder_deref (pb->encoder, 0)->order;
2444 return orderb - ordera;
2445 }
2446
2447 /* Actually stream out ENCODER into TEMP_FILENAME. */
2448
2449 static void
2450 do_stream_out (char *temp_filename, lto_symtab_encoder_t encoder)
2451 {
2452 lto_file *file = lto_obj_file_open (temp_filename, true);
2453 if (!file)
2454 fatal_error ("lto_obj_file_open() failed");
2455 lto_set_current_out_file (file);
2456
2457 ipa_write_optimization_summaries (encoder);
2458
2459 lto_set_current_out_file (NULL);
2460 lto_obj_file_close (file);
2461 free (file);
2462 }
2463
2464 /* Wait for forked process and signal errors. */
2465 #ifdef HAVE_WORKING_FORK
2466 static void
2467 wait_for_child ()
2468 {
2469 int status;
2470 do
2471 {
2472 #ifndef WCONTINUED
2473 #define WCONTINUED 0
2474 #endif
2475 int w = waitpid (0, &status, WUNTRACED | WCONTINUED);
2476 if (w == -1)
2477 fatal_error ("waitpid failed");
2478
2479 if (WIFEXITED (status) && WEXITSTATUS (status))
2480 fatal_error ("streaming subprocess failed");
2481 else if (WIFSIGNALED (status))
2482 fatal_error ("streaming subprocess was killed by signal");
2483 }
2484 while (!WIFEXITED (status) && !WIFSIGNALED (status));
2485 }
2486 #endif
2487
2488 /* Stream out ENCODER into TEMP_FILENAME.
2489 Fork if that seems to help. */
2490
2491 static void
2492 stream_out (char *temp_filename, lto_symtab_encoder_t encoder, bool last)
2493 {
2494 #ifdef HAVE_WORKING_FORK
2495 static int nruns;
2496
2497 if (lto_parallelism <= 1)
2498 {
2499 do_stream_out (temp_filename, encoder);
2500 return;
2501 }
2502
2503 /* Do not run more than LTO_PARALLELISM streamings in parallel.
2504 FIXME: we ignore limits on jobserver. */
2505 if (lto_parallelism > 0 && nruns >= lto_parallelism)
2506 {
2507 wait_for_child ();
2508 nruns --;
2509 }
2510 /* If this is not the last parallel partition, execute a new
2511 streaming process. */
2512 if (!last)
2513 {
2514 pid_t cpid = fork ();
2515
2516 if (!cpid)
2517 {
2518 setproctitle ("lto1-wpa-streaming");
2519 do_stream_out (temp_filename, encoder);
2520 exit (0);
2521 }
2522 /* Fork failed; let's do the job ourselves. */
2523 else if (cpid == -1)
2524 do_stream_out (temp_filename, encoder);
2525 else
2526 nruns++;
2527 }
2528 /* Last partition; stream it and wait for all children to die. */
2529 else
2530 {
2531 int i;
2532 do_stream_out (temp_filename, encoder);
2533 for (i = 0; i < nruns; i++)
2534 wait_for_child ();
2535 }
2536 asm_nodes_output = true;
2537 #else
2538 do_stream_out (temp_filename, encoder);
2539 #endif
2540 }
2541
2542 /* Write all output files in WPA mode and the file with the list of
2543 LTRANS units. */
2544
2545 static void
2546 lto_wpa_write_files (void)
2547 {
2548 unsigned i, n_sets;
2549 ltrans_partition part;
2550 FILE *ltrans_output_list_stream;
2551 char *temp_filename;
2552 vec <char *>temp_filenames = vNULL;
2553 size_t blen;
2554
2555 /* Open the LTRANS output list. */
2556 if (!ltrans_output_list)
2557 fatal_error ("no LTRANS output list filename provided");
2558
2559 timevar_push (TV_WHOPR_WPA);
2560
2561 FOR_EACH_VEC_ELT (ltrans_partitions, i, part)
2562 lto_stats.num_output_symtab_nodes += lto_symtab_encoder_size (part->encoder);
2563
2564 timevar_pop (TV_WHOPR_WPA);
2565
2566 timevar_push (TV_WHOPR_WPA_IO);
2567
2568 /* Generate a prefix for the LTRANS unit files. */
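/* E.g. with an output list named "foo.ltrans.out" the units written below
   become "foo.ltrans0.o", "foo.ltrans1.o", ...; the buffer leaves room for
   the largest possible partition number (illustrative note).  */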
2569 blen = strlen (ltrans_output_list);
2570 temp_filename = (char *) xmalloc (blen + sizeof ("2147483648.o"));
2571 strcpy (temp_filename, ltrans_output_list);
2572 if (blen > sizeof (".out")
2573 && strcmp (temp_filename + blen - sizeof (".out") + 1,
2574 ".out") == 0)
2575 temp_filename[blen - sizeof (".out") + 1] = '\0';
2576 blen = strlen (temp_filename);
2577
2578 n_sets = ltrans_partitions.length ();
2579
2580 /* Sort partitions by size so small ones are compiled last.
2581 FIXME: Even when not reordering we may want to output one list for parallel make
2582 and another for the final link command. */
2583
2584 if (!flag_profile_reorder_functions || !flag_profile_use)
2585 ltrans_partitions.qsort (flag_toplevel_reorder
2586 ? cmp_partitions_size
2587 : cmp_partitions_order);
2588
2589 for (i = 0; i < n_sets; i++)
2590 {
2591 ltrans_partition part = ltrans_partitions[i];
2592
2593 /* Write all the nodes in SET. */
2594 sprintf (temp_filename + blen, "%u.o", i);
2595
2596 if (!quiet_flag)
2597 fprintf (stderr, " %s (%s %i insns)", temp_filename, part->name, part->insns);
2598 if (cgraph_dump_file)
2599 {
2600 lto_symtab_encoder_iterator lsei;
2601
2602 fprintf (cgraph_dump_file, "Writing partition %s to file %s, %i insns\n",
2603 part->name, temp_filename, part->insns);
2604 fprintf (cgraph_dump_file, " Symbols in partition: ");
2605 for (lsei = lsei_start_in_partition (part->encoder); !lsei_end_p (lsei);
2606 lsei_next_in_partition (&lsei))
2607 {
2608 symtab_node *node = lsei_node (lsei);
2609 fprintf (cgraph_dump_file, "%s ", node->asm_name ());
2610 }
2611 fprintf (cgraph_dump_file, "\n Symbols in boundary: ");
2612 for (lsei = lsei_start (part->encoder); !lsei_end_p (lsei);
2613 lsei_next (&lsei))
2614 {
2615 symtab_node *node = lsei_node (lsei);
2616 if (!lto_symtab_encoder_in_partition_p (part->encoder, node))
2617 {
2618 fprintf (cgraph_dump_file, "%s ", node->asm_name ());
2619 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
2620 if (cnode
2621 && lto_symtab_encoder_encode_body_p (part->encoder, cnode))
2622 fprintf (cgraph_dump_file, "(body included)");
2623 else
2624 {
2625 varpool_node *vnode = dyn_cast <varpool_node *> (node);
2626 if (vnode
2627 && lto_symtab_encoder_encode_initializer_p (part->encoder, vnode))
2628 fprintf (cgraph_dump_file, "(initializer included)");
2629 }
2630 }
2631 }
2632 fprintf (cgraph_dump_file, "\n");
2633 }
2634 gcc_checking_assert (lto_symtab_encoder_size (part->encoder) || !i);
2635
2636 stream_out (temp_filename, part->encoder, i == n_sets - 1);
2637
2638 part->encoder = NULL;
2639
2640 temp_filenames.safe_push (xstrdup (temp_filename));
2641 }
2642 ltrans_output_list_stream = fopen (ltrans_output_list, "w");
2643 if (ltrans_output_list_stream == NULL)
2644 fatal_error ("opening LTRANS output list %s: %m", ltrans_output_list);
2645 for (i = 0; i < n_sets; i++)
2646 {
2647 unsigned int len = strlen (temp_filenames[i]);
2648 if (fwrite (temp_filenames[i], 1, len, ltrans_output_list_stream) < len
2649 || fwrite ("\n", 1, 1, ltrans_output_list_stream) < 1)
2650 fatal_error ("writing to LTRANS output list %s: %m",
2651 ltrans_output_list);
2652 free (temp_filenames[i]);
2653 }
2654 temp_filenames.release();
2655
2656 lto_stats.num_output_files += n_sets;
2657
2658 /* Close the LTRANS output list. */
2659 if (fclose (ltrans_output_list_stream))
2660 fatal_error ("closing LTRANS output list %s: %m", ltrans_output_list);
2661
2662 free_ltrans_partitions();
2663 free (temp_filename);
2664
2665 timevar_pop (TV_WHOPR_WPA_IO);
2666 }
2667
2668
2669 /* If TT is a variable or function decl replace it with its
2670 prevailing variant. */
2671 #define LTO_SET_PREVAIL(tt) \
2672 do {\
2673 if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \
2674 && (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \
2675 { \
2676 tt = lto_symtab_prevailing_decl (tt); \
2677 fixed = true; \
2678 } \
2679 } while (0)
2680
2681 /* Ensure that TT isn't a replaceable var or function decl. */
2682 #define LTO_NO_PREVAIL(tt) \
2683 gcc_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
2684
2685 /* Given a tree T replace all fields referring to variables or functions
2686 with their prevailing variant. */
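/* Descriptive note: fields that can never refer to a public VAR_DECL or
   FUNCTION_DECL are only checked with LTO_NO_PREVAIL; the walk has to stay
   in sync with mentions_vars_p, and the final assert checks that at least
   one field was actually replaced.  */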
2687 static void
2688 lto_fixup_prevailing_decls (tree t)
2689 {
2690 enum tree_code code = TREE_CODE (t);
2691 bool fixed = false;
2692
2693 gcc_checking_assert (code != TREE_BINFO);
2694 LTO_NO_PREVAIL (TREE_TYPE (t));
2695 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
2696 LTO_NO_PREVAIL (TREE_CHAIN (t));
2697 if (DECL_P (t))
2698 {
2699 LTO_NO_PREVAIL (DECL_NAME (t));
2700 LTO_SET_PREVAIL (DECL_CONTEXT (t));
2701 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
2702 {
2703 LTO_SET_PREVAIL (DECL_SIZE (t));
2704 LTO_SET_PREVAIL (DECL_SIZE_UNIT (t));
2705 LTO_SET_PREVAIL (DECL_INITIAL (t));
2706 LTO_NO_PREVAIL (DECL_ATTRIBUTES (t));
2707 LTO_SET_PREVAIL (DECL_ABSTRACT_ORIGIN (t));
2708 }
2709 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
2710 {
2711 LTO_NO_PREVAIL (t->decl_with_vis.assembler_name);
2712 }
2713 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
2714 {
2715 LTO_NO_PREVAIL (DECL_ARGUMENT_FLD (t));
2716 LTO_NO_PREVAIL (DECL_RESULT_FLD (t));
2717 LTO_NO_PREVAIL (DECL_VINDEX (t));
2718 }
2719 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
2720 LTO_SET_PREVAIL (DECL_FUNCTION_PERSONALITY (t));
2721 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
2722 {
2723 LTO_SET_PREVAIL (DECL_FIELD_OFFSET (t));
2724 LTO_NO_PREVAIL (DECL_BIT_FIELD_TYPE (t));
2725 LTO_NO_PREVAIL (DECL_QUALIFIER (t));
2726 LTO_NO_PREVAIL (DECL_FIELD_BIT_OFFSET (t));
2727 LTO_NO_PREVAIL (DECL_FCONTEXT (t));
2728 }
2729 }
2730 else if (TYPE_P (t))
2731 {
2732 LTO_NO_PREVAIL (TYPE_CACHED_VALUES (t));
2733 LTO_SET_PREVAIL (TYPE_SIZE (t));
2734 LTO_SET_PREVAIL (TYPE_SIZE_UNIT (t));
2735 LTO_NO_PREVAIL (TYPE_ATTRIBUTES (t));
2736 LTO_NO_PREVAIL (TYPE_NAME (t));
2737
2738 LTO_SET_PREVAIL (TYPE_MINVAL (t));
2739 LTO_SET_PREVAIL (TYPE_MAXVAL (t));
2740 LTO_NO_PREVAIL (t->type_non_common.binfo);
2741
2742 LTO_SET_PREVAIL (TYPE_CONTEXT (t));
2743
2744 LTO_NO_PREVAIL (TYPE_CANONICAL (t));
2745 LTO_NO_PREVAIL (TYPE_MAIN_VARIANT (t));
2746 LTO_NO_PREVAIL (TYPE_NEXT_VARIANT (t));
2747 }
2748 else if (EXPR_P (t))
2749 {
2750 int i;
2751 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
2752 LTO_SET_PREVAIL (TREE_OPERAND (t, i));
2753 }
2754 else if (TREE_CODE (t) == CONSTRUCTOR)
2755 {
2756 unsigned i;
2757 tree val;
2758 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
2759 LTO_SET_PREVAIL (val);
2760 }
2761 else
2762 {
2763 switch (code)
2764 {
2765 case TREE_LIST:
2766 LTO_SET_PREVAIL (TREE_VALUE (t));
2767 LTO_SET_PREVAIL (TREE_PURPOSE (t));
2768 LTO_NO_PREVAIL (TREE_PURPOSE (t));
2769 break;
2770 default:
2771 gcc_unreachable ();
2772 }
2773 }
2774 /* If we fixed nothing, then we missed something seen by
2775 mentions_vars_p. */
2776 gcc_checking_assert (fixed);
2777 }
2778 #undef LTO_SET_PREVAIL
2779 #undef LTO_NO_PREVAIL
2780
2781 /* Helper function of lto_fixup_decls. Walks the var and fn streams in STATE,
2782 replaces var and function decls with the corresponding prevailing def. */
2783
2784 static void
2785 lto_fixup_state (struct lto_in_decl_state *state)
2786 {
2787 unsigned i, si;
2788 struct lto_tree_ref_table *table;
2789
2790 /* Although we only want to replace FUNCTION_DECLs and VAR_DECLs,
2791 we still need to walk from all DECLs to find the reachable
2792 FUNCTION_DECLs and VAR_DECLs. */
2793 for (si = 0; si < LTO_N_DECL_STREAMS; si++)
2794 {
2795 table = &state->streams[si];
2796 for (i = 0; i < table->size; i++)
2797 {
2798 tree *tp = table->trees + i;
2799 if (VAR_OR_FUNCTION_DECL_P (*tp)
2800 && (TREE_PUBLIC (*tp) || DECL_EXTERNAL (*tp)))
2801 *tp = lto_symtab_prevailing_decl (*tp);
2802 }
2803 }
2804 }
2805
2806 /* A callback of htab_traverse. Just extracts a state from SLOT
2807 and calls lto_fixup_state. */
2808
2809 static int
2810 lto_fixup_state_aux (void **slot, void *aux ATTRIBUTE_UNUSED)
2811 {
2812 struct lto_in_decl_state *state = (struct lto_in_decl_state *) *slot;
2813 lto_fixup_state (state);
2814 return 1;
2815 }
2816
2817 /* Fix the decls from all FILES. Replaces each decl with the corresponding
2818 prevailing one. */
2819
2820 static void
2821 lto_fixup_decls (struct lto_file_decl_data **files)
2822 {
2823 unsigned int i;
2824 tree t;
2825
2826 if (tree_with_vars)
2827 FOR_EACH_VEC_ELT ((*tree_with_vars), i, t)
2828 lto_fixup_prevailing_decls (t);
2829
2830 for (i = 0; files[i]; i++)
2831 {
2832 struct lto_file_decl_data *file = files[i];
2833 struct lto_in_decl_state *state = file->global_decl_state;
2834 lto_fixup_state (state);
2835
2836 htab_traverse (file->function_decl_states, lto_fixup_state_aux, NULL);
2837 }
2838 }
2839
2840 static GTY((length ("lto_stats.num_input_files + 1"))) struct lto_file_decl_data **all_file_decl_data;
2841
2842 /* Turn the file data for sub files into a single array, so that they look
2843 like separate files to further passes. */
2844
2845 static void
2846 lto_flatten_files (struct lto_file_decl_data **orig, int count, int last_file_ix)
2847 {
2848 struct lto_file_decl_data *n, *next;
2849 int i, k;
2850
2851 lto_stats.num_input_files = count;
2852 all_file_decl_data
2853 = ggc_cleared_vec_alloc<lto_file_decl_data_ptr> (count + 1);
2854 /* Set the hooks so that all of the ipa passes can read in their data. */
2855 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
2856 for (i = 0, k = 0; i < last_file_ix; i++)
2857 {
2858 for (n = orig[i]; n != NULL; n = next)
2859 {
2860 all_file_decl_data[k++] = n;
2861 next = n->next;
2862 n->next = NULL;
2863 }
2864 }
2865 all_file_decl_data[k] = NULL;
2866 gcc_assert (k == count);
2867 }
2868
2869 /* Input file data before flattening (i.e. splitting them to subfiles to support
2870 incremental linking). */
2871 static int real_file_count;
2872 static GTY((length ("real_file_count + 1"))) struct lto_file_decl_data **real_file_decl_data;
2873
2874 static void print_lto_report_1 (void);
2875
2876 /* Read all the symbols from the input files FNAMES. NFILES is the
2877 number of files requested in the command line. Instantiate a
2878 global call graph by aggregating all the sub-graphs found in each
2879 file. */
2880
2881 static void
2882 read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
2883 {
2884 unsigned int i, last_file_ix;
2885 FILE *resolution;
2886 int count = 0;
2887 struct lto_file_decl_data **decl_data;
2888 void **res;
2889 symtab_node *snode;
2890
2891 init_cgraph ();
2892
2893 timevar_push (TV_IPA_LTO_DECL_IN);
2894
2895 real_file_decl_data
2896 = decl_data = ggc_cleared_vec_alloc<lto_file_decl_data_ptr> (nfiles + 1);
2897 real_file_count = nfiles;
2898
2899 /* Read the resolution file. */
2900 resolution = NULL;
2901 if (resolution_file_name)
2902 {
2903 int t;
2904 unsigned num_objects;
2905
2906 resolution = fopen (resolution_file_name, "r");
2907 if (resolution == NULL)
2908 fatal_error ("could not open symbol resolution file: %m");
2909
2910 t = fscanf (resolution, "%u", &num_objects);
2911 gcc_assert (t == 1);
2912
2913 /* True, since the plugin splits the archives. */
2914 gcc_assert (num_objects == nfiles);
2915 }
2916 cgraph_state = CGRAPH_LTO_STREAMING;
2917
2918 canonical_type_hash_cache = new hash_map<const_tree, hashval_t> (251);
2919 gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
2920 gimple_canonical_type_eq, 0);
2921 gcc_obstack_init (&tree_scc_hash_obstack);
2922 tree_scc_hash = new hash_table<tree_scc_hasher> (4096);
2923
2924 /* Register the common node types with the canonical type machinery so
2925 we properly share alias-sets across languages and TUs. Do not
2926 expose the common nodes as type merge targets - those that should be
2927 already are, by pre-loading the LTO streamer caches.
2928 Do two passes - first clear TYPE_CANONICAL and then re-compute it. */
2929 for (i = 0; i < itk_none; ++i)
2930 lto_register_canonical_types (integer_types[i], true);
2931 for (i = 0; i < stk_type_kind_last; ++i)
2932 lto_register_canonical_types (sizetype_tab[i], true);
2933 for (i = 0; i < TI_MAX; ++i)
2934 lto_register_canonical_types (global_trees[i], true);
2935 for (i = 0; i < itk_none; ++i)
2936 lto_register_canonical_types (integer_types[i], false);
2937 for (i = 0; i < stk_type_kind_last; ++i)
2938 lto_register_canonical_types (sizetype_tab[i], false);
2939 for (i = 0; i < TI_MAX; ++i)
2940 lto_register_canonical_types (global_trees[i], false);
2941
2942 if (!quiet_flag)
2943 fprintf (stderr, "Reading object files:");
2944
2945 /* Read all of the object files specified on the command line. */
2946 for (i = 0, last_file_ix = 0; i < nfiles; ++i)
2947 {
2948 struct lto_file_decl_data *file_data = NULL;
2949 if (!quiet_flag)
2950 {
2951 fprintf (stderr, " %s", fnames[i]);
2952 fflush (stderr);
2953 }
2954
2955 current_lto_file = lto_obj_file_open (fnames[i], false);
2956 if (!current_lto_file)
2957 break;
2958
2959 file_data = lto_file_read (current_lto_file, resolution, &count);
2960 if (!file_data)
2961 {
2962 lto_obj_file_close (current_lto_file);
2963 free (current_lto_file);
2964 current_lto_file = NULL;
2965 break;
2966 }
2967
2968 decl_data[last_file_ix++] = file_data;
2969
2970 lto_obj_file_close (current_lto_file);
2971 free (current_lto_file);
2972 current_lto_file = NULL;
2973 }
2974
2975 lto_flatten_files (decl_data, count, last_file_ix);
2976 lto_stats.num_input_files = count;
2977 ggc_free(decl_data);
2978 real_file_decl_data = NULL;
2979
2980 if (resolution_file_name)
2981 fclose (resolution);
2982
2983 /* Show the LTO report before launching LTRANS. */
2984 if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
2985 print_lto_report_1 ();
2986
2987 /* Free gimple type merging datastructures. */
2988 delete tree_scc_hash;
2989 tree_scc_hash = NULL;
2990 obstack_free (&tree_scc_hash_obstack, NULL);
2991 htab_delete (gimple_canonical_types);
2992 gimple_canonical_types = NULL;
2993 delete canonical_type_hash_cache;
2994 canonical_type_hash_cache = NULL;
2995
2996 /* At this stage we know that the majority of GGC memory is reachable.
2997 Growing the limits prevents unnecessary invocations of the GGC. */
2998 ggc_grow ();
2999 ggc_collect ();
3000
3001 /* Set the hooks so that all of the ipa passes can read in their data. */
3002 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
3003
3004 timevar_pop (TV_IPA_LTO_DECL_IN);
3005
3006 if (!quiet_flag)
3007 fprintf (stderr, "\nReading the callgraph\n");
3008
3009 timevar_push (TV_IPA_LTO_CGRAPH_IO);
3010 /* Read the symtab. */
3011 input_symtab ();
3012
3013 /* Store resolutions into the symbol table. */
3014
3015 FOR_EACH_SYMBOL (snode)
3016 if (symtab_real_symbol_p (snode)
3017 && snode->lto_file_data
3018 && snode->lto_file_data->resolution_map
3019 && (res = pointer_map_contains (snode->lto_file_data->resolution_map,
3020 snode->decl)))
3021 snode->resolution
3022 = (enum ld_plugin_symbol_resolution)(size_t)*res;
3023 for (i = 0; all_file_decl_data[i]; i++)
3024 if (all_file_decl_data[i]->resolution_map)
3025 {
3026 pointer_map_destroy (all_file_decl_data[i]->resolution_map);
3027 all_file_decl_data[i]->resolution_map = NULL;
3028 }
3029
3030 timevar_pop (TV_IPA_LTO_CGRAPH_IO);
3031
3032 if (!quiet_flag)
3033 fprintf (stderr, "Merging declarations\n");
3034
3035 timevar_push (TV_IPA_LTO_DECL_MERGE);
3036 /* Merge global decls. In ltrans mode we read merged cgraph, we do not
3037 need to care about resolving symbols again, we only need to replace
3038 duplicated declarations read from the callgraph and from function
3039 sections. */
3040 if (!flag_ltrans)
3041 {
3042 lto_symtab_merge_decls ();
3043
3044 /* If there were errors during symbol merging bail out, we have no
3045 good way to recover here. */
3046 if (seen_error ())
3047 fatal_error ("errors during merging of translation units");
3048
3049 /* Fixup all decls. */
3050 lto_fixup_decls (all_file_decl_data);
3051 }
3052 if (tree_with_vars)
3053 ggc_free (tree_with_vars);
3054 tree_with_vars = NULL;
3055 ggc_collect ();
3056
3057 timevar_pop (TV_IPA_LTO_DECL_MERGE);
3058 /* Each pass will set the appropriate timer. */
3059
3060 if (!quiet_flag)
3061 fprintf (stderr, "Reading summaries\n");
3062
3063 /* Read the IPA summary data. */
3064 if (flag_ltrans)
3065 ipa_read_optimization_summaries ();
3066 else
3067 ipa_read_summaries ();
3068
3069 for (i = 0; all_file_decl_data[i]; i++)
3070 {
3071 gcc_assert (all_file_decl_data[i]->symtab_node_encoder);
3072 lto_symtab_encoder_delete (all_file_decl_data[i]->symtab_node_encoder);
3073 all_file_decl_data[i]->symtab_node_encoder = NULL;
3074 lto_free_function_in_decl_state (all_file_decl_data[i]->global_decl_state);
3075 all_file_decl_data[i]->global_decl_state = NULL;
3076 all_file_decl_data[i]->current_decl_state = NULL;
3077 }
3078
3079 /* Finally merge the cgraph according to the decl merging decisions. */
3080 timevar_push (TV_IPA_LTO_CGRAPH_MERGE);
3081 if (cgraph_dump_file)
3082 {
3083 fprintf (cgraph_dump_file, "Before merging:\n");
3084 dump_symtab (cgraph_dump_file);
3085 }
3086 lto_symtab_merge_symbols ();
3087 /* Removal of unreachable symbols is needed to make verify_symtab pass;
3088 we still have duplicated comdat groups containing local statics.
3089 We could also just remove them while merging. */
3090 symtab_remove_unreachable_nodes (false, dump_file);
3091 ggc_collect ();
3092 cgraph_state = CGRAPH_STATE_IPA_SSA;
3093
3094 timevar_pop (TV_IPA_LTO_CGRAPH_MERGE);
3095
3096 timevar_push (TV_IPA_LTO_DECL_INIT_IO);
3097
3098 /* Indicate that the cgraph is built and ready. */
3099 cgraph_function_flags_ready = true;
3100
3101 timevar_pop (TV_IPA_LTO_DECL_INIT_IO);
3102 ggc_free (all_file_decl_data);
3103 all_file_decl_data = NULL;
3104 }
3105
3106
3107 /* Materialize all the bodies for all the nodes in the callgraph. */
3108
3109 static void
3110 materialize_cgraph (void)
3111 {
3112 struct cgraph_node *node;
3113 timevar_id_t lto_timer;
3114
3115 if (!quiet_flag)
3116 fprintf (stderr,
3117 flag_wpa ? "Materializing decls:" : "Reading function bodies:");
3118
3119 /* Now that we have input the cgraph, we need to clear all of the aux
3120 nodes and read the functions if we are not running in WPA mode. */
3121 timevar_push (TV_IPA_LTO_GIMPLE_IN);
3122
3123 FOR_EACH_FUNCTION (node)
3124 {
3125 if (node->lto_file_data)
3126 {
3127 lto_materialize_function (node);
3128 lto_stats.num_input_cgraph_nodes++;
3129 }
3130 }
3131
3132 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
3133
3134 /* Start the appropriate timer depending on the mode that we are
3135 operating in. */
3136 lto_timer = (flag_wpa) ? TV_WHOPR_WPA
3137 : (flag_ltrans) ? TV_WHOPR_LTRANS
3138 : TV_LTO;
3139 timevar_push (lto_timer);
3140
3141 current_function_decl = NULL;
3142 set_cfun (NULL);
3143
3144 if (!quiet_flag)
3145 fprintf (stderr, "\n");
3146
3147 timevar_pop (lto_timer);
3148 }
3149
3150
3151 /* Show various memory usage statistics related to LTO. */
3152 static void
3153 print_lto_report_1 (void)
3154 {
3155 const char *pfx = (flag_lto) ? "LTO" : (flag_wpa) ? "WPA" : "LTRANS";
3156 fprintf (stderr, "%s statistics\n", pfx);
3157
3158 fprintf (stderr, "[%s] read %lu SCCs of average size %f\n",
3159 pfx, num_sccs_read, total_scc_size / (double)num_sccs_read);
3160 fprintf (stderr, "[%s] %lu tree bodies read in total\n", pfx, total_scc_size);
3161 if (flag_wpa && tree_scc_hash)
3162 {
3163 fprintf (stderr, "[%s] tree SCC table: size %ld, %ld elements, "
3164 "collision ratio: %f\n", pfx,
3165 (long) tree_scc_hash->size (),
3166 (long) tree_scc_hash->elements (),
3167 tree_scc_hash->collisions ());
3168 hash_table<tree_scc_hasher>::iterator hiter;
3169 tree_scc *scc, *max_scc = NULL;
3170 unsigned max_length = 0;
3171 FOR_EACH_HASH_TABLE_ELEMENT (*tree_scc_hash, scc, x, hiter)
3172 {
3173 unsigned length = 0;
3174 tree_scc *s = scc;
3175 for (; s; s = s->next)
3176 length++;
3177 if (length > max_length)
3178 {
3179 max_length = length;
3180 max_scc = scc;
3181 }
3182 }
3183 fprintf (stderr, "[%s] tree SCC max chain length %u (size %u)\n",
3184 pfx, max_length, max_scc->len);
3185 fprintf (stderr, "[%s] Compared %lu SCCs, %lu collisions (%f)\n", pfx,
3186 num_scc_compares, num_scc_compare_collisions,
3187 num_scc_compare_collisions / (double) num_scc_compares);
3188 fprintf (stderr, "[%s] Merged %lu SCCs\n", pfx, num_sccs_merged);
3189 fprintf (stderr, "[%s] Merged %lu tree bodies\n", pfx,
3190 total_scc_size_merged);
3191 fprintf (stderr, "[%s] Merged %lu types\n", pfx, num_merged_types);
3192 fprintf (stderr, "[%s] %lu types prevailed (%lu associated trees)\n",
3193 pfx, num_prevailing_types, num_type_scc_trees);
3194 fprintf (stderr, "[%s] GIMPLE canonical type table: size %ld, "
3195 "%ld elements, %ld searches, %ld collisions (ratio: %f)\n", pfx,
3196 (long) htab_size (gimple_canonical_types),
3197 (long) htab_elements (gimple_canonical_types),
3198 (long) gimple_canonical_types->searches,
3199 (long) gimple_canonical_types->collisions,
3200 htab_collisions (gimple_canonical_types));
3201 fprintf (stderr, "[%s] GIMPLE canonical type pointer-map: "
3202 "%lu elements, %ld searches\n", pfx,
3203 num_canonical_type_hash_entries,
3204 num_canonical_type_hash_queries);
3205 }
3206
3207 print_lto_report (pfx);
3208 }
3209
3210 /* Perform whole program analysis (WPA) on the callgraph and write out the
3211 optimization plan. */
3212
3213 static void
3214 do_whole_program_analysis (void)
3215 {
3216 symtab_node *node;
3217
3218 lto_parallelism = 1;
3219
3220 /* TODO: jobserver communication is not supported yet. */
3221 if (!strcmp (flag_wpa, "jobserver"))
3222 lto_parallelism = -1;
3223 else
3224 {
3225 lto_parallelism = atoi (flag_wpa);
3226 if (lto_parallelism <= 0)
3227 lto_parallelism = 0;
3228 }
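/* Resulting values, as consumed by stream_out: -1 requests the GNU make
   jobserver (its limits are currently ignored), 0 or 1 streams partitions
   serially, and N > 1 forks at most N streaming children at a time
   (descriptive note).  */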
3229
3230 timevar_start (TV_PHASE_OPT_GEN);
3231
3232 /* Note that since we are in WPA mode, materialize_cgraph will not
3233 actually read in all the function bodies. It only materializes
3234 the decls and cgraph nodes so that analysis can be performed. */
3235 materialize_cgraph ();
3236
3237 /* Reading in the cgraph uses different timers, start timing WPA now. */
3238 timevar_push (TV_WHOPR_WPA);
3239
3240 if (pre_ipa_mem_report)
3241 {
3242 fprintf (stderr, "Memory consumption before IPA\n");
3243 dump_memory_report (false);
3244 }
3245
3246 cgraph_function_flags_ready = true;
3247
3248 if (cgraph_dump_file)
3249 dump_symtab (cgraph_dump_file);
3250 bitmap_obstack_initialize (NULL);
3251 cgraph_state = CGRAPH_STATE_IPA_SSA;
3252
3253 execute_ipa_pass_list (g->get_passes ()->all_regular_ipa_passes);
3254 symtab_remove_unreachable_nodes (false, dump_file);
3255
3256 if (cgraph_dump_file)
3257 {
3258 fprintf (cgraph_dump_file, "Optimized ");
3259 dump_symtab (cgraph_dump_file);
3260 }
3261 #ifdef ENABLE_CHECKING
3262 verify_symtab ();
3263 #endif
3264 bitmap_obstack_release (NULL);
3265
3266 /* We are about to launch the final LTRANS phase, stop the WPA timer. */
3267 timevar_pop (TV_WHOPR_WPA);
3268
3269 timevar_push (TV_WHOPR_PARTITIONING);
3270 if (flag_lto_partition == LTO_PARTITION_1TO1)
3271 lto_1_to_1_map ();
3272 else if (flag_lto_partition == LTO_PARTITION_MAX)
3273 lto_max_map ();
3274 else if (flag_lto_partition == LTO_PARTITION_ONE)
3275 lto_balanced_map (1);
3276 else if (flag_lto_partition == LTO_PARTITION_BALANCED)
3277 lto_balanced_map (PARAM_VALUE (PARAM_LTO_PARTITIONS));
3278 else
3279 gcc_unreachable ();
3280
3281 /* Inline summaries are needed for balanced partitioning. Free them now so
3282 the memory can be used for streamer caches. */
3283 inline_free_summary ();
3284
3285 /* AUX pointers are used by the partitioning code to track the number of
3286 partitions a symbol is in. This is no longer needed. */
3287 FOR_EACH_SYMBOL (node)
3288 node->aux = NULL;
3289
3290 lto_stats.num_cgraph_partitions += ltrans_partitions.length ();
3291
3292 /* Find the statics that need to be promoted
3293 to globals with hidden visibility because they are accessed from multiple
3294 partitions. */
3295 lto_promote_cross_file_statics ();
3296 timevar_pop (TV_WHOPR_PARTITIONING);
3297
3298 timevar_stop (TV_PHASE_OPT_GEN);
3299
3300 /* Collect one last time - in lto_wpa_write_files we may end up forking
3301 on the assumption that this doesn't increase memory usage, so we
3302 absolutely do not want to collect after that. */
3303 ggc_collect ();
3304
3305 timevar_start (TV_PHASE_STREAM_OUT);
3306 if (!quiet_flag)
3307 {
3308 fprintf (stderr, "\nStreaming out");
3309 fflush (stderr);
3310 }
3311 lto_wpa_write_files ();
3312 if (!quiet_flag)
3313 fprintf (stderr, "\n");
3314 timevar_stop (TV_PHASE_STREAM_OUT);
3315
3316 if (post_ipa_mem_report)
3317 {
3318 fprintf (stderr, "Memory consumption after IPA\n");
3319 dump_memory_report (false);
3320 }
3321
3322 /* Show the LTO report before launching LTRANS. */
3323 if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
3324 print_lto_report_1 ();
3325 if (mem_report_wpa)
3326 dump_memory_report (true);
3327 }
3328
3329
3330 static GTY(()) tree lto_eh_personality_decl;
3331
3332 /* Return the LTO personality function decl. */
3333
3334 tree
3335 lto_eh_personality (void)
3336 {
3337 if (!lto_eh_personality_decl)
3338 {
3339 /* Use the first personality DECL for our personality if we don't
3340 support multiple ones. This ensures that we don't artificially
3341 create the need for them in a single-language program. */
3342 if (first_personality_decl && !dwarf2out_do_cfi_asm ())
3343 lto_eh_personality_decl = first_personality_decl;
3344 else
3345 lto_eh_personality_decl = lhd_gcc_personality ();
3346 }
3347
3348 return lto_eh_personality_decl;
3349 }
3350
3351 /* Set the process name based on the LTO mode. */
3352
3353 static void
3354 lto_process_name (void)
3355 {
3356 if (flag_lto)
3357 setproctitle ("lto1-lto");
3358 if (flag_wpa)
3359 setproctitle ("lto1-wpa");
3360 if (flag_ltrans)
3361 setproctitle ("lto1-ltrans");
3362 }
3363
3364
3365 /* Initialize the LTO front end. */
3366
3367 static void
3368 lto_init (void)
3369 {
3370 lto_process_name ();
3371 lto_streamer_hooks_init ();
3372 lto_reader_init ();
3373 lto_set_in_hooks (NULL, get_section_data, free_section_data);
3374 memset (&lto_stats, 0, sizeof (lto_stats));
3375 bitmap_obstack_initialize (NULL);
3376 gimple_register_cfg_hooks ();
3377 }
3378
3379
3380 /* Main entry point for the GIMPLE front end. This front end has
3381 three main personalities:
3382
3383 - LTO (-flto). All the object files on the command line are
3384 loaded in memory and processed as a single translation unit.
3385 This is the traditional link-time optimization behavior.
3386
3387 - WPA (-fwpa). Only the callgraph and summary information for
3388 files in the command file are loaded. A single callgraph
3389 (without function bodies) is instantiated for the whole set of
3390 files. IPA passes are only allowed to analyze the call graph
3391 and make transformation decisions. The callgraph is
3392 partitioned, each partition is written to a new object file
3393 together with the transformation decisions.
3394
3395 - LTRANS (-fltrans). Similar to -flto, but the IPA summary
3396 passes are not run again. Since WPA computed summary
3397 information and decided what transformations to apply, LTRANS
3398 simply applies them. */
3399
3400 void
3401 lto_main (void)
3402 {
3403 /* LTO is called as a front end, even though it is not a front end.
3404 Because it is called as a front end, TV_PHASE_PARSING and
3405 TV_PARSE_GLOBAL are active, and we need to turn them off while
3406 doing LTO. Later we turn them back on so they are active up in
3407 toplev.c. */
3408 timevar_pop (TV_PARSE_GLOBAL);
3409 timevar_stop (TV_PHASE_PARSING);
3410
3411 timevar_start (TV_PHASE_SETUP);
3412
3413 /* Initialize the LTO front end. */
3414 lto_init ();
3415
3416 timevar_stop (TV_PHASE_SETUP);
3417 timevar_start (TV_PHASE_STREAM_IN);
3418
3419 /* Read all the symbols and call graph from all the files in the
3420 command line. */
3421 read_cgraph_and_symbols (num_in_fnames, in_fnames);
3422
3423 timevar_stop (TV_PHASE_STREAM_IN);
3424
3425 if (!seen_error ())
3426 {
3427 /* If WPA is enabled analyze the whole call graph and create an
3428 optimization plan. Otherwise, read in all the function
3429 bodies and continue with optimization. */
3430 if (flag_wpa)
3431 do_whole_program_analysis ();
3432 else
3433 {
3434 timevar_start (TV_PHASE_OPT_GEN);
3435
3436 materialize_cgraph ();
3437 if (!flag_ltrans)
3438 lto_promote_statics_nonwpa ();
3439
3440 /* Let the middle end know that we have read and merged all of
3441 the input files. */
3442 compile ();
3443
3444 timevar_stop (TV_PHASE_OPT_GEN);
3445
3446 /* FIXME lto, if the processes spawned by WPA fail, we miss
3447 the chance to print WPA's report, so WPA will call
3448 print_lto_report before launching LTRANS. If LTRANS was
3449 launched directly by the driver we would not need to do
3450 this. */
3451 if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
3452 print_lto_report_1 ();
3453 }
3454 }
3455
3456 /* Here we make LTO pretend to be a parser. */
3457 timevar_start (TV_PHASE_PARSING);
3458 timevar_push (TV_PARSE_GLOBAL);
3459 }
3460
3461 #include "gt-lto-lto.h"