re PR lto/60295 (Too many lto1-wpa-stream processes are forked)
[gcc.git] / gcc / lto / lto.c
1 /* Top-level LTO routines.
2 Copyright (C) 2009-2014 Free Software Foundation, Inc.
3 Contributed by CodeSourcery, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "opts.h"
25 #include "toplev.h"
26 #include "tree.h"
27 #include "stor-layout.h"
28 #include "diagnostic-core.h"
29 #include "tm.h"
30 #include "cgraph.h"
31 #include "tree-ssa-operands.h"
32 #include "tree-pass.h"
33 #include "langhooks.h"
34 #include "bitmap.h"
35 #include "ipa-prop.h"
36 #include "common.h"
37 #include "debug.h"
38 #include "tree-ssa-alias.h"
39 #include "internal-fn.h"
40 #include "gimple-expr.h"
41 #include "is-a.h"
42 #include "gimple.h"
43 #include "lto.h"
44 #include "lto-tree.h"
45 #include "lto-streamer.h"
46 #include "tree-streamer.h"
47 #include "splay-tree.h"
48 #include "lto-partition.h"
49 #include "data-streamer.h"
50 #include "context.h"
51 #include "pass_manager.h"
52
53
54 /* Number of parallel tasks to run, -1 if we want to use GNU Make jobserver. */
55 static int lto_parallelism;
56
57 static GTY(()) tree first_personality_decl;
58
59 /* Returns a hash code for P. */
60
61 static hashval_t
62 hash_name (const void *p)
63 {
64 const struct lto_section_slot *ds = (const struct lto_section_slot *) p;
65 return (hashval_t) htab_hash_string (ds->name);
66 }
67
68
69 /* Returns nonzero if P1 and P2 are equal. */
70
71 static int
72 eq_name (const void *p1, const void *p2)
73 {
74 const struct lto_section_slot *s1 =
75 (const struct lto_section_slot *) p1;
76 const struct lto_section_slot *s2 =
77 (const struct lto_section_slot *) p2;
78
79 return strcmp (s1->name, s2->name) == 0;
80 }
81
82 /* Free an lto_section_slot. */
83
84 static void
85 free_with_string (void *arg)
86 {
87 struct lto_section_slot *s = (struct lto_section_slot *)arg;
88
89 free (CONST_CAST (char *, s->name));
90 free (arg);
91 }
92
93 /* Create a section hash table. */
94
95 htab_t
96 lto_obj_create_section_hash_table (void)
97 {
98 return htab_create (37, hash_name, eq_name, free_with_string);
99 }
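/* A minimal usage sketch (hypothetical names, for illustration only): the
   table is keyed on the section name string, so both lookup and insertion
   go through an lto_section_slot whose NAME is filled in:

       struct lto_section_slot key;
       key.name = section_name;
       void **slot = htab_find_slot (section_hash_table, &key, INSERT);
       if (*slot == NULL)
         *slot = heap_allocated_copy;   (owned by the table from now on)

   free_with_string then releases both the slot and its name when the table
   or an entry is deleted.  */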
100
101 /* Delete an allocated integer KEY in the splay tree. */
102
103 static void
104 lto_splay_tree_delete_id (splay_tree_key key)
105 {
106 free ((void *) key);
107 }
108
109 /* Compare splay tree node ids A and B. */
110
111 static int
112 lto_splay_tree_compare_ids (splay_tree_key a, splay_tree_key b)
113 {
114 unsigned HOST_WIDE_INT ai;
115 unsigned HOST_WIDE_INT bi;
116
117 ai = *(unsigned HOST_WIDE_INT *) a;
118 bi = *(unsigned HOST_WIDE_INT *) b;
119
120 if (ai < bi)
121 return -1;
122 else if (ai > bi)
123 return 1;
124 return 0;
125 }
126
127 /* Look up splay tree node by ID in splay tree T. */
128
129 static splay_tree_node
130 lto_splay_tree_lookup (splay_tree t, unsigned HOST_WIDE_INT id)
131 {
132 return splay_tree_lookup (t, (splay_tree_key) &id);
133 }
134
135 /* Check if KEY has ID. */
136
137 static bool
138 lto_splay_tree_id_equal_p (splay_tree_key key, unsigned HOST_WIDE_INT id)
139 {
140 return *(unsigned HOST_WIDE_INT *) key == id;
141 }
142
143 /* Insert a splay tree node into tree T with ID as key and FILE_DATA as value.
144 The ID is allocated separately because we need HOST_WIDE_INTs which may
145 be wider than a splay_tree_key. */
146
147 static void
148 lto_splay_tree_insert (splay_tree t, unsigned HOST_WIDE_INT id,
149 struct lto_file_decl_data *file_data)
150 {
151 unsigned HOST_WIDE_INT *idp = XCNEW (unsigned HOST_WIDE_INT);
152 *idp = id;
153 splay_tree_insert (t, (splay_tree_key) idp, (splay_tree_value) file_data);
154 }
155
156 /* Create a splay tree. */
157
158 static splay_tree
159 lto_splay_tree_new (void)
160 {
161 return splay_tree_new (lto_splay_tree_compare_ids,
162 lto_splay_tree_delete_id,
163 NULL);
164 }
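/* Usage sketch (illustrative only): the trees created here map file ids to
   their decl data, keyed by heap-allocated HOST_WIDE_INTs that are freed by
   lto_splay_tree_delete_id:

       splay_tree file_ids = lto_splay_tree_new ();
       lto_splay_tree_insert (file_ids, id, file_data);
       splay_tree_node n = lto_splay_tree_lookup (file_ids, id);
       if (n)
         file_data = (struct lto_file_decl_data *) n->value;
*/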
165
166 /* Return true when NODE has a clone that is analyzed (i.e. we need
167 to load its body even if the node itself is not needed). */
168
169 static bool
170 has_analyzed_clone_p (struct cgraph_node *node)
171 {
172 struct cgraph_node *orig = node;
173 node = node->clones;
174 if (node)
175 while (node != orig)
176 {
177 if (node->analyzed)
178 return true;
179 if (node->clones)
180 node = node->clones;
181 else if (node->next_sibling_clone)
182 node = node->next_sibling_clone;
183 else
184 {
185 while (node != orig && !node->next_sibling_clone)
186 node = node->clone_of;
187 if (node != orig)
188 node = node->next_sibling_clone;
189 }
190 }
191 return false;
192 }
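/* Illustration of the walk above (hypothetical clone tree): with

       orig
       /  \
     c1    c2
      |
     c3

   where clones points to the first child and siblings are linked through
   next_sibling_clone, the visit order is c1, c3, c2, climbing back up via
   clone_of, and the walk stops as soon as an analyzed clone is found.  */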
193
194 /* Read the function body for the function associated with NODE. */
195
196 static void
197 lto_materialize_function (struct cgraph_node *node)
198 {
199 tree decl;
200
201 decl = node->decl;
202 /* Read in functions with body (analyzed nodes)
203 and also functions that are needed to produce virtual clones. */
204 if ((cgraph_function_with_gimple_body_p (node) && node->analyzed)
205 || node->used_as_abstract_origin
206 || has_analyzed_clone_p (node))
207 {
208 /* Clones don't need to be read. */
209 if (node->clone_of)
210 return;
211 if (DECL_FUNCTION_PERSONALITY (decl) && !first_personality_decl)
212 first_personality_decl = DECL_FUNCTION_PERSONALITY (decl);
213 }
214
215 /* Let the middle end know about the function. */
216 rest_of_decl_compilation (decl, 1, 0);
217 }
218
219
220 /* Decode the content of memory pointed to by DATA into the in-decl
221 state object STATE. DATA_IN points to a data_in structure for
222 decoding. Return the address after the decoded object in the
223 input. */
224
225 static const uint32_t *
226 lto_read_in_decl_state (struct data_in *data_in, const uint32_t *data,
227 struct lto_in_decl_state *state)
228 {
229 uint32_t ix;
230 tree decl;
231 uint32_t i, j;
232
233 ix = *data++;
234 decl = streamer_tree_cache_get_tree (data_in->reader_cache, ix);
235 if (TREE_CODE (decl) != FUNCTION_DECL)
236 {
237 gcc_assert (decl == void_type_node);
238 decl = NULL_TREE;
239 }
240 state->fn_decl = decl;
241
242 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
243 {
244 uint32_t size = *data++;
245 tree *decls = ggc_alloc_vec_tree (size);
246
247 for (j = 0; j < size; j++)
248 decls[j] = streamer_tree_cache_get_tree (data_in->reader_cache, data[j]);
249
250 state->streams[i].size = size;
251 state->streams[i].trees = decls;
252 data += size;
253 }
254
255 return data;
256 }
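/* The encoding decoded above has the following layout of uint32_t words
   (derived from the reader code, shown for illustration):

       [0]        cache index of the FUNCTION_DECL, or of void_type_node
                  for the global decl state
       then, for each of the LTO_N_DECL_STREAMS streams:
       [k]        size of the stream
       [k+1 ..]   size cache indices, one per tree in the stream

   so the returned pointer is DATA advanced past all of these words.  */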
257
258
259 /* Global canonical type table. */
260 static htab_t gimple_canonical_types;
261 static pointer_map <hashval_t> *canonical_type_hash_cache;
262 static unsigned long num_canonical_type_hash_entries;
263 static unsigned long num_canonical_type_hash_queries;
264
265 static hashval_t iterative_hash_canonical_type (tree type, hashval_t val);
266 static hashval_t gimple_canonical_type_hash (const void *p);
267 static void gimple_register_canonical_type_1 (tree t, hashval_t hash);
268
269 /* Return a hash value for gimple type TYPE.
270
271 The hash value returned is equal for types considered compatible
272 by gimple_canonical_types_compatible_p. */
273
274 static hashval_t
275 hash_canonical_type (tree type)
276 {
277 hashval_t v;
278
279 /* Combine a few common features of types so that types are grouped into
280 smaller sets; when searching for existing matching types to merge,
281 only existing types having the same features as the new type will be
282 checked. */
283 v = iterative_hash_hashval_t (TREE_CODE (type), 0);
284 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
285
286 /* Incorporate common features of numerical types. */
287 if (INTEGRAL_TYPE_P (type)
288 || SCALAR_FLOAT_TYPE_P (type)
289 || FIXED_POINT_TYPE_P (type)
290 || TREE_CODE (type) == OFFSET_TYPE
291 || POINTER_TYPE_P (type))
292 {
293 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
294 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
295 }
296
297 if (VECTOR_TYPE_P (type))
298 {
299 v = iterative_hash_hashval_t (TYPE_VECTOR_SUBPARTS (type), v);
300 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
301 }
302
303 if (TREE_CODE (type) == COMPLEX_TYPE)
304 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
305
306 /* For pointer and reference types, fold in information about the type
307 pointed to but do not recurse to the pointed-to type. */
308 if (POINTER_TYPE_P (type))
309 {
310 v = iterative_hash_hashval_t (TYPE_ADDR_SPACE (TREE_TYPE (type)), v);
311 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
312 }
313
314 /* For integer types hash only the string flag. */
315 if (TREE_CODE (type) == INTEGER_TYPE)
316 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
317
318 /* For array types hash the domain bounds and the string flag. */
319 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
320 {
321 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
322 /* OMP lowering can introduce error_mark_node in place of
323 random local decls in types. */
324 if (TYPE_MIN_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
325 v = iterative_hash_expr (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), v);
326 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
327 v = iterative_hash_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), v);
328 }
329
330 /* Recurse for aggregates with a single element type. */
331 if (TREE_CODE (type) == ARRAY_TYPE
332 || TREE_CODE (type) == COMPLEX_TYPE
333 || TREE_CODE (type) == VECTOR_TYPE)
334 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
335
336 /* Incorporate function return and argument types. */
337 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
338 {
339 unsigned na;
340 tree p;
341
342 /* For method types also incorporate their parent class. */
343 if (TREE_CODE (type) == METHOD_TYPE)
344 v = iterative_hash_canonical_type (TYPE_METHOD_BASETYPE (type), v);
345
346 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
347
348 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
349 {
350 v = iterative_hash_canonical_type (TREE_VALUE (p), v);
351 na++;
352 }
353
354 v = iterative_hash_hashval_t (na, v);
355 }
356
357 if (RECORD_OR_UNION_TYPE_P (type))
358 {
359 unsigned nf;
360 tree f;
361
362 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
363 if (TREE_CODE (f) == FIELD_DECL)
364 {
365 v = iterative_hash_canonical_type (TREE_TYPE (f), v);
366 nf++;
367 }
368
369 v = iterative_hash_hashval_t (nf, v);
370 }
371
372 return v;
373 }
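/* For example (illustrative), the translation-unit-local definitions

       struct A { int i; float f; };      (TU 1)
       struct B { int x; float y; };      (TU 2)

   hash identically here: same TREE_CODE and mode, two FIELD_DECLs whose
   types hash the same, with field names ignored.  Whether the two types
   then share one TYPE_CANONICAL is decided by
   gimple_canonical_types_compatible_p below, which likewise looks at field
   offsets and types but not names.  */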
374
375 /* Return a hash value for gimple type TYPE combined with VAL. */
376
377 static hashval_t
378 iterative_hash_canonical_type (tree type, hashval_t val)
379 {
380 hashval_t v;
381 /* An already processed type. */
382 if (TYPE_CANONICAL (type))
383 {
384 type = TYPE_CANONICAL (type);
385 v = gimple_canonical_type_hash (type);
386 }
387 else
388 {
389 /* Canonical types should not be able to form SCCs by design, this
390 recursion is just because we do not register canonical types in
391 optimal order. To avoid quadratic behavior also register the
392 type here. */
393 v = hash_canonical_type (type);
394 gimple_register_canonical_type_1 (type, v);
395 }
396 return iterative_hash_hashval_t (v, val);
397 }
398
399 /* Returns the hash for a canonical type P. */
400
401 static hashval_t
402 gimple_canonical_type_hash (const void *p)
403 {
404 num_canonical_type_hash_queries++;
405 hashval_t *slot
406 = canonical_type_hash_cache->contains (CONST_CAST_TREE ((const_tree) p));
407 gcc_assert (slot != NULL);
408 return *slot;
409 }
410
411
412 /* The TYPE_CANONICAL merging machinery. It should closely resemble
413 the middle-end types_compatible_p function. It needs to avoid
414 claiming types are different for types that should be treated
415 the same with respect to TBAA. Canonical types are also used
416 for IL consistency checks via the useless_type_conversion_p
417 predicate which does not handle all type kinds itself but falls
418 back to pointer-comparison of TYPE_CANONICAL for aggregates
419 for example. */
420
421 /* Return true iff T1 and T2 are structurally identical as far as
422 TBAA is concerned. */
423
424 static bool
425 gimple_canonical_types_compatible_p (tree t1, tree t2)
426 {
427 /* Before starting to set up the SCC machinery handle simple cases. */
428
429 /* Check first for the obvious case of pointer identity. */
430 if (t1 == t2)
431 return true;
432
433 /* Check that we have two types to compare. */
434 if (t1 == NULL_TREE || t2 == NULL_TREE)
435 return false;
436
437 /* If the types have been previously registered and found equal
438 they still are. */
439 if (TYPE_CANONICAL (t1)
440 && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
441 return true;
442
443 /* Can't be the same type if the types don't have the same code. */
444 if (TREE_CODE (t1) != TREE_CODE (t2))
445 return false;
446
447 /* Qualifiers do not matter for canonical type comparison purposes. */
448
449 /* Void types and nullptr types are always the same. */
450 if (TREE_CODE (t1) == VOID_TYPE
451 || TREE_CODE (t1) == NULLPTR_TYPE)
452 return true;
453
454 /* Can't be the same type if they have different modes. */
455 if (TYPE_MODE (t1) != TYPE_MODE (t2))
456 return false;
457
458 /* Non-aggregate types can be handled cheaply. */
459 if (INTEGRAL_TYPE_P (t1)
460 || SCALAR_FLOAT_TYPE_P (t1)
461 || FIXED_POINT_TYPE_P (t1)
462 || TREE_CODE (t1) == VECTOR_TYPE
463 || TREE_CODE (t1) == COMPLEX_TYPE
464 || TREE_CODE (t1) == OFFSET_TYPE
465 || POINTER_TYPE_P (t1))
466 {
467 /* Can't be the same type if they have different sign or precision. */
468 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
469 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
470 return false;
471
472 if (TREE_CODE (t1) == INTEGER_TYPE
473 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
474 return false;
475
476 /* For canonical type comparisons we do not want to build SCCs
477 so we cannot compare pointed-to types. But we can, for now,
478 require the same pointed-to type kind and match what
479 useless_type_conversion_p would do. */
480 if (POINTER_TYPE_P (t1))
481 {
482 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
483 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
484 return false;
485
486 if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
487 return false;
488 }
489
490 /* Tail-recurse to components. */
491 if (TREE_CODE (t1) == VECTOR_TYPE
492 || TREE_CODE (t1) == COMPLEX_TYPE)
493 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
494 TREE_TYPE (t2));
495
496 return true;
497 }
498
499 /* Do type-specific comparisons. */
500 switch (TREE_CODE (t1))
501 {
502 case ARRAY_TYPE:
503 /* Array types are the same if the element types are the same and
504 the number of elements is the same. */
505 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
506 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
507 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
508 return false;
509 else
510 {
511 tree i1 = TYPE_DOMAIN (t1);
512 tree i2 = TYPE_DOMAIN (t2);
513
514 /* For an incomplete external array, the type domain can be
515 NULL_TREE. Check this condition also. */
516 if (i1 == NULL_TREE && i2 == NULL_TREE)
517 return true;
518 else if (i1 == NULL_TREE || i2 == NULL_TREE)
519 return false;
520 else
521 {
522 tree min1 = TYPE_MIN_VALUE (i1);
523 tree min2 = TYPE_MIN_VALUE (i2);
524 tree max1 = TYPE_MAX_VALUE (i1);
525 tree max2 = TYPE_MAX_VALUE (i2);
526
527 /* The minimum/maximum values have to be the same. */
528 if ((min1 == min2
529 || (min1 && min2
530 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
531 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
532 || operand_equal_p (min1, min2, 0))))
533 && (max1 == max2
534 || (max1 && max2
535 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
536 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
537 || operand_equal_p (max1, max2, 0)))))
538 return true;
539 else
540 return false;
541 }
542 }
543
544 case METHOD_TYPE:
545 case FUNCTION_TYPE:
546 /* Function types are the same if the return type and argument types
547 are the same. */
548 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
549 return false;
550
551 if (!comp_type_attributes (t1, t2))
552 return false;
553
554 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
555 return true;
556 else
557 {
558 tree parms1, parms2;
559
560 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
561 parms1 && parms2;
562 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
563 {
564 if (!gimple_canonical_types_compatible_p
565 (TREE_VALUE (parms1), TREE_VALUE (parms2)))
566 return false;
567 }
568
569 if (parms1 || parms2)
570 return false;
571
572 return true;
573 }
574
575 case RECORD_TYPE:
576 case UNION_TYPE:
577 case QUAL_UNION_TYPE:
578 {
579 tree f1, f2;
580
581 /* For aggregate types, all the fields must be the same. */
582 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
583 f1 || f2;
584 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
585 {
586 /* Skip non-fields. */
587 while (f1 && TREE_CODE (f1) != FIELD_DECL)
588 f1 = TREE_CHAIN (f1);
589 while (f2 && TREE_CODE (f2) != FIELD_DECL)
590 f2 = TREE_CHAIN (f2);
591 if (!f1 || !f2)
592 break;
593 /* The fields must have the same name, offset and type. */
594 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
595 || !gimple_compare_field_offset (f1, f2)
596 || !gimple_canonical_types_compatible_p
597 (TREE_TYPE (f1), TREE_TYPE (f2)))
598 return false;
599 }
600
601 /* If one aggregate has more fields than the other, they
602 are not the same. */
603 if (f1 || f2)
604 return false;
605
606 return true;
607 }
608
609 default:
610 gcc_unreachable ();
611 }
612 }
613
614
615 /* Returns nonzero if P1 and P2 are equal. */
616
617 static int
618 gimple_canonical_type_eq (const void *p1, const void *p2)
619 {
620 const_tree t1 = (const_tree) p1;
621 const_tree t2 = (const_tree) p2;
622 return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
623 CONST_CAST_TREE (t2));
624 }
625
626 /* Main worker for gimple_register_canonical_type. */
627
628 static void
629 gimple_register_canonical_type_1 (tree t, hashval_t hash)
630 {
631 void **slot;
632
633 gcc_checking_assert (TYPE_P (t) && !TYPE_CANONICAL (t));
634
635 slot = htab_find_slot_with_hash (gimple_canonical_types, t, hash, INSERT);
636 if (*slot)
637 {
638 tree new_type = (tree)(*slot);
639 gcc_checking_assert (new_type != t);
640 TYPE_CANONICAL (t) = new_type;
641 }
642 else
643 {
644 TYPE_CANONICAL (t) = t;
645 *slot = (void *) t;
646 /* Cache the just computed hash value. */
647 num_canonical_type_hash_entries++;
648 bool existed_p;
649 hashval_t *hslot = canonical_type_hash_cache->insert (t, &existed_p);
650 gcc_assert (!existed_p);
651 *hslot = hash;
652 }
653 }
654
655 /* Register type T in the global type table gimple_types and set
656 TYPE_CANONICAL of T accordingly.
657 This is used by LTO to merge structurally equivalent types for
658 type-based aliasing purposes across different TUs and languages.
659
660 ??? This merging does not exactly match how the tree.c middle-end
661 functions will assign TYPE_CANONICAL when new types are created
662 during optimization (which at least happens for pointer and array
663 types). */
664
665 static void
666 gimple_register_canonical_type (tree t)
667 {
668 if (TYPE_CANONICAL (t))
669 return;
670
671 gimple_register_canonical_type_1 (t, hash_canonical_type (t));
672 }
673
674 /* Re-compute TYPE_CANONICAL for NODE and related types. */
675
676 static void
677 lto_register_canonical_types (tree node, bool first_p)
678 {
679 if (!node
680 || !TYPE_P (node))
681 return;
682
683 if (first_p)
684 TYPE_CANONICAL (node) = NULL_TREE;
685
686 if (POINTER_TYPE_P (node)
687 || TREE_CODE (node) == COMPLEX_TYPE
688 || TREE_CODE (node) == ARRAY_TYPE)
689 lto_register_canonical_types (TREE_TYPE (node), first_p);
690
691 if (!first_p)
692 gimple_register_canonical_type (node);
693 }
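/* Callers are expected to invoke this twice (sketch, illustrative only):

       lto_register_canonical_types (t, true);    clear stale TYPE_CANONICALs
       ...
       lto_register_canonical_types (t, false);   register canonical types

   so that no registration happens while TYPE_CANONICALs left over from
   streaming are still in place.  */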
694
695
696 /* Remember trees that contain references to declarations. */
697 static GTY(()) vec <tree, va_gc> *tree_with_vars;
698
699 #define CHECK_VAR(tt) \
700 do \
701 { \
702 if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \
703 && (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \
704 return true; \
705 } while (0)
706
707 #define CHECK_NO_VAR(tt) \
708 gcc_checking_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
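/* Note that CHECK_VAR expands to a conditional "return true" and
   CHECK_NO_VAR to an assertion, so both are only meaningful inside the
   mentions_vars_p_* predicates below.  A hypothetical predicate would look
   like (SOME_FIELD and OTHER_FIELD are placeholders):

       static inline bool
       mentions_vars_p_example (tree t)
       {
         CHECK_VAR (SOME_FIELD (t));      true for a public/external decl
         CHECK_NO_VAR (OTHER_FIELD (t));  such decls must never appear here
         return false;
       }
*/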
709
710 /* Check presence of pointers to decls in fields of a tree_typed T. */
711
712 static inline bool
713 mentions_vars_p_typed (tree t)
714 {
715 CHECK_NO_VAR (TREE_TYPE (t));
716 return false;
717 }
718
719 /* Check presence of pointers to decls in fields of a tree_common T. */
720
721 static inline bool
722 mentions_vars_p_common (tree t)
723 {
724 if (mentions_vars_p_typed (t))
725 return true;
726 CHECK_NO_VAR (TREE_CHAIN (t));
727 return false;
728 }
729
730 /* Check presence of pointers to decls in fields of a decl_minimal T. */
731
732 static inline bool
733 mentions_vars_p_decl_minimal (tree t)
734 {
735 if (mentions_vars_p_common (t))
736 return true;
737 CHECK_NO_VAR (DECL_NAME (t));
738 CHECK_VAR (DECL_CONTEXT (t));
739 return false;
740 }
741
742 /* Check presence of pointers to decls in fields of a decl_common T. */
743
744 static inline bool
745 mentions_vars_p_decl_common (tree t)
746 {
747 if (mentions_vars_p_decl_minimal (t))
748 return true;
749 CHECK_VAR (DECL_SIZE (t));
750 CHECK_VAR (DECL_SIZE_UNIT (t));
751 CHECK_VAR (DECL_INITIAL (t));
752 CHECK_NO_VAR (DECL_ATTRIBUTES (t));
753 CHECK_VAR (DECL_ABSTRACT_ORIGIN (t));
754 return false;
755 }
756
757 /* Check presence of pointers to decls in fields of a decl_with_vis T. */
758
759 static inline bool
760 mentions_vars_p_decl_with_vis (tree t)
761 {
762 if (mentions_vars_p_decl_common (t))
763 return true;
764
765 /* Accessor macro has side-effects, use field-name here. */
766 CHECK_NO_VAR (t->decl_with_vis.assembler_name);
767 CHECK_NO_VAR (DECL_SECTION_NAME (t));
768 return false;
769 }
770
771 /* Check presence of pointers to decls in fields of a decl_non_common T. */
772
773 static inline bool
774 mentions_vars_p_decl_non_common (tree t)
775 {
776 if (mentions_vars_p_decl_with_vis (t))
777 return true;
778 CHECK_NO_VAR (DECL_ARGUMENT_FLD (t));
779 CHECK_NO_VAR (DECL_RESULT_FLD (t));
780 CHECK_NO_VAR (DECL_VINDEX (t));
781 return false;
782 }
783
784 /* Check presence of pointers to decls in fields of a FUNCTION_DECL T. */
785
786 static bool
787 mentions_vars_p_function (tree t)
788 {
789 if (mentions_vars_p_decl_non_common (t))
790 return true;
791 CHECK_VAR (DECL_FUNCTION_PERSONALITY (t));
792 return false;
793 }
794
795 /* Check presence of pointers to decls in fields of a field_decl T. */
796
797 static bool
798 mentions_vars_p_field_decl (tree t)
799 {
800 if (mentions_vars_p_decl_common (t))
801 return true;
802 CHECK_VAR (DECL_FIELD_OFFSET (t));
803 CHECK_NO_VAR (DECL_BIT_FIELD_TYPE (t));
804 CHECK_NO_VAR (DECL_QUALIFIER (t));
805 CHECK_NO_VAR (DECL_FIELD_BIT_OFFSET (t));
806 CHECK_NO_VAR (DECL_FCONTEXT (t));
807 return false;
808 }
809
810 /* Check presence of pointers to decls in fields of a type T. */
811
812 static bool
813 mentions_vars_p_type (tree t)
814 {
815 if (mentions_vars_p_common (t))
816 return true;
817 CHECK_NO_VAR (TYPE_CACHED_VALUES (t));
818 CHECK_VAR (TYPE_SIZE (t));
819 CHECK_VAR (TYPE_SIZE_UNIT (t));
820 CHECK_NO_VAR (TYPE_ATTRIBUTES (t));
821 CHECK_NO_VAR (TYPE_NAME (t));
822
823 CHECK_VAR (TYPE_MINVAL (t));
824 CHECK_VAR (TYPE_MAXVAL (t));
825
826 /* Accessor is for derived node types only. */
827 CHECK_NO_VAR (t->type_non_common.binfo);
828
829 CHECK_VAR (TYPE_CONTEXT (t));
830 CHECK_NO_VAR (TYPE_CANONICAL (t));
831 CHECK_NO_VAR (TYPE_MAIN_VARIANT (t));
832 CHECK_NO_VAR (TYPE_NEXT_VARIANT (t));
833 return false;
834 }
835
836 /* Check presence of pointers to decls in fields of a BINFO T. */
837
838 static bool
839 mentions_vars_p_binfo (tree t)
840 {
841 unsigned HOST_WIDE_INT i, n;
842
843 if (mentions_vars_p_common (t))
844 return true;
845 CHECK_VAR (BINFO_VTABLE (t));
846 CHECK_NO_VAR (BINFO_OFFSET (t));
847 CHECK_NO_VAR (BINFO_VIRTUALS (t));
848 CHECK_NO_VAR (BINFO_VPTR_FIELD (t));
849 n = vec_safe_length (BINFO_BASE_ACCESSES (t));
850 for (i = 0; i < n; i++)
851 CHECK_NO_VAR (BINFO_BASE_ACCESS (t, i));
852 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
853 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
854 n = BINFO_N_BASE_BINFOS (t);
855 for (i = 0; i < n; i++)
856 CHECK_NO_VAR (BINFO_BASE_BINFO (t, i));
857 return false;
858 }
859
860 /* Check presence of pointers to decls in fields of a CONSTRUCTOR T. */
861
862 static bool
863 mentions_vars_p_constructor (tree t)
864 {
865 unsigned HOST_WIDE_INT idx;
866 constructor_elt *ce;
867
868 if (mentions_vars_p_typed (t))
869 return true;
870
871 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
872 {
873 CHECK_NO_VAR (ce->index);
874 CHECK_VAR (ce->value);
875 }
876 return false;
877 }
878
879 /* Check presence of pointers to decls in fields of an expression tree T. */
880
881 static bool
882 mentions_vars_p_expr (tree t)
883 {
884 int i;
885 if (mentions_vars_p_typed (t))
886 return true;
887 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
888 CHECK_VAR (TREE_OPERAND (t, i));
889 return false;
890 }
891
892 /* Check presence of pointers to decls in fields of an OMP_CLAUSE T. */
893
894 static bool
895 mentions_vars_p_omp_clause (tree t)
896 {
897 int i;
898 if (mentions_vars_p_common (t))
899 return true;
900 for (i = omp_clause_num_ops[OMP_CLAUSE_CODE (t)] - 1; i >= 0; --i)
901 CHECK_VAR (OMP_CLAUSE_OPERAND (t, i));
902 return false;
903 }
904
905 /* Check presence of pointers to decls that need later fixup in T. */
906
907 static bool
908 mentions_vars_p (tree t)
909 {
910 switch (TREE_CODE (t))
911 {
912 case IDENTIFIER_NODE:
913 break;
914
915 case TREE_LIST:
916 CHECK_VAR (TREE_VALUE (t));
917 CHECK_VAR (TREE_PURPOSE (t));
918 CHECK_NO_VAR (TREE_CHAIN (t));
919 break;
920
921 case FIELD_DECL:
922 return mentions_vars_p_field_decl (t);
923
924 case LABEL_DECL:
925 case CONST_DECL:
926 case PARM_DECL:
927 case RESULT_DECL:
928 case IMPORTED_DECL:
929 case NAMESPACE_DECL:
930 case NAMELIST_DECL:
931 return mentions_vars_p_decl_common (t);
932
933 case VAR_DECL:
934 return mentions_vars_p_decl_with_vis (t);
935
936 case TYPE_DECL:
937 return mentions_vars_p_decl_non_common (t);
938
939 case FUNCTION_DECL:
940 return mentions_vars_p_function (t);
941
942 case TREE_BINFO:
943 return mentions_vars_p_binfo (t);
944
945 case PLACEHOLDER_EXPR:
946 return mentions_vars_p_common (t);
947
948 case BLOCK:
949 case TRANSLATION_UNIT_DECL:
950 case OPTIMIZATION_NODE:
951 case TARGET_OPTION_NODE:
952 break;
953
954 case CONSTRUCTOR:
955 return mentions_vars_p_constructor (t);
956
957 case OMP_CLAUSE:
958 return mentions_vars_p_omp_clause (t);
959
960 default:
961 if (TYPE_P (t))
962 {
963 if (mentions_vars_p_type (t))
964 return true;
965 }
966 else if (EXPR_P (t))
967 {
968 if (mentions_vars_p_expr (t))
969 return true;
970 }
971 else if (CONSTANT_CLASS_P (t))
972 CHECK_NO_VAR (TREE_TYPE (t));
973 else
974 gcc_unreachable ();
975 }
976 return false;
977 }
978
979
980 /* Return the resolution for the decl with index INDEX from DATA_IN. */
981
982 static enum ld_plugin_symbol_resolution
983 get_resolution (struct data_in *data_in, unsigned index)
984 {
985 if (data_in->globals_resolution.exists ())
986 {
987 ld_plugin_symbol_resolution_t ret;
988 /* We can have references to not-emitted functions, in
989 DECL_FUNCTION_PERSONALITY at least, so we can and indeed
990 have to return LDPR_UNKNOWN in some cases. */
991 if (data_in->globals_resolution.length () <= index)
992 return LDPR_UNKNOWN;
993 ret = data_in->globals_resolution[index];
994 return ret;
995 }
996 else
997 /* Delay resolution finding until decl merging. */
998 return LDPR_UNKNOWN;
999 }
1000
1001 /* We need to record resolutions until the symbol table is read. */
1002 static void
1003 register_resolution (struct lto_file_decl_data *file_data, tree decl,
1004 enum ld_plugin_symbol_resolution resolution)
1005 {
1006 if (resolution == LDPR_UNKNOWN)
1007 return;
1008 if (!file_data->resolution_map)
1009 file_data->resolution_map = pointer_map_create ();
1010 *pointer_map_insert (file_data->resolution_map, decl) = (void *)(size_t)resolution;
1011 }
1012
1013 /* Register DECL with the global symbol table and change its
1014 name if necessary to avoid name clashes for static globals across
1015 different files. */
1016
1017 static void
1018 lto_register_var_decl_in_symtab (struct data_in *data_in, tree decl,
1019 unsigned ix)
1020 {
1021 tree context;
1022
1023 /* Variable has file scope, not local. */
1024 if (!TREE_PUBLIC (decl)
1025 && !((context = decl_function_context (decl))
1026 && auto_var_in_fn_p (decl, context)))
1027 rest_of_decl_compilation (decl, 1, 0);
1028
1029 /* If this variable has already been declared, queue the
1030 declaration for merging. */
1031 if (TREE_PUBLIC (decl))
1032 register_resolution (data_in->file_data,
1033 decl, get_resolution (data_in, ix));
1034 }
1035
1036
1037 /* Register DECL with the global symbol table and change its
1038 name if necessary to avoid name clashes for static globals across
1039 different files. DATA_IN contains descriptors and tables for the
1040 file being read. */
1041
1042 static void
1043 lto_register_function_decl_in_symtab (struct data_in *data_in, tree decl,
1044 unsigned ix)
1045 {
1046 /* If this function has already been declared, queue the
1047 declaration for merging. */
1048 if (TREE_PUBLIC (decl) && !DECL_ABSTRACT (decl))
1049 register_resolution (data_in->file_data,
1050 decl, get_resolution (data_in, ix));
1051 }
1052
1053
1054 /* For the type T re-materialize it in the type variant list and
1055 the pointer/reference-to chains. */
1056
1057 static void
1058 lto_fixup_prevailing_type (tree t)
1059 {
1060 /* The following re-creates proper variant lists while fixing up
1061 the variant leaders. We do not stream TYPE_NEXT_VARIANT so the
1062 variant list state before fixup is broken. */
1063
1064 /* If we are not our own variant leader, link us into our new leader's
1065 variant list. */
1066 if (TYPE_MAIN_VARIANT (t) != t)
1067 {
1068 tree mv = TYPE_MAIN_VARIANT (t);
1069 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
1070 TYPE_NEXT_VARIANT (mv) = t;
1071 }
1072
1073 /* The following reconstructs the pointer chains
1074 of the new pointed-to type if we are a main variant. We do
1075 not stream those so they are broken before fixup. */
1076 if (TREE_CODE (t) == POINTER_TYPE
1077 && TYPE_MAIN_VARIANT (t) == t)
1078 {
1079 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (TREE_TYPE (t));
1080 TYPE_POINTER_TO (TREE_TYPE (t)) = t;
1081 }
1082 else if (TREE_CODE (t) == REFERENCE_TYPE
1083 && TYPE_MAIN_VARIANT (t) == t)
1084 {
1085 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (TREE_TYPE (t));
1086 TYPE_REFERENCE_TO (TREE_TYPE (t)) = t;
1087 }
1088 }
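/* Illustrative effect of the fixup above: if MV is the prevailing main
   variant and T a variant read from another unit, the variant list goes
   from

       MV -> V1 -> ...            (T not linked)

   to

       MV -> T -> V1 -> ...

   i.e. T is spliced in directly after its leader.  Pointer and reference
   types are likewise pushed onto the TYPE_POINTER_TO / TYPE_REFERENCE_TO
   chain of their pointed-to type.  */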
1089
1090
1091 /* We keep prevailing tree SCCs in a hashtable with manual collision
1092 handling (in case all hashes compare the same) and keep the colliding
1093 entries in the tree_scc->next chain. */
1094
1095 struct tree_scc
1096 {
1097 tree_scc *next;
1098 /* Hash of the whole SCC. */
1099 hashval_t hash;
1100 /* Number of trees in the SCC. */
1101 unsigned len;
1102 /* Number of possible entries into the SCC (tree nodes [0..entry_len-1]
1103 which share the same individual tree hash). */
1104 unsigned entry_len;
1105 /* The members of the SCC.
1106 We only need to remember the first entry node candidate for prevailing
1107 SCCs (but of course have access to all entries for SCCs we are
1108 processing).
1109 ??? For prevailing SCCs we really only need hash and the first
1110 entry candidate, but that's too awkward to implement. */
1111 tree entries[1];
1112 };
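/* tree_scc objects are allocated with the trailing ENTRIES array sized for
   the whole SCC, as done in unify_scc below:

       tree_scc *scc
         = (tree_scc *) alloca (sizeof (tree_scc) + (len - 1) * sizeof (tree));

   so entries[0 .. len - 1] are all valid even though the field is declared
   with length 1.  */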
1113
1114 struct tree_scc_hasher : typed_noop_remove <tree_scc>
1115 {
1116 typedef tree_scc value_type;
1117 typedef tree_scc compare_type;
1118 static inline hashval_t hash (const value_type *);
1119 static inline bool equal (const value_type *, const compare_type *);
1120 };
1121
1122 hashval_t
1123 tree_scc_hasher::hash (const value_type *scc)
1124 {
1125 return scc->hash;
1126 }
1127
1128 bool
1129 tree_scc_hasher::equal (const value_type *scc1, const compare_type *scc2)
1130 {
1131 if (scc1->hash != scc2->hash
1132 || scc1->len != scc2->len
1133 || scc1->entry_len != scc2->entry_len)
1134 return false;
1135 return true;
1136 }
1137
1138 static hash_table <tree_scc_hasher> tree_scc_hash;
1139 static struct obstack tree_scc_hash_obstack;
1140
1141 static unsigned long num_merged_types;
1142 static unsigned long num_prevailing_types;
1143 static unsigned long num_type_scc_trees;
1144 static unsigned long total_scc_size;
1145 static unsigned long num_sccs_read;
1146 static unsigned long total_scc_size_merged;
1147 static unsigned long num_sccs_merged;
1148 static unsigned long num_scc_compares;
1149 static unsigned long num_scc_compare_collisions;
1150
1151
1152 /* Compare the two entries T1 and T2 of two SCCs that are possibly equal,
1153 recursing through in-SCC tree edges. Returns true if the SCCs entered
1154 through T1 and T2 are equal and fills in *MAP with the pairs of
1155 SCC entries we visited, starting with (*MAP)[0] = T1 and (*MAP)[1] = T2. */
1156
1157 static bool
1158 compare_tree_sccs_1 (tree t1, tree t2, tree **map)
1159 {
1160 enum tree_code code;
1161
1162 /* Mark already visited nodes. */
1163 TREE_ASM_WRITTEN (t2) = 1;
1164
1165 /* Push the pair onto map. */
1166 (*map)[0] = t1;
1167 (*map)[1] = t2;
1168 *map = *map + 2;
1169
1170 /* Compare value-fields. */
1171 #define compare_values(X) \
1172 do { \
1173 if (X(t1) != X(t2)) \
1174 return false; \
1175 } while (0)
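  /* For example, compare_values (TREE_CODE) expands to

         if (TREE_CODE (t1) != TREE_CODE (t2))
           return false;

     so any mismatching value-field bails out of the SCC comparison
     immediately.  */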
1176
1177 compare_values (TREE_CODE);
1178 code = TREE_CODE (t1);
1179
1180 if (!TYPE_P (t1))
1181 {
1182 compare_values (TREE_SIDE_EFFECTS);
1183 compare_values (TREE_CONSTANT);
1184 compare_values (TREE_READONLY);
1185 compare_values (TREE_PUBLIC);
1186 }
1187 compare_values (TREE_ADDRESSABLE);
1188 compare_values (TREE_THIS_VOLATILE);
1189 if (DECL_P (t1))
1190 compare_values (DECL_UNSIGNED);
1191 else if (TYPE_P (t1))
1192 compare_values (TYPE_UNSIGNED);
1193 if (TYPE_P (t1))
1194 compare_values (TYPE_ARTIFICIAL);
1195 else
1196 compare_values (TREE_NO_WARNING);
1197 compare_values (TREE_NOTHROW);
1198 compare_values (TREE_STATIC);
1199 if (code != TREE_BINFO)
1200 compare_values (TREE_PRIVATE);
1201 compare_values (TREE_PROTECTED);
1202 compare_values (TREE_DEPRECATED);
1203 if (TYPE_P (t1))
1204 {
1205 compare_values (TYPE_SATURATING);
1206 compare_values (TYPE_ADDR_SPACE);
1207 }
1208 else if (code == SSA_NAME)
1209 compare_values (SSA_NAME_IS_DEFAULT_DEF);
1210
1211 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1212 {
1213 compare_values (TREE_INT_CST_LOW);
1214 compare_values (TREE_INT_CST_HIGH);
1215 }
1216
1217 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1218 {
1219 /* ??? No suitable compare routine available. */
1220 REAL_VALUE_TYPE r1 = TREE_REAL_CST (t1);
1221 REAL_VALUE_TYPE r2 = TREE_REAL_CST (t2);
1222 if (r1.cl != r2.cl
1223 || r1.decimal != r2.decimal
1224 || r1.sign != r2.sign
1225 || r1.signalling != r2.signalling
1226 || r1.canonical != r2.canonical
1227 || r1.uexp != r2.uexp)
1228 return false;
1229 for (unsigned i = 0; i < SIGSZ; ++i)
1230 if (r1.sig[i] != r2.sig[i])
1231 return false;
1232 }
1233
1234 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1235 if (!fixed_compare (EQ_EXPR,
1236 TREE_FIXED_CST_PTR (t1), TREE_FIXED_CST_PTR (t2)))
1237 return false;
1238
1239
1240 /* We don't want to compare locations, so there is nothing to compare
1241 for TS_DECL_MINIMAL. */
1242
1243 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1244 {
1245 compare_values (DECL_MODE);
1246 compare_values (DECL_NONLOCAL);
1247 compare_values (DECL_VIRTUAL_P);
1248 compare_values (DECL_IGNORED_P);
1249 compare_values (DECL_ABSTRACT);
1250 compare_values (DECL_ARTIFICIAL);
1251 compare_values (DECL_USER_ALIGN);
1252 compare_values (DECL_PRESERVE_P);
1253 compare_values (DECL_EXTERNAL);
1254 compare_values (DECL_GIMPLE_REG_P);
1255 compare_values (DECL_ALIGN);
1256 if (code == LABEL_DECL)
1257 {
1258 compare_values (EH_LANDING_PAD_NR);
1259 compare_values (LABEL_DECL_UID);
1260 }
1261 else if (code == FIELD_DECL)
1262 {
1263 compare_values (DECL_PACKED);
1264 compare_values (DECL_NONADDRESSABLE_P);
1265 compare_values (DECL_OFFSET_ALIGN);
1266 }
1267 else if (code == VAR_DECL)
1268 {
1269 compare_values (DECL_HAS_DEBUG_EXPR_P);
1270 compare_values (DECL_NONLOCAL_FRAME);
1271 }
1272 if (code == RESULT_DECL
1273 || code == PARM_DECL
1274 || code == VAR_DECL)
1275 {
1276 compare_values (DECL_BY_REFERENCE);
1277 if (code == VAR_DECL
1278 || code == PARM_DECL)
1279 compare_values (DECL_HAS_VALUE_EXPR_P);
1280 }
1281 }
1282
1283 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1284 compare_values (DECL_REGISTER);
1285
1286 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1287 {
1288 compare_values (DECL_COMMON);
1289 compare_values (DECL_DLLIMPORT_P);
1290 compare_values (DECL_WEAK);
1291 compare_values (DECL_SEEN_IN_BIND_EXPR_P);
1292 compare_values (DECL_COMDAT);
1293 compare_values (DECL_VISIBILITY);
1294 compare_values (DECL_VISIBILITY_SPECIFIED);
1295 if (code == VAR_DECL)
1296 {
1297 compare_values (DECL_HARD_REGISTER);
1298 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1299 compare_values (DECL_IN_CONSTANT_POOL);
1300 compare_values (DECL_TLS_MODEL);
1301 }
1302 if (VAR_OR_FUNCTION_DECL_P (t1))
1303 compare_values (DECL_INIT_PRIORITY);
1304 }
1305
1306 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1307 {
1308 compare_values (DECL_BUILT_IN_CLASS);
1309 compare_values (DECL_STATIC_CONSTRUCTOR);
1310 compare_values (DECL_STATIC_DESTRUCTOR);
1311 compare_values (DECL_UNINLINABLE);
1312 compare_values (DECL_POSSIBLY_INLINED);
1313 compare_values (DECL_IS_NOVOPS);
1314 compare_values (DECL_IS_RETURNS_TWICE);
1315 compare_values (DECL_IS_MALLOC);
1316 compare_values (DECL_IS_OPERATOR_NEW);
1317 compare_values (DECL_DECLARED_INLINE_P);
1318 compare_values (DECL_STATIC_CHAIN);
1319 compare_values (DECL_NO_INLINE_WARNING_P);
1320 compare_values (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT);
1321 compare_values (DECL_NO_LIMIT_STACK);
1322 compare_values (DECL_DISREGARD_INLINE_LIMITS);
1323 compare_values (DECL_PURE_P);
1324 compare_values (DECL_LOOPING_CONST_OR_PURE_P);
1325 compare_values (DECL_FINAL_P);
1326 compare_values (DECL_CXX_CONSTRUCTOR_P);
1327 compare_values (DECL_CXX_DESTRUCTOR_P);
1328 if (DECL_BUILT_IN_CLASS (t1) != NOT_BUILT_IN)
1329 compare_values (DECL_FUNCTION_CODE);
1330 if (DECL_STATIC_DESTRUCTOR (t1))
1331 compare_values (DECL_FINI_PRIORITY);
1332 }
1333
1334 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1335 {
1336 compare_values (TYPE_MODE);
1337 compare_values (TYPE_STRING_FLAG);
1338 compare_values (TYPE_NO_FORCE_BLK);
1339 compare_values (TYPE_NEEDS_CONSTRUCTING);
1340 if (RECORD_OR_UNION_TYPE_P (t1))
1341 {
1342 compare_values (TYPE_TRANSPARENT_AGGR);
1343 compare_values (TYPE_FINAL_P);
1344 }
1345 else if (code == ARRAY_TYPE)
1346 compare_values (TYPE_NONALIASED_COMPONENT);
1347 compare_values (TYPE_PACKED);
1348 compare_values (TYPE_RESTRICT);
1349 compare_values (TYPE_USER_ALIGN);
1350 compare_values (TYPE_READONLY);
1351 compare_values (TYPE_PRECISION);
1352 compare_values (TYPE_ALIGN);
1353 compare_values (TYPE_ALIAS_SET);
1354 }
1355
1356 /* We don't want to compare locations, so there is nothing to compare
1357 for TS_EXP. */
1358
1359 /* BLOCKs are function local and we don't merge anything there, so
1360 simply refuse to merge. */
1361 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
1362 return false;
1363
1364 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1365 if (strcmp (TRANSLATION_UNIT_LANGUAGE (t1),
1366 TRANSLATION_UNIT_LANGUAGE (t2)) != 0)
1367 return false;
1368
1369 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
1370 gcc_unreachable ();
1371
1372 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1373 if (memcmp (TREE_OPTIMIZATION (t1), TREE_OPTIMIZATION (t2),
1374 sizeof (struct cl_optimization)) != 0)
1375 return false;
1376
1377 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1378 if (vec_safe_length (BINFO_BASE_ACCESSES (t1))
1379 != vec_safe_length (BINFO_BASE_ACCESSES (t2)))
1380 return false;
1381
1382 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1383 compare_values (CONSTRUCTOR_NELTS);
1384
1385 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1386 if (IDENTIFIER_LENGTH (t1) != IDENTIFIER_LENGTH (t2)
1387 || memcmp (IDENTIFIER_POINTER (t1), IDENTIFIER_POINTER (t2),
1388 IDENTIFIER_LENGTH (t1)) != 0)
1389 return false;
1390
1391 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1392 if (TREE_STRING_LENGTH (t1) != TREE_STRING_LENGTH (t2)
1393 || memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
1394 TREE_STRING_LENGTH (t1)) != 0)
1395 return false;
1396
1397 if (code == OMP_CLAUSE)
1398 {
1399 compare_values (OMP_CLAUSE_CODE);
1400 switch (OMP_CLAUSE_CODE (t1))
1401 {
1402 case OMP_CLAUSE_DEFAULT:
1403 compare_values (OMP_CLAUSE_DEFAULT_KIND);
1404 break;
1405 case OMP_CLAUSE_SCHEDULE:
1406 compare_values (OMP_CLAUSE_SCHEDULE_KIND);
1407 break;
1408 case OMP_CLAUSE_DEPEND:
1409 compare_values (OMP_CLAUSE_DEPEND_KIND);
1410 break;
1411 case OMP_CLAUSE_MAP:
1412 compare_values (OMP_CLAUSE_MAP_KIND);
1413 break;
1414 case OMP_CLAUSE_PROC_BIND:
1415 compare_values (OMP_CLAUSE_PROC_BIND_KIND);
1416 break;
1417 case OMP_CLAUSE_REDUCTION:
1418 compare_values (OMP_CLAUSE_REDUCTION_CODE);
1419 compare_values (OMP_CLAUSE_REDUCTION_GIMPLE_INIT);
1420 compare_values (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE);
1421 break;
1422 default:
1423 break;
1424 }
1425 }
1426
1427 #undef compare_values
1428
1429
1430 /* Compare pointer fields. */
1431
1432 /* Recurse. This was produced by search-and-replace from DFS_write_tree_body.
1433 Folding the early checks into the compare_tree_edges recursion
1434 macro makes debugging much quicker, as you can break on
1435 compare_tree_sccs_1 and simply "finish" until a call returns false
1436 to spot the SCC members with the difference. */
1437 #define compare_tree_edges(E1, E2) \
1438 do { \
1439 tree t1_ = (E1), t2_ = (E2); \
1440 if (t1_ != t2_ \
1441 && (!t1_ || !t2_ \
1442 || !TREE_VISITED (t2_) \
1443 || (!TREE_ASM_WRITTEN (t2_) \
1444 && !compare_tree_sccs_1 (t1_, t2_, map)))) \
1445 return false; \
1446 /* Only non-NULL trees outside of the SCC may compare equal. */ \
1447 gcc_checking_assert (t1_ != t2_ || (!t2_ || !TREE_VISITED (t2_))); \
1448 } while (0)
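  /* In other words (illustrative), compare_tree_edges (E1, E2) succeeds
     immediately for pointer-identical or already-compared edges, fails for
     NULL/non-NULL mismatches and for differing nodes outside the SCC
     (TREE_VISITED not set on E2), and recurses via compare_tree_sccs_1 the
     first time it meets a not-yet-compared member of the SCC under test.  */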
1449
1450 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1451 {
1452 if (code != IDENTIFIER_NODE)
1453 compare_tree_edges (TREE_TYPE (t1), TREE_TYPE (t2));
1454 }
1455
1456 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1457 {
1458 unsigned i;
1459 /* Note that the number of elements for EXPR has already been emitted
1460 in EXPR's header (see streamer_write_tree_header). */
1461 for (i = 0; i < VECTOR_CST_NELTS (t1); ++i)
1462 compare_tree_edges (VECTOR_CST_ELT (t1, i), VECTOR_CST_ELT (t2, i));
1463 }
1464
1465 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1466 {
1467 compare_tree_edges (TREE_REALPART (t1), TREE_REALPART (t2));
1468 compare_tree_edges (TREE_IMAGPART (t1), TREE_IMAGPART (t2));
1469 }
1470
1471 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1472 {
1473 compare_tree_edges (DECL_NAME (t1), DECL_NAME (t2));
1474 /* ??? Global decls from different TUs have non-matching
1475 TRANSLATION_UNIT_DECLs. Only consider a small set of
1476 decls equivalent, we should not end up merging others. */
1477 if ((code == TYPE_DECL
1478 || code == NAMESPACE_DECL
1479 || code == IMPORTED_DECL
1480 || code == CONST_DECL
1481 || (VAR_OR_FUNCTION_DECL_P (t1)
1482 && (TREE_PUBLIC (t1) || DECL_EXTERNAL (t1))))
1483 && DECL_FILE_SCOPE_P (t1) && DECL_FILE_SCOPE_P (t2))
1484 ;
1485 else
1486 compare_tree_edges (DECL_CONTEXT (t1), DECL_CONTEXT (t2));
1487 }
1488
1489 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1490 {
1491 compare_tree_edges (DECL_SIZE (t1), DECL_SIZE (t2));
1492 compare_tree_edges (DECL_SIZE_UNIT (t1), DECL_SIZE_UNIT (t2));
1493 compare_tree_edges (DECL_ATTRIBUTES (t1), DECL_ATTRIBUTES (t2));
1494 if ((code == VAR_DECL
1495 || code == PARM_DECL)
1496 && DECL_HAS_VALUE_EXPR_P (t1))
1497 compare_tree_edges (DECL_VALUE_EXPR (t1), DECL_VALUE_EXPR (t2));
1498 if (code == VAR_DECL
1499 && DECL_HAS_DEBUG_EXPR_P (t1))
1500 compare_tree_edges (DECL_DEBUG_EXPR (t1), DECL_DEBUG_EXPR (t2));
1501 /* LTO specific edges. */
1502 if (code != FUNCTION_DECL
1503 && code != TRANSLATION_UNIT_DECL)
1504 compare_tree_edges (DECL_INITIAL (t1), DECL_INITIAL (t2));
1505 }
1506
1507 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1508 {
1509 if (code == FUNCTION_DECL)
1510 {
1511 tree a1, a2;
1512 for (a1 = DECL_ARGUMENTS (t1), a2 = DECL_ARGUMENTS (t2);
1513 a1 || a2;
1514 a1 = TREE_CHAIN (a1), a2 = TREE_CHAIN (a2))
1515 compare_tree_edges (a1, a2);
1516 compare_tree_edges (DECL_RESULT (t1), DECL_RESULT (t2));
1517 }
1518 else if (code == TYPE_DECL)
1519 compare_tree_edges (DECL_ORIGINAL_TYPE (t1), DECL_ORIGINAL_TYPE (t2));
1520 compare_tree_edges (DECL_VINDEX (t1), DECL_VINDEX (t2));
1521 }
1522
1523 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1524 {
1525 /* Make sure we don't inadvertently set the assembler name. */
1526 if (DECL_ASSEMBLER_NAME_SET_P (t1))
1527 compare_tree_edges (DECL_ASSEMBLER_NAME (t1),
1528 DECL_ASSEMBLER_NAME (t2));
1529 compare_tree_edges (DECL_SECTION_NAME (t1), DECL_SECTION_NAME (t2));
1530 compare_tree_edges (DECL_COMDAT_GROUP (t1), DECL_COMDAT_GROUP (t2));
1531 }
1532
1533 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1534 {
1535 compare_tree_edges (DECL_FIELD_OFFSET (t1), DECL_FIELD_OFFSET (t2));
1536 compare_tree_edges (DECL_BIT_FIELD_TYPE (t1), DECL_BIT_FIELD_TYPE (t2));
1537 compare_tree_edges (DECL_BIT_FIELD_REPRESENTATIVE (t1),
1538 DECL_BIT_FIELD_REPRESENTATIVE (t2));
1539 compare_tree_edges (DECL_FIELD_BIT_OFFSET (t1),
1540 DECL_FIELD_BIT_OFFSET (t2));
1541 compare_tree_edges (DECL_FCONTEXT (t1), DECL_FCONTEXT (t2));
1542 }
1543
1544 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1545 {
1546 compare_tree_edges (DECL_FUNCTION_PERSONALITY (t1),
1547 DECL_FUNCTION_PERSONALITY (t2));
1548 /* DECL_FUNCTION_SPECIFIC_TARGET is not yet created. We compare
1549 the attribute list instead. */
1550 compare_tree_edges (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t1),
1551 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t2));
1552 }
1553
1554 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1555 {
1556 compare_tree_edges (TYPE_SIZE (t1), TYPE_SIZE (t2));
1557 compare_tree_edges (TYPE_SIZE_UNIT (t1), TYPE_SIZE_UNIT (t2));
1558 compare_tree_edges (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2));
1559 compare_tree_edges (TYPE_NAME (t1), TYPE_NAME (t2));
1560 /* Do not compare TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
1561 reconstructed during fixup. */
1562 /* Do not compare TYPE_NEXT_VARIANT, we reconstruct the variant lists
1563 during fixup. */
1564 compare_tree_edges (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2));
1565 /* ??? Global types from different TUs have non-matching
1566 TRANSLATION_UNIT_DECLs. Still merge them if they are otherwise
1567 equal. */
1568 if (TYPE_FILE_SCOPE_P (t1) && TYPE_FILE_SCOPE_P (t2))
1569 ;
1570 else
1571 compare_tree_edges (TYPE_CONTEXT (t1), TYPE_CONTEXT (t2));
1572 /* TYPE_CANONICAL is re-computed during type merging, so do not
1573 compare it here. */
1574 compare_tree_edges (TYPE_STUB_DECL (t1), TYPE_STUB_DECL (t2));
1575 }
1576
1577 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1578 {
1579 if (code == ENUMERAL_TYPE)
1580 compare_tree_edges (TYPE_VALUES (t1), TYPE_VALUES (t2));
1581 else if (code == ARRAY_TYPE)
1582 compare_tree_edges (TYPE_DOMAIN (t1), TYPE_DOMAIN (t2));
1583 else if (RECORD_OR_UNION_TYPE_P (t1))
1584 {
1585 tree f1, f2;
1586 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
1587 f1 || f2;
1588 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1589 compare_tree_edges (f1, f2);
1590 compare_tree_edges (TYPE_BINFO (t1), TYPE_BINFO (t2));
1591 }
1592 else if (code == FUNCTION_TYPE
1593 || code == METHOD_TYPE)
1594 compare_tree_edges (TYPE_ARG_TYPES (t1), TYPE_ARG_TYPES (t2));
1595 if (!POINTER_TYPE_P (t1))
1596 compare_tree_edges (TYPE_MINVAL (t1), TYPE_MINVAL (t2));
1597 compare_tree_edges (TYPE_MAXVAL (t1), TYPE_MAXVAL (t2));
1598 }
1599
1600 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1601 {
1602 compare_tree_edges (TREE_PURPOSE (t1), TREE_PURPOSE (t2));
1603 compare_tree_edges (TREE_VALUE (t1), TREE_VALUE (t2));
1604 compare_tree_edges (TREE_CHAIN (t1), TREE_CHAIN (t2));
1605 }
1606
1607 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1608 for (int i = 0; i < TREE_VEC_LENGTH (t1); i++)
1609 compare_tree_edges (TREE_VEC_ELT (t1, i), TREE_VEC_ELT (t2, i));
1610
1611 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1612 {
1613 for (int i = 0; i < TREE_OPERAND_LENGTH (t1); i++)
1614 compare_tree_edges (TREE_OPERAND (t1, i),
1615 TREE_OPERAND (t2, i));
1616
1617 /* BLOCKs are function local and we don't merge anything there. */
1618 if (TREE_BLOCK (t1) || TREE_BLOCK (t2))
1619 return false;
1620 }
1621
1622 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1623 {
1624 unsigned i;
1625 tree t;
1626 /* Lengths have already been compared above. */
1627 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t1), i, t)
1628 compare_tree_edges (t, BINFO_BASE_BINFO (t2, i));
1629 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t1), i, t)
1630 compare_tree_edges (t, BINFO_BASE_ACCESS (t2, i));
1631 compare_tree_edges (BINFO_OFFSET (t1), BINFO_OFFSET (t2));
1632 compare_tree_edges (BINFO_VTABLE (t1), BINFO_VTABLE (t2));
1633 compare_tree_edges (BINFO_VPTR_FIELD (t1), BINFO_VPTR_FIELD (t2));
1634 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1635 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1636 }
1637
1638 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1639 {
1640 unsigned i;
1641 tree index, value;
1642 /* Lengths have already been compared above. */
1643 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t1), i, index, value)
1644 {
1645 compare_tree_edges (index, CONSTRUCTOR_ELT (t2, i)->index);
1646 compare_tree_edges (value, CONSTRUCTOR_ELT (t2, i)->value);
1647 }
1648 }
1649
1650 if (code == OMP_CLAUSE)
1651 {
1652 int i;
1653
1654 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t1)]; i++)
1655 compare_tree_edges (OMP_CLAUSE_OPERAND (t1, i),
1656 OMP_CLAUSE_OPERAND (t2, i));
1657 compare_tree_edges (OMP_CLAUSE_CHAIN (t1), OMP_CLAUSE_CHAIN (t2));
1658 }
1659
1660 #undef compare_tree_edges
1661
1662 return true;
1663 }
1664
1665 /* Compare the tree scc SCC to the prevailing candidate PSCC, filling
1666 out MAP if they are equal. */
1667
1668 static bool
1669 compare_tree_sccs (tree_scc *pscc, tree_scc *scc,
1670 tree *map)
1671 {
1672 /* Assume SCC entry hashes are sorted by their cardinality, which
1673 means we can simply take the first n-tuple of equal hashes
1674 (recorded as entry_len) and do n SCC entry candidate
1675 comparisons. */
1676 for (unsigned i = 0; i < pscc->entry_len; ++i)
1677 {
1678 tree *mapp = map;
1679 num_scc_compare_collisions++;
1680 if (compare_tree_sccs_1 (pscc->entries[0], scc->entries[i], &mapp))
1681 {
1682 /* Equal - no need to reset TREE_VISITED or TREE_ASM_WRITTEN
1683 on the scc as all trees will be freed. */
1684 return true;
1685 }
1686 /* Reset TREE_ASM_WRITTEN on scc for the next compare or in case
1687 the SCC prevails. */
1688 for (unsigned j = 0; j < scc->len; ++j)
1689 TREE_ASM_WRITTEN (scc->entries[j]) = 0;
1690 }
1691
1692 return false;
1693 }
1694
1695 /* qsort comparison function to sort a map of pointer pairs by the 2nd
1696 pointer. */
1697
1698 static int
1699 cmp_tree (const void *p1_, const void *p2_)
1700 {
1701 tree *p1 = (tree *)(const_cast<void *>(p1_));
1702 tree *p2 = (tree *)(const_cast<void *>(p2_));
1703 if (p1[1] == p2[1])
1704 return 0;
1705 return ((uintptr_t)p1[1] < (uintptr_t)p2[1]) ? -1 : 1;
1706 }
1707
1708 /* Try to unify the SCC formed by nodes FROM to FROM + LEN in CACHE, with
1709 hash value SCC_HASH, with an already recorded SCC. Return true if
1710 that was successful, otherwise return false. */
1711
1712 static bool
1713 unify_scc (struct streamer_tree_cache_d *cache, unsigned from,
1714 unsigned len, unsigned scc_entry_len, hashval_t scc_hash)
1715 {
1716 bool unified_p = false;
1717 tree_scc *scc
1718 = (tree_scc *) alloca (sizeof (tree_scc) + (len - 1) * sizeof (tree));
1719 scc->next = NULL;
1720 scc->hash = scc_hash;
1721 scc->len = len;
1722 scc->entry_len = scc_entry_len;
1723 for (unsigned i = 0; i < len; ++i)
1724 {
1725 tree t = streamer_tree_cache_get_tree (cache, from + i);
1726 scc->entries[i] = t;
1727 /* Do not merge SCCs with local entities inside them. Also do
1728 not merge TRANSLATION_UNIT_DECLs. */
1729 if (TREE_CODE (t) == TRANSLATION_UNIT_DECL
1730 || (VAR_OR_FUNCTION_DECL_P (t)
1731 && !(TREE_PUBLIC (t) || DECL_EXTERNAL (t)))
1732 || TREE_CODE (t) == LABEL_DECL)
1733 {
1734 /* Avoid doing any work for these cases and do not bother to
1735 record the SCCs for further merging. */
1736 return false;
1737 }
1738 }
1739
1740 /* Look for the list of candidate SCCs to compare against. */
1741 tree_scc **slot;
1742 slot = tree_scc_hash.find_slot_with_hash (scc, scc_hash, INSERT);
1743 if (*slot)
1744 {
1745 /* Try unifying against each candidate. */
1746 num_scc_compares++;
1747
1748 /* Set TREE_VISITED on the scc so we can easily identify tree nodes
1749 outside of the scc when following tree edges. Make sure
1750 that TREE_ASM_WRITTEN is unset so we can use it as 2nd bit
1751 to track whether we visited the SCC member during the compare.
1752 We cannot use TREE_VISITED on the pscc members as the extended
1753 scc and pscc can overlap. */
1754 for (unsigned i = 0; i < scc->len; ++i)
1755 {
1756 TREE_VISITED (scc->entries[i]) = 1;
1757 gcc_checking_assert (!TREE_ASM_WRITTEN (scc->entries[i]));
1758 }
1759
1760 tree *map = XALLOCAVEC (tree, 2 * len);
1761 for (tree_scc *pscc = *slot; pscc; pscc = pscc->next)
1762 {
1763 if (!compare_tree_sccs (pscc, scc, map))
1764 continue;
1765
1766 /* Found an equal SCC. */
1767 unified_p = true;
1768 num_scc_compare_collisions--;
1769 num_sccs_merged++;
1770 total_scc_size_merged += len;
1771
1772 #ifdef ENABLE_CHECKING
1773 for (unsigned i = 0; i < len; ++i)
1774 {
1775 tree t = map[2*i+1];
1776 enum tree_code code = TREE_CODE (t);
1777 /* IDENTIFIER_NODEs should be singletons and are merged by the
1778 streamer. The others should be singletons, too, and we
1779 should not merge them in any way. */
1780 gcc_assert (code != TRANSLATION_UNIT_DECL
1781 && code != IDENTIFIER_NODE
1782 && !streamer_handle_as_builtin_p (t));
1783 }
1784 #endif
1785
1786 /* Fixup the streamer cache with the prevailing nodes according
1787 to the tree node mapping computed by compare_tree_sccs. */
1788 if (len == 1)
1789 streamer_tree_cache_replace_tree (cache, pscc->entries[0], from);
1790 else
1791 {
1792 tree *map2 = XALLOCAVEC (tree, 2 * len);
1793 for (unsigned i = 0; i < len; ++i)
1794 {
1795 map2[i*2] = (tree)(uintptr_t)(from + i);
1796 map2[i*2+1] = scc->entries[i];
1797 }
1798 qsort (map2, len, 2 * sizeof (tree), cmp_tree);
1799 qsort (map, len, 2 * sizeof (tree), cmp_tree);
1800 for (unsigned i = 0; i < len; ++i)
1801 streamer_tree_cache_replace_tree (cache, map[2*i],
1802 (uintptr_t)map2[2*i]);
1803 }
1804
1805 /* Free the tree nodes from the read SCC. */
1806 for (unsigned i = 0; i < len; ++i)
1807 {
1808 enum tree_code code;
1809 if (TYPE_P (scc->entries[i]))
1810 num_merged_types++;
1811 code = TREE_CODE (scc->entries[i]);
1812 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1813 vec_free (CONSTRUCTOR_ELTS (scc->entries[i]));
1814 ggc_free (scc->entries[i]);
1815 }
1816
1817 break;
1818 }
1819
1820 /* Reset TREE_VISITED if we didn't unify the SCC with another. */
1821 if (!unified_p)
1822 for (unsigned i = 0; i < scc->len; ++i)
1823 TREE_VISITED (scc->entries[i]) = 0;
1824 }
1825
1826 /* If we didn't unify it with any candidate, duplicate the relevant
1827 pieces to permanent storage and link it into the chain. */
1828 if (!unified_p)
1829 {
1830 tree_scc *pscc
1831 = XOBNEWVAR (&tree_scc_hash_obstack, tree_scc, sizeof (tree_scc));
1832 memcpy (pscc, scc, sizeof (tree_scc));
1833 pscc->next = (*slot);
1834 *slot = pscc;
1835 }
1836 return unified_p;
1837 }
1838
1839
1840 /* Read all the symbols from buffer DATA, using descriptors in DECL_DATA.
1841 RESOLUTIONS is the set of symbols picked by the linker (read from the
1842 resolution file when the linker plugin is being used). */
1843
1844 static void
1845 lto_read_decls (struct lto_file_decl_data *decl_data, const void *data,
1846 vec<ld_plugin_symbol_resolution_t> resolutions)
1847 {
1848 const struct lto_decl_header *header = (const struct lto_decl_header *) data;
1849 const int decl_offset = sizeof (struct lto_decl_header);
1850 const int main_offset = decl_offset + header->decl_state_size;
1851 const int string_offset = main_offset + header->main_size;
1852 struct lto_input_block ib_main;
1853 struct data_in *data_in;
1854 unsigned int i;
1855 const uint32_t *data_ptr, *data_end;
1856 uint32_t num_decl_states;
1857
1858 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
1859 header->main_size);
1860
1861 data_in = lto_data_in_create (decl_data, (const char *) data + string_offset,
1862 header->string_size, resolutions);
1863
1864 /* We do not uniquify the pre-loaded cache entries; those are middle-end
1865 internal types that should not be merged. */
1866
1867 /* Read the global declarations and types. */
1868 while (ib_main.p < ib_main.len)
1869 {
1870 tree t;
1871 unsigned from = data_in->reader_cache->nodes.length ();
1872 /* Read and uniquify SCCs as in the input stream. */
1873 enum LTO_tags tag = streamer_read_record_start (&ib_main);
1874 if (tag == LTO_tree_scc)
1875 {
1876 unsigned len_;
1877 unsigned scc_entry_len;
1878 hashval_t scc_hash = lto_input_scc (&ib_main, data_in, &len_,
1879 &scc_entry_len);
1880 unsigned len = data_in->reader_cache->nodes.length () - from;
1881 gcc_assert (len == len_);
1882
1883 total_scc_size += len;
1884 num_sccs_read++;
1885
1886 /* We have the special case of size-1 SCCs that are pre-merged,
1887 for example by means of identifier and string sharing.
1888 ??? Maybe we should avoid streaming those as SCCs. */
1889 tree first = streamer_tree_cache_get_tree (data_in->reader_cache,
1890 from);
1891 if (len == 1
1892 && (TREE_CODE (first) == IDENTIFIER_NODE
1893 || TREE_CODE (first) == INTEGER_CST
1894 || TREE_CODE (first) == TRANSLATION_UNIT_DECL
1895 || streamer_handle_as_builtin_p (first)))
1896 continue;
1897
1898 /* Try to unify the SCC with already existing ones. */
1899 if (!flag_ltrans
1900 && unify_scc (data_in->reader_cache, from,
1901 len, scc_entry_len, scc_hash))
1902 continue;
1903
1904 /* Do remaining fixup tasks for prevailing nodes. */
1905 bool seen_type = false;
1906 for (unsigned i = 0; i < len; ++i)
1907 {
1908 tree t = streamer_tree_cache_get_tree (data_in->reader_cache,
1909 from + i);
1910 /* Reconstruct the type variant and pointer-to/reference-to
1911 chains. */
1912 if (TYPE_P (t))
1913 {
1914 seen_type = true;
1915 num_prevailing_types++;
1916 lto_fixup_prevailing_type (t);
1917 }
1918 /* Compute the canonical type of all types.
1919 ??? Should be able to assert that !TYPE_CANONICAL. */
1920 if (TYPE_P (t) && !TYPE_CANONICAL (t))
1921 gimple_register_canonical_type (t);
1922 /* Link shared INTEGER_CSTs into the TYPE_CACHED_VALUEs of their
1923 type when that type is also a member of this SCC. */
1924 if (TREE_CODE (t) == INTEGER_CST
1925 && !TREE_OVERFLOW (t))
1926 cache_integer_cst (t);
1927 /* Re-build DECL_FUNCTION_SPECIFIC_TARGET; we need that
1928 for both the WPA and LTRANS stages. */
1929 if (TREE_CODE (t) == FUNCTION_DECL)
1930 {
1931 tree attr = lookup_attribute ("target", DECL_ATTRIBUTES (t));
1932 if (attr)
1933 targetm.target_option.valid_attribute_p
1934 (t, NULL_TREE, TREE_VALUE (attr), 0);
1935 }
1936 /* Register TYPE_DECLs with the debuginfo machinery. */
1937 if (!flag_wpa
1938 && TREE_CODE (t) == TYPE_DECL)
1939 debug_hooks->type_decl (t, !DECL_FILE_SCOPE_P (t));
1940 if (!flag_ltrans)
1941 {
1942 /* Register variables and functions with the
1943 symbol table. */
1944 if (TREE_CODE (t) == VAR_DECL)
1945 lto_register_var_decl_in_symtab (data_in, t, from + i);
1946 else if (TREE_CODE (t) == FUNCTION_DECL
1947 && !DECL_BUILT_IN (t))
1948 lto_register_function_decl_in_symtab (data_in, t, from + i);
1949 /* Scan the tree for references to global functions or
1950 variables and record those for later fixup. */
1951 if (mentions_vars_p (t))
1952 vec_safe_push (tree_with_vars, t);
1953 }
1954 }
1955 if (seen_type)
1956 num_type_scc_trees += len;
1957 }
1958 else
1959 {
1960 /* Pickle stray references. */
1961 t = lto_input_tree_1 (&ib_main, data_in, tag, 0);
1962 gcc_assert (t && data_in->reader_cache->nodes.length () == from);
1963 }
1964 }
1965
1966 /* Read in lto_in_decl_state objects. */
1967 data_ptr = (const uint32_t *) ((const char*) data + decl_offset);
1968 data_end =
1969 (const uint32_t *) ((const char*) data_ptr + header->decl_state_size);
1970 num_decl_states = *data_ptr++;
1971
1972 gcc_assert (num_decl_states > 0);
1973 decl_data->global_decl_state = lto_new_in_decl_state ();
1974 data_ptr = lto_read_in_decl_state (data_in, data_ptr,
1975 decl_data->global_decl_state);
1976
1977 /* Read in per-function decl states and enter them in hash table. */
1978 decl_data->function_decl_states =
1979 htab_create_ggc (37, lto_hash_in_decl_state, lto_eq_in_decl_state, NULL);
1980
1981 for (i = 1; i < num_decl_states; i++)
1982 {
1983 struct lto_in_decl_state *state = lto_new_in_decl_state ();
1984 void **slot;
1985
1986 data_ptr = lto_read_in_decl_state (data_in, data_ptr, state);
1987 slot = htab_find_slot (decl_data->function_decl_states, state, INSERT);
1988 gcc_assert (*slot == NULL);
1989 *slot = state;
1990 }
1991
1992 if (data_ptr != data_end)
1993 internal_error ("bytecode stream: garbage at the end of symbols section");
1994
1995 /* Set the current decl state to be the global state. */
1996 decl_data->current_decl_state = decl_data->global_decl_state;
1997
1998 lto_data_in_delete (data_in);
1999 }
2000
2001 /* Custom hex parser used instead of strtoll, which is not portable. */
2002
2003 static HOST_WIDEST_INT
2004 lto_parse_hex (const char *p)
2005 {
2006 HOST_WIDEST_INT ret = 0;
2007
2008 for (; *p != '\0'; ++p)
2009 {
2010 char c = *p;
2011 unsigned char part;
2012 ret <<= 4;
2013 if (c >= '0' && c <= '9')
2014 part = c - '0';
2015 else if (c >= 'a' && c <= 'f')
2016 part = c - 'a' + 10;
2017 else if (c >= 'A' && c <= 'F')
2018 part = c - 'A' + 10;
2019 else
2020 internal_error ("could not parse hex number");
2021 ret |= part;
2022 }
2023
2024 return ret;
2025 }
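
/* Editorial note (worked example): lto_parse_hex ("1a2b") accumulates
   0x1 -> 0x1a -> 0x1a2 -> 0x1a2b by shifting the running value left four
   bits and OR-ing in each digit; any non-hex character triggers
   internal_error.  */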
2026
2027 /* Read the resolution for FILE, whose sub-file ids are recorded in
2028 FILE_IDS. The resolution is read from RESOLUTION. */
2029
2030 static void
2031 lto_resolution_read (splay_tree file_ids, FILE *resolution, lto_file *file)
2032 {
2033 /* We require that objects in the resolution file are in the same
2034 order as the lto1 command line. */
2035 unsigned int name_len;
2036 char *obj_name;
2037 unsigned int num_symbols;
2038 unsigned int i;
2039 struct lto_file_decl_data *file_data;
2040 splay_tree_node nd = NULL;
2041
2042 if (!resolution)
2043 return;
2044
2045 name_len = strlen (file->filename);
2046 obj_name = XNEWVEC (char, name_len + 1);
2047 fscanf (resolution, " "); /* Read white space. */
2048
2049 fread (obj_name, sizeof (char), name_len, resolution);
2050 obj_name[name_len] = '\0';
2051 if (filename_cmp (obj_name, file->filename) != 0)
2052 internal_error ("unexpected file name %s in linker resolution file. "
2053 "Expected %s", obj_name, file->filename);
2054 if (file->offset != 0)
2055 {
2056 int t;
2057 char offset_p[17];
2058 HOST_WIDEST_INT offset;
2059 t = fscanf (resolution, "@0x%16s", offset_p);
2060 if (t != 1)
2061 internal_error ("could not parse file offset");
2062 offset = lto_parse_hex (offset_p);
2063 if (offset != file->offset)
2064 internal_error ("unexpected offset");
2065 }
2066
2067 free (obj_name);
2068
2069 fscanf (resolution, "%u", &num_symbols);
2070
2071 for (i = 0; i < num_symbols; i++)
2072 {
2073 int t;
2074 unsigned index;
2075 unsigned HOST_WIDE_INT id;
2076 char r_str[27];
2077 enum ld_plugin_symbol_resolution r = (enum ld_plugin_symbol_resolution) 0;
2078 unsigned int j;
2079 unsigned int lto_resolution_str_len =
2080 sizeof (lto_resolution_str) / sizeof (char *);
2081 res_pair rp;
2082
2083 t = fscanf (resolution, "%u " HOST_WIDE_INT_PRINT_HEX_PURE " %26s %*[^\n]\n",
2084 &index, &id, r_str);
2085 if (t != 3)
2086 internal_error ("invalid line in the resolution file");
2087
2088 for (j = 0; j < lto_resolution_str_len; j++)
2089 {
2090 if (strcmp (lto_resolution_str[j], r_str) == 0)
2091 {
2092 r = (enum ld_plugin_symbol_resolution) j;
2093 break;
2094 }
2095 }
2096 if (j == lto_resolution_str_len)
2097 internal_error ("invalid resolution in the resolution file");
2098
2099 if (!(nd && lto_splay_tree_id_equal_p (nd->key, id)))
2100 {
2101 nd = lto_splay_tree_lookup (file_ids, id);
2102 if (nd == NULL)
2103 internal_error ("resolution sub id %wx not in object file", id);
2104 }
2105
2106 file_data = (struct lto_file_decl_data *)nd->value;
2107 /* The indexes are very sparse. To save memory, store them in a compact
2108 format that is only unpacked later when the subfile is processed. */
2109 rp.res = r;
2110 rp.index = index;
2111 file_data->respairs.safe_push (rp);
2112 if (file_data->max_index < index)
2113 file_data->max_index = index;
2114 }
2115 }
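
/* Editorial sketch of the per-object format consumed above (all values are
   hypothetical).  For each object the resolution file contains the object
   name, followed directly by "@0x<offset>" when the object lives at a
   nonzero offset (e.g. inside an archive), then the number of symbols and
   one line per symbol holding the symbol index, the sub-file id in plain
   hex and the resolution string, e.g.:

       libfoo.a@0x1a2b
       2
       5 1234abcd PREVAILING_DEF
       9 1234abcd RESOLVED_IR

   Anything after the resolution string up to the end of the line is skipped
   by the "%*[^\n]" conversion.  The object count at the start of the file is
   read in read_cgraph_and_symbols, not here.  */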
2116
2117 /* List of file_decl_datas. */
2118 struct file_data_list
2119 {
2120 struct lto_file_decl_data *first, *last;
2121 };
2122
2123 /* Is the name for an id'ed LTO section? */
2124
2125 static int
2126 lto_section_with_id (const char *name, unsigned HOST_WIDE_INT *id)
2127 {
2128 const char *s;
2129
2130 if (strncmp (name, LTO_SECTION_NAME_PREFIX, strlen (LTO_SECTION_NAME_PREFIX)))
2131 return 0;
2132 s = strrchr (name, '.');
2133 return s && sscanf (s, "." HOST_WIDE_INT_PRINT_HEX_PURE, id) == 1;
2134 }
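
/* Editorial note: id'ed section names start with LTO_SECTION_NAME_PREFIX and
   end in "." followed by the sub-module id in plain hex; e.g. a hypothetical
   ".gnu.lto_foo.1234abcd" yields the id 0x1234abcd.  Only the text after the
   last '.' is parsed; everything before it is ignored here.  */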
2135
2136 /* Create the file_data for each sub-file id. */
2137
2138 static int
2139 create_subid_section_table (struct lto_section_slot *ls, splay_tree file_ids,
2140 struct file_data_list *list)
2141 {
2142 struct lto_section_slot s_slot, *new_slot;
2143 unsigned HOST_WIDE_INT id;
2144 splay_tree_node nd;
2145 void **hash_slot;
2146 char *new_name;
2147 struct lto_file_decl_data *file_data;
2148
2149 if (!lto_section_with_id (ls->name, &id))
2150 return 1;
2151
2152 /* Find the hash table for this sub-module id. */
2153 nd = lto_splay_tree_lookup (file_ids, id);
2154 if (nd != NULL)
2155 {
2156 file_data = (struct lto_file_decl_data *)nd->value;
2157 }
2158 else
2159 {
2160 file_data = ggc_alloc_lto_file_decl_data ();
2161 memset(file_data, 0, sizeof (struct lto_file_decl_data));
2162 file_data->id = id;
2163 file_data->section_hash_table = lto_obj_create_section_hash_table ();
2164 lto_splay_tree_insert (file_ids, id, file_data);
2165
2166 /* Maintain the list in linker order. */
2167 if (!list->first)
2168 list->first = file_data;
2169 if (list->last)
2170 list->last->next = file_data;
2171 list->last = file_data;
2172 }
2173
2174 /* Copy the section into the sub-module hash table. */
2175 new_name = XDUPVEC (char, ls->name, strlen (ls->name) + 1);
2176 s_slot.name = new_name;
2177 hash_slot = htab_find_slot (file_data->section_hash_table, &s_slot, INSERT);
2178 gcc_assert (*hash_slot == NULL);
2179
2180 new_slot = XDUP (struct lto_section_slot, ls);
2181 new_slot->name = new_name;
2182 *hash_slot = new_slot;
2183 return 1;
2184 }
2185
2186 /* Read declarations and other initializations for a FILE_DATA. */
2187
2188 static void
2189 lto_file_finalize (struct lto_file_decl_data *file_data, lto_file *file)
2190 {
2191 const char *data;
2192 size_t len;
2193 vec<ld_plugin_symbol_resolution_t>
2194 resolutions = vNULL;
2195 int i;
2196 res_pair *rp;
2197
2198 /* Create vector for fast access of resolution. We do this lazily
2199 to save memory. */
2200 resolutions.safe_grow_cleared (file_data->max_index + 1);
2201 for (i = 0; file_data->respairs.iterate (i, &rp); i++)
2202 resolutions[rp->index] = rp->res;
2203 file_data->respairs.release ();
2204
2205 file_data->renaming_hash_table = lto_create_renaming_table ();
2206 file_data->file_name = file->filename;
2207 data = lto_get_section_data (file_data, LTO_section_decls, NULL, &len);
2208 if (data == NULL)
2209 {
2210 internal_error ("cannot read LTO decls from %s", file_data->file_name);
2211 return;
2212 }
2213 /* Frees resolutions. */
2214 lto_read_decls (file_data, data, resolutions);
2215 lto_free_section_data (file_data, LTO_section_decls, NULL, data, len);
2216 }
2217
2218 /* Finalize FILE_DATA in FILE and increase COUNT. */
2219
2220 static int
2221 lto_create_files_from_ids (lto_file *file, struct lto_file_decl_data *file_data,
2222 int *count)
2223 {
2224 lto_file_finalize (file_data, file);
2225 if (cgraph_dump_file)
2226 fprintf (cgraph_dump_file, "Creating file %s with sub id " HOST_WIDE_INT_PRINT_HEX "\n",
2227 file_data->file_name, file_data->id);
2228 (*count)++;
2229 return 0;
2230 }
2231
2232 /* Generate a TREE representation for all types and external decl
2233 entities in FILE.
2234
2235 Read all of the globals out of the file. Then read the cgraph
2236 and process the .o index into the cgraph nodes so that it can open
2237 the .o file to load the functions and ipa information. */
2238
2239 static struct lto_file_decl_data *
2240 lto_file_read (lto_file *file, FILE *resolution_file, int *count)
2241 {
2242 struct lto_file_decl_data *file_data = NULL;
2243 splay_tree file_ids;
2244 htab_t section_hash_table;
2245 struct lto_section_slot *section;
2246 struct file_data_list file_list;
2247 struct lto_section_list section_list;
2248
2249 memset (&section_list, 0, sizeof (struct lto_section_list));
2250 section_hash_table = lto_obj_build_section_table (file, &section_list);
2251
2252 /* Find all sub modules in the object and put their sections into new hash
2253 tables in a splay tree. */
2254 file_ids = lto_splay_tree_new ();
2255 memset (&file_list, 0, sizeof (struct file_data_list));
2256 for (section = section_list.first; section != NULL; section = section->next)
2257 create_subid_section_table (section, file_ids, &file_list);
2258
2259 /* Add resolutions to the file ids. */
2260 lto_resolution_read (file_ids, resolution_file, file);
2261
2262 /* Finalize each lto file for each sub-module in the merged object. */
2263 for (file_data = file_list.first; file_data != NULL; file_data = file_data->next)
2264 lto_create_files_from_ids (file, file_data, count);
2265
2266 splay_tree_delete (file_ids);
2267 htab_delete (section_hash_table);
2268
2269 return file_list.first;
2270 }
2271
2272 #if HAVE_MMAP_FILE && HAVE_SYSCONF && defined _SC_PAGE_SIZE
2273 #define LTO_MMAP_IO 1
2274 #endif
2275
2276 #if LTO_MMAP_IO
2277 /* Page size of machine is used for mmap and munmap calls. */
2278 static size_t page_mask;
2279 #endif
2280
2281 /* Get the section data of length LEN from the file behind FILE_DATA,
2282 starting at OFFSET. The data segment must be freed by the caller
2283 when the caller is finished. Returns NULL if all was not well. */
2284
2285 static char *
2286 lto_read_section_data (struct lto_file_decl_data *file_data,
2287 intptr_t offset, size_t len)
2288 {
2289 char *result;
2290 static int fd = -1;
2291 static char *fd_name;
2292 #if LTO_MMAP_IO
2293 intptr_t computed_len;
2294 intptr_t computed_offset;
2295 intptr_t diff;
2296 #endif
2297
2298 /* Keep a single-entry file-descriptor cache. The last file we
2299 touched will get closed at exit.
2300 ??? Eventually we want to add a more sophisticated larger cache
2301 or rather fix function body streaming to not stream them in
2302 practically random order. */
2303 if (fd != -1
2304 && filename_cmp (fd_name, file_data->file_name) != 0)
2305 {
2306 free (fd_name);
2307 close (fd);
2308 fd = -1;
2309 }
2310 if (fd == -1)
2311 {
2312 fd = open (file_data->file_name, O_RDONLY|O_BINARY);
2313 if (fd == -1)
2314 {
2315 fatal_error ("Cannot open %s", file_data->file_name);
2316 return NULL;
2317 }
2318 fd_name = xstrdup (file_data->file_name);
2319 }
2320
2321 #if LTO_MMAP_IO
2322 if (!page_mask)
2323 {
2324 size_t page_size = sysconf (_SC_PAGE_SIZE);
2325 page_mask = ~(page_size - 1);
2326 }
2327
2328 computed_offset = offset & page_mask;
2329 diff = offset - computed_offset;
2330 computed_len = len + diff;
2331
2332 result = (char *) mmap (NULL, computed_len, PROT_READ, MAP_PRIVATE,
2333 fd, computed_offset);
2334 if (result == MAP_FAILED)
2335 {
2336 fatal_error ("Cannot map %s", file_data->file_name);
2337 return NULL;
2338 }
2339
2340 return result + diff;
2341 #else
2342 result = (char *) xmalloc (len);
2343 if (lseek (fd, offset, SEEK_SET) != offset
2344 || read (fd, result, len) != (ssize_t) len)
2345 {
2346 free (result);
2347 fatal_error ("Cannot read %s", file_data->file_name);
2348 result = NULL;
2349 }
2350 #ifdef __MINGW32__
2351 /* Native Windows doesn't support delayed unlink of an open file. So
2352 we close the file here again. This produces a higher I/O load, but at
2353 least it avoids dangling file handles that would prevent the unlink. */
2354 free (fd_name);
2355 fd_name = NULL;
2356 close (fd);
2357 fd = -1;
2358 #endif
2359 return result;
2360 #endif
2361 }
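
/* Editorial sketch (standalone, hypothetical values): mmap requires a
   page-aligned file offset, so the LTO_MMAP_IO path above rounds the
   requested offset down to a page boundary, maps a correspondingly longer
   region, and hands the caller a pointer adjusted by the rounding
   difference; free_section_data later re-derives the same aligned offset to
   munmap.  */

#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  size_t page_size = 4096;                /* assumed; normally from sysconf */
  size_t page_mask = ~(page_size - 1);

  uintptr_t offset = 0x12345;             /* requested section offset */
  size_t len = 100;                       /* requested section length */

  uintptr_t aligned = offset & page_mask; /* 0x12000: page-aligned start */
  uintptr_t diff = offset - aligned;      /* 0x345: bytes to skip in the map */
  size_t map_len = len + diff;            /* map enough to reach the end */

  printf ("mmap offset 0x%lx, length %zu, data at map + 0x%lx\n",
          (unsigned long) aligned, map_len, (unsigned long) diff);
  return 0;
}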
2362
2363
2364 /* Get the section data from FILE_DATA of SECTION_TYPE with NAME.
2365 NAME will be NULL unless the section type is for a function
2366 body. */
2367
2368 static const char *
2369 get_section_data (struct lto_file_decl_data *file_data,
2370 enum lto_section_type section_type,
2371 const char *name,
2372 size_t *len)
2373 {
2374 htab_t section_hash_table = file_data->section_hash_table;
2375 struct lto_section_slot *f_slot;
2376 struct lto_section_slot s_slot;
2377 const char *section_name = lto_get_section_name (section_type, name, file_data);
2378 char *data = NULL;
2379
2380 *len = 0;
2381 s_slot.name = section_name;
2382 f_slot = (struct lto_section_slot *) htab_find (section_hash_table, &s_slot);
2383 if (f_slot)
2384 {
2385 data = lto_read_section_data (file_data, f_slot->start, f_slot->len);
2386 *len = f_slot->len;
2387 }
2388
2389 free (CONST_CAST (char *, section_name));
2390 return data;
2391 }
2392
2393
2394 /* Free the section data from FILE_DATA of SECTION_TYPE with NAME that
2395 starts at OFFSET and has LEN bytes. */
2396
2397 static void
2398 free_section_data (struct lto_file_decl_data *file_data ATTRIBUTE_UNUSED,
2399 enum lto_section_type section_type ATTRIBUTE_UNUSED,
2400 const char *name ATTRIBUTE_UNUSED,
2401 const char *offset, size_t len ATTRIBUTE_UNUSED)
2402 {
2403 #if LTO_MMAP_IO
2404 intptr_t computed_len;
2405 intptr_t computed_offset;
2406 intptr_t diff;
2407 #endif
2408
2409 #if LTO_MMAP_IO
2410 computed_offset = ((intptr_t) offset) & page_mask;
2411 diff = (intptr_t) offset - computed_offset;
2412 computed_len = len + diff;
2413
2414 munmap ((caddr_t) computed_offset, computed_len);
2415 #else
2416 free (CONST_CAST(char *, offset));
2417 #endif
2418 }
2419
2420 static lto_file *current_lto_file;
2421
2422 /* Helper for qsort; order partitions by size, largest first.
2423 We sort from greatest to smallest so a parallel build doesn't stall on
2424 the longest compilation being started too late. */
2425
2426 static int
2427 cmp_partitions_size (const void *a, const void *b)
2428 {
2429 const struct ltrans_partition_def *pa
2430 = *(struct ltrans_partition_def *const *)a;
2431 const struct ltrans_partition_def *pb
2432 = *(struct ltrans_partition_def *const *)b;
2433 return pb->insns - pa->insns;
2434 }
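
/* Editorial note: returning pb->insns - pa->insns makes qsort place the
   partition with more instructions first; e.g. partitions of 3, 10 and 7
   insns come out ordered 10, 7, 3.  */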
2435
2436 /* Helper for qsort; order partitions by the symtab order of their first symbol. */
2437
2438 static int
2439 cmp_partitions_order (const void *a, const void *b)
2440 {
2441 const struct ltrans_partition_def *pa
2442 = *(struct ltrans_partition_def *const *)a;
2443 const struct ltrans_partition_def *pb
2444 = *(struct ltrans_partition_def *const *)b;
2445 int ordera = -1, orderb = -1;
2446
2447 if (lto_symtab_encoder_size (pa->encoder))
2448 ordera = lto_symtab_encoder_deref (pa->encoder, 0)->order;
2449 if (lto_symtab_encoder_size (pb->encoder))
2450 orderb = lto_symtab_encoder_deref (pb->encoder, 0)->order;
2451 return orderb - ordera;
2452 }
2453
2454 /* Actually stream out ENCODER into TEMP_FILENAME. */
2455
2456 static void
2457 do_stream_out (char *temp_filename, lto_symtab_encoder_t encoder)
2458 {
2459 lto_file *file = lto_obj_file_open (temp_filename, true);
2460 if (!file)
2461 fatal_error ("lto_obj_file_open() failed");
2462 lto_set_current_out_file (file);
2463
2464 ipa_write_optimization_summaries (encoder);
2465
2466 lto_set_current_out_file (NULL);
2467 lto_obj_file_close (file);
2468 free (file);
2469 }
2470
2471 /* Wait for a forked child process and report any errors it hit. */
2472 #ifdef HAVE_WORKING_FORK
2473 static void
2474 wait_for_child ()
2475 {
2476 int status;
2477 do
2478 {
2479 int w = waitpid(0, &status, WUNTRACED | WCONTINUED);
2480 if (w == -1)
2481 fatal_error ("waitpid failed");
2482
2483 if (WIFEXITED (status) && WEXITSTATUS (status))
2484 fatal_error ("streaming subprocess failed");
2485 else if (WIFSIGNALED (status))
2486 fatal_error ("streaming subprocess was killed by signal");
2487 }
2488 while (!WIFEXITED(status) && !WIFSIGNALED(status));
2489 }
2490 #endif
2491
2492 /* Stream out ENCODER into TEMP_FILENAME.
2493 Fork if that seems to help. */
2494
2495 static void
2496 stream_out (char *temp_filename, lto_symtab_encoder_t encoder, bool last)
2497 {
2498 #ifdef HAVE_WORKING_FORK
2499 static int nruns;
2500
2501 if (lto_parallelism <= 1)
2502 {
2503 do_stream_out (temp_filename, encoder);
2504 return;
2505 }
2506
2507 /* Do not run more than LTO_PARALLELISM streaming processes in parallel.
2508 FIXME: we ignore limits imposed by the jobserver. */
2509 if (lto_parallelism > 0 && nruns >= lto_parallelism)
2510 {
2511 wait_for_child ();
2512 nruns --;
2513 }
2514 /* If this is not the last parallel partition, execute new
2515 streaming process. */
2516 if (!last)
2517 {
2518 pid_t cpid = fork ();
2519
2520 if (!cpid)
2521 {
2522 setproctitle ("lto1-wpa-streaming");
2523 do_stream_out (temp_filename, encoder);
2524 exit (0);
2525 }
2526 /* Fork failed; let's do the job ourselves. */
2527 else if (cpid == -1)
2528 do_stream_out (temp_filename, encoder);
2529 else
2530 nruns++;
2531 }
2532 /* Last partition; stream it and wait for all children to die. */
2533 else
2534 {
2535 int i;
2536 do_stream_out (temp_filename, encoder);
2537 for (i = 0; i < nruns; i++)
2538 wait_for_child ();
2539 }
2540 asm_nodes_output = true;
2541 #else
2542 do_stream_out (temp_filename, encoder);
2543 #endif
2544 }
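
/* Editorial sketch (standalone POSIX example, not GCC code): the pattern
   stream_out uses above -- fork one child per unit of work, cap the number
   of live children, have the parent handle the last unit itself, then reap
   every child.  All names below (do_work, n_units, max_children) are
   hypothetical.  */

#include <stdio.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <unistd.h>

static void
do_work (int i)
{
  printf ("unit %d done by pid %d\n", i, (int) getpid ());
}

int
main (void)
{
  const int n_units = 8, max_children = 3;
  int nruns = 0;

  for (int i = 0; i < n_units; i++)
    {
      int last = (i == n_units - 1);

      /* Throttle: once the cap is hit, wait for one child to finish.  */
      if (!last && nruns >= max_children)
        {
          if (waitpid (-1, NULL, 0) > 0)
            nruns--;
        }

      if (!last)
        {
          pid_t pid = fork ();
          if (pid == 0)
            {
              do_work (i);      /* Child: do the work and exit.  */
              _exit (0);
            }
          else if (pid < 0)
            do_work (i);        /* Fork failed: fall back to doing it inline.  */
          else
            nruns++;
        }
      else
        {
          do_work (i);          /* Last unit: handled by the parent itself.  */
          while (nruns-- > 0)
            waitpid (-1, NULL, 0);
        }
    }
  return 0;
}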
2545
2546 /* Write all output files in WPA mode and the file with the list of
2547 LTRANS units. */
2548
2549 static void
2550 lto_wpa_write_files (void)
2551 {
2552 unsigned i, n_sets;
2553 ltrans_partition part;
2554 FILE *ltrans_output_list_stream;
2555 char *temp_filename;
2556 vec <char *>temp_filenames = vNULL;
2557 size_t blen;
2558
2559 /* Open the LTRANS output list. */
2560 if (!ltrans_output_list)
2561 fatal_error ("no LTRANS output list filename provided");
2562
2563 timevar_push (TV_WHOPR_WPA);
2564
2565 FOR_EACH_VEC_ELT (ltrans_partitions, i, part)
2566 lto_stats.num_output_symtab_nodes += lto_symtab_encoder_size (part->encoder);
2567
2568 /* Find out statics that need to be promoted
2569 to globals with hidden visibility because they are accessed from multiple
2570 partitions. */
2571 lto_promote_cross_file_statics ();
2572
2573 timevar_pop (TV_WHOPR_WPA);
2574
2575 timevar_push (TV_WHOPR_WPA_IO);
2576
2577 /* Generate a prefix for the LTRANS unit files. */
2578 blen = strlen (ltrans_output_list);
2579 temp_filename = (char *) xmalloc (blen + sizeof ("2147483648.o"));
2580 strcpy (temp_filename, ltrans_output_list);
2581 if (blen > sizeof (".out")
2582 && strcmp (temp_filename + blen - sizeof (".out") + 1,
2583 ".out") == 0)
2584 temp_filename[blen - sizeof (".out") + 1] = '\0';
2585 blen = strlen (temp_filename);
2586
2587 n_sets = ltrans_partitions.length ();
2588
2589 /* Sort partitions by size so small ones are compiled last.
2590 FIXME: Even when not reordering we may want to output one list for parallel make
2591 and another for the final link command. */
2592
2593 if (!flag_profile_reorder_functions || !flag_profile_use)
2594 ltrans_partitions.qsort (flag_toplevel_reorder
2595 ? cmp_partitions_size
2596 : cmp_partitions_order);
2597
2598 for (i = 0; i < n_sets; i++)
2599 {
2600 ltrans_partition part = ltrans_partitions[i];
2601
2602 /* Write all the nodes in SET. */
2603 sprintf (temp_filename + blen, "%u.o", i);
2604
2605 if (!quiet_flag)
2606 fprintf (stderr, " %s (%s %i insns)", temp_filename, part->name, part->insns);
2607 if (cgraph_dump_file)
2608 {
2609 lto_symtab_encoder_iterator lsei;
2610
2611 fprintf (cgraph_dump_file, "Writing partition %s to file %s, %i insns\n",
2612 part->name, temp_filename, part->insns);
2613 fprintf (cgraph_dump_file, " Symbols in partition: ");
2614 for (lsei = lsei_start_in_partition (part->encoder); !lsei_end_p (lsei);
2615 lsei_next_in_partition (&lsei))
2616 {
2617 symtab_node *node = lsei_node (lsei);
2618 fprintf (cgraph_dump_file, "%s ", node->asm_name ());
2619 }
2620 fprintf (cgraph_dump_file, "\n Symbols in boundary: ");
2621 for (lsei = lsei_start (part->encoder); !lsei_end_p (lsei);
2622 lsei_next (&lsei))
2623 {
2624 symtab_node *node = lsei_node (lsei);
2625 if (!lto_symtab_encoder_in_partition_p (part->encoder, node))
2626 {
2627 fprintf (cgraph_dump_file, "%s ", node->asm_name ());
2628 cgraph_node *cnode = dyn_cast <cgraph_node> (node);
2629 if (cnode
2630 && lto_symtab_encoder_encode_body_p (part->encoder, cnode))
2631 fprintf (cgraph_dump_file, "(body included)");
2632 else
2633 {
2634 varpool_node *vnode = dyn_cast <varpool_node> (node);
2635 if (vnode
2636 && lto_symtab_encoder_encode_initializer_p (part->encoder, vnode))
2637 fprintf (cgraph_dump_file, "(initializer included)");
2638 }
2639 }
2640 }
2641 fprintf (cgraph_dump_file, "\n");
2642 }
2643 gcc_checking_assert (lto_symtab_encoder_size (part->encoder) || !i);
2644
2645 stream_out (temp_filename, part->encoder, i == n_sets - 1);
2646
2647 part->encoder = NULL;
2648
2649 temp_filenames.safe_push (xstrdup (temp_filename));
2650 }
2651 ltrans_output_list_stream = fopen (ltrans_output_list, "w");
2652 if (ltrans_output_list_stream == NULL)
2653 fatal_error ("opening LTRANS output list %s: %m", ltrans_output_list);
2654 for (i = 0; i < n_sets; i++)
2655 {
2656 unsigned int len = strlen (temp_filenames[i]);
2657 if (fwrite (temp_filenames[i], 1, len, ltrans_output_list_stream) < len
2658 || fwrite ("\n", 1, 1, ltrans_output_list_stream) < 1)
2659 fatal_error ("writing to LTRANS output list %s: %m",
2660 ltrans_output_list);
2661 free (temp_filenames[i]);
2662 }
2663 temp_filenames.release();
2664
2665 lto_stats.num_output_files += n_sets;
2666
2667 /* Close the LTRANS output list. */
2668 if (fclose (ltrans_output_list_stream))
2669 fatal_error ("closing LTRANS output list %s: %m", ltrans_output_list);
2670
2671 free_ltrans_partitions();
2672 free (temp_filename);
2673
2674 timevar_pop (TV_WHOPR_WPA_IO);
2675 }
2676
2677
2678 /* If TT is a variable or function decl replace it with its
2679 prevailing variant. */
2680 #define LTO_SET_PREVAIL(tt) \
2681 do {\
2682 if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \
2683 && (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \
2684 { \
2685 tt = lto_symtab_prevailing_decl (tt); \
2686 fixed = true; \
2687 } \
2688 } while (0)
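
/* Editorial note: the do { ... } while (0) wrapper makes the multi-statement
   macro expand to a single statement, so a use such as
     if (cond) LTO_SET_PREVAIL (TREE_TYPE (t)); else ...
   parses as intended.  LTO_SET_PREVAIL assumes a local variable FIXED in the
   enclosing function; see lto_fixup_prevailing_decls below.  */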
2689
2690 /* Ensure that TT isn't a replaceable var or function decl. */
2691 #define LTO_NO_PREVAIL(tt) \
2692 gcc_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
2693
2694 /* Given a tree T replace all fields referring to variables or functions
2695 with their prevailing variant. */
2696 static void
2697 lto_fixup_prevailing_decls (tree t)
2698 {
2699 enum tree_code code = TREE_CODE (t);
2700 bool fixed = false;
2701
2702 gcc_checking_assert (code != TREE_BINFO);
2703 LTO_NO_PREVAIL (TREE_TYPE (t));
2704 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
2705 LTO_NO_PREVAIL (TREE_CHAIN (t));
2706 if (DECL_P (t))
2707 {
2708 LTO_NO_PREVAIL (DECL_NAME (t));
2709 LTO_SET_PREVAIL (DECL_CONTEXT (t));
2710 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
2711 {
2712 LTO_SET_PREVAIL (DECL_SIZE (t));
2713 LTO_SET_PREVAIL (DECL_SIZE_UNIT (t));
2714 LTO_SET_PREVAIL (DECL_INITIAL (t));
2715 LTO_NO_PREVAIL (DECL_ATTRIBUTES (t));
2716 LTO_SET_PREVAIL (DECL_ABSTRACT_ORIGIN (t));
2717 }
2718 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
2719 {
2720 LTO_NO_PREVAIL (t->decl_with_vis.assembler_name);
2721 LTO_NO_PREVAIL (DECL_SECTION_NAME (t));
2722 }
2723 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
2724 {
2725 LTO_NO_PREVAIL (DECL_ARGUMENT_FLD (t));
2726 LTO_NO_PREVAIL (DECL_RESULT_FLD (t));
2727 LTO_NO_PREVAIL (DECL_VINDEX (t));
2728 }
2729 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
2730 LTO_SET_PREVAIL (DECL_FUNCTION_PERSONALITY (t));
2731 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
2732 {
2733 LTO_SET_PREVAIL (DECL_FIELD_OFFSET (t));
2734 LTO_NO_PREVAIL (DECL_BIT_FIELD_TYPE (t));
2735 LTO_NO_PREVAIL (DECL_QUALIFIER (t));
2736 LTO_NO_PREVAIL (DECL_FIELD_BIT_OFFSET (t));
2737 LTO_NO_PREVAIL (DECL_FCONTEXT (t));
2738 }
2739 }
2740 else if (TYPE_P (t))
2741 {
2742 LTO_NO_PREVAIL (TYPE_CACHED_VALUES (t));
2743 LTO_SET_PREVAIL (TYPE_SIZE (t));
2744 LTO_SET_PREVAIL (TYPE_SIZE_UNIT (t));
2745 LTO_NO_PREVAIL (TYPE_ATTRIBUTES (t));
2746 LTO_NO_PREVAIL (TYPE_NAME (t));
2747
2748 LTO_SET_PREVAIL (TYPE_MINVAL (t));
2749 LTO_SET_PREVAIL (TYPE_MAXVAL (t));
2750 LTO_NO_PREVAIL (t->type_non_common.binfo);
2751
2752 LTO_SET_PREVAIL (TYPE_CONTEXT (t));
2753
2754 LTO_NO_PREVAIL (TYPE_CANONICAL (t));
2755 LTO_NO_PREVAIL (TYPE_MAIN_VARIANT (t));
2756 LTO_NO_PREVAIL (TYPE_NEXT_VARIANT (t));
2757 }
2758 else if (EXPR_P (t))
2759 {
2760 int i;
2761 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
2762 LTO_SET_PREVAIL (TREE_OPERAND (t, i));
2763 }
2764 else if (TREE_CODE (t) == CONSTRUCTOR)
2765 {
2766 unsigned i;
2767 tree val;
2768 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
2769 LTO_SET_PREVAIL (val);
2770 }
2771 else
2772 {
2773 switch (code)
2774 {
2775 case TREE_LIST:
2776 LTO_SET_PREVAIL (TREE_VALUE (t));
2777 LTO_SET_PREVAIL (TREE_PURPOSE (t));
2778 LTO_NO_PREVAIL (TREE_PURPOSE (t));
2779 break;
2780 default:
2781 gcc_unreachable ();
2782 }
2783 }
2784 /* If we fixed nothing, then we missed something seen by
2785 mentions_vars_p. */
2786 gcc_checking_assert (fixed);
2787 }
2788 #undef LTO_SET_PREVAIL
2789 #undef LTO_NO_PREVAIL
2790
2791 /* Helper function of lto_fixup_decls. Walks the var and fn streams in STATE,
2792 replaces var and function decls with the corresponding prevailing def. */
2793
2794 static void
2795 lto_fixup_state (struct lto_in_decl_state *state)
2796 {
2797 unsigned i, si;
2798 struct lto_tree_ref_table *table;
2799
2800 /* Although we only want to replace FUNCTION_DECLs and VAR_DECLs,
2801 we still need to walk from all DECLs to find the reachable
2802 FUNCTION_DECLs and VAR_DECLs. */
2803 for (si = 0; si < LTO_N_DECL_STREAMS; si++)
2804 {
2805 table = &state->streams[si];
2806 for (i = 0; i < table->size; i++)
2807 {
2808 tree *tp = table->trees + i;
2809 if (VAR_OR_FUNCTION_DECL_P (*tp)
2810 && (TREE_PUBLIC (*tp) || DECL_EXTERNAL (*tp)))
2811 *tp = lto_symtab_prevailing_decl (*tp);
2812 }
2813 }
2814 }
2815
2816 /* A callback of htab_traverse. Just extracts a state from SLOT
2817 and calls lto_fixup_state. */
2818
2819 static int
2820 lto_fixup_state_aux (void **slot, void *aux ATTRIBUTE_UNUSED)
2821 {
2822 struct lto_in_decl_state *state = (struct lto_in_decl_state *) *slot;
2823 lto_fixup_state (state);
2824 return 1;
2825 }
2826
2827 /* Fix the decls from all FILES. Replaces each decl with the corresponding
2828 prevailing one. */
2829
2830 static void
2831 lto_fixup_decls (struct lto_file_decl_data **files)
2832 {
2833 unsigned int i;
2834 tree t;
2835
2836 if (tree_with_vars)
2837 FOR_EACH_VEC_ELT ((*tree_with_vars), i, t)
2838 lto_fixup_prevailing_decls (t);
2839
2840 for (i = 0; files[i]; i++)
2841 {
2842 struct lto_file_decl_data *file = files[i];
2843 struct lto_in_decl_state *state = file->global_decl_state;
2844 lto_fixup_state (state);
2845
2846 htab_traverse (file->function_decl_states, lto_fixup_state_aux, NULL);
2847 }
2848 }
2849
2850 static GTY((length ("lto_stats.num_input_files + 1"))) struct lto_file_decl_data **all_file_decl_data;
2851
2852 /* Turn file datas for sub files into a single array, so that they look
2853 like separate files for further passes. */
2854
2855 static void
2856 lto_flatten_files (struct lto_file_decl_data **orig, int count, int last_file_ix)
2857 {
2858 struct lto_file_decl_data *n, *next;
2859 int i, k;
2860
2861 lto_stats.num_input_files = count;
2862 all_file_decl_data
2863 = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (count + 1);
2864 /* Set the hooks so that all of the ipa passes can read in their data. */
2865 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
2866 for (i = 0, k = 0; i < last_file_ix; i++)
2867 {
2868 for (n = orig[i]; n != NULL; n = next)
2869 {
2870 all_file_decl_data[k++] = n;
2871 next = n->next;
2872 n->next = NULL;
2873 }
2874 }
2875 all_file_decl_data[k] = NULL;
2876 gcc_assert (k == count);
2877 }
2878
2879 /* Input file data before flattening (i.e. splitting them to subfiles to
2880 support incremental linking). */
2881 static int real_file_count;
2882 static GTY((length ("real_file_count + 1"))) struct lto_file_decl_data **real_file_decl_data;
2883
2884 static void print_lto_report_1 (void);
2885
2886 /* Read all the symbols from the input files FNAMES. NFILES is the
2887 number of files requested in the command line. Instantiate a
2888 global call graph by aggregating all the sub-graphs found in each
2889 file. */
2890
2891 static void
2892 read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
2893 {
2894 unsigned int i, last_file_ix;
2895 FILE *resolution;
2896 int count = 0;
2897 struct lto_file_decl_data **decl_data;
2898 void **res;
2899 symtab_node *snode;
2900
2901 init_cgraph ();
2902
2903 timevar_push (TV_IPA_LTO_DECL_IN);
2904
2905 real_file_decl_data
2906 = decl_data = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (nfiles + 1);
2907 real_file_count = nfiles;
2908
2909 /* Read the resolution file. */
2910 resolution = NULL;
2911 if (resolution_file_name)
2912 {
2913 int t;
2914 unsigned num_objects;
2915
2916 resolution = fopen (resolution_file_name, "r");
2917 if (resolution == NULL)
2918 fatal_error ("could not open symbol resolution file: %m");
2919
2920 t = fscanf (resolution, "%u", &num_objects);
2921 gcc_assert (t == 1);
2922
2923 /* True, since the plugin splits the archives. */
2924 gcc_assert (num_objects == nfiles);
2925 }
2926 cgraph_state = CGRAPH_LTO_STREAMING;
2927
2928 canonical_type_hash_cache = new pointer_map <hashval_t>;
2929 gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
2930 gimple_canonical_type_eq, 0);
2931 gcc_obstack_init (&tree_scc_hash_obstack);
2932 tree_scc_hash.create (4096);
2933
2934 /* Register the common node types with the canonical type machinery so
2935 we properly share alias-sets across languages and TUs. Do not
2936 expose the common nodes as type merge targets - those that should be
2937 are already exposed by pre-loading the LTO streamer caches.
2938 Do two passes - first clear TYPE_CANONICAL and then re-compute it. */
2939 for (i = 0; i < itk_none; ++i)
2940 lto_register_canonical_types (integer_types[i], true);
2941 for (i = 0; i < stk_type_kind_last; ++i)
2942 lto_register_canonical_types (sizetype_tab[i], true);
2943 for (i = 0; i < TI_MAX; ++i)
2944 lto_register_canonical_types (global_trees[i], true);
2945 for (i = 0; i < itk_none; ++i)
2946 lto_register_canonical_types (integer_types[i], false);
2947 for (i = 0; i < stk_type_kind_last; ++i)
2948 lto_register_canonical_types (sizetype_tab[i], false);
2949 for (i = 0; i < TI_MAX; ++i)
2950 lto_register_canonical_types (global_trees[i], false);
2951
2952 if (!quiet_flag)
2953 fprintf (stderr, "Reading object files:");
2954
2955 /* Read all of the object files specified on the command line. */
2956 for (i = 0, last_file_ix = 0; i < nfiles; ++i)
2957 {
2958 struct lto_file_decl_data *file_data = NULL;
2959 if (!quiet_flag)
2960 {
2961 fprintf (stderr, " %s", fnames[i]);
2962 fflush (stderr);
2963 }
2964
2965 current_lto_file = lto_obj_file_open (fnames[i], false);
2966 if (!current_lto_file)
2967 break;
2968
2969 file_data = lto_file_read (current_lto_file, resolution, &count);
2970 if (!file_data)
2971 {
2972 lto_obj_file_close (current_lto_file);
2973 free (current_lto_file);
2974 current_lto_file = NULL;
2975 break;
2976 }
2977
2978 decl_data[last_file_ix++] = file_data;
2979
2980 lto_obj_file_close (current_lto_file);
2981 free (current_lto_file);
2982 current_lto_file = NULL;
2983 }
2984
2985 lto_flatten_files (decl_data, count, last_file_ix);
2986 lto_stats.num_input_files = count;
2987 ggc_free(decl_data);
2988 real_file_decl_data = NULL;
2989
2990 if (resolution_file_name)
2991 fclose (resolution);
2992
2993 /* Show the LTO report before launching LTRANS. */
2994 if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
2995 print_lto_report_1 ();
2996
2997 /* Free gimple type merging datastructures. */
2998 tree_scc_hash.dispose ();
2999 obstack_free (&tree_scc_hash_obstack, NULL);
3000 htab_delete (gimple_canonical_types);
3001 gimple_canonical_types = NULL;
3002 delete canonical_type_hash_cache;
3003 canonical_type_hash_cache = NULL;
3004 ggc_collect ();
3005
3006 /* Set the hooks so that all of the ipa passes can read in their data. */
3007 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
3008
3009 timevar_pop (TV_IPA_LTO_DECL_IN);
3010
3011 if (!quiet_flag)
3012 fprintf (stderr, "\nReading the callgraph\n");
3013
3014 timevar_push (TV_IPA_LTO_CGRAPH_IO);
3015 /* Read the symtab. */
3016 input_symtab ();
3017
3018 /* Store resolutions into the symbol table. */
3019
3020 FOR_EACH_SYMBOL (snode)
3021 if (symtab_real_symbol_p (snode)
3022 && snode->lto_file_data
3023 && snode->lto_file_data->resolution_map
3024 && (res = pointer_map_contains (snode->lto_file_data->resolution_map,
3025 snode->decl)))
3026 snode->resolution
3027 = (enum ld_plugin_symbol_resolution)(size_t)*res;
3028 for (i = 0; all_file_decl_data[i]; i++)
3029 if (all_file_decl_data[i]->resolution_map)
3030 {
3031 pointer_map_destroy (all_file_decl_data[i]->resolution_map);
3032 all_file_decl_data[i]->resolution_map = NULL;
3033 }
3034
3035 timevar_pop (TV_IPA_LTO_CGRAPH_IO);
3036
3037 if (!quiet_flag)
3038 fprintf (stderr, "Merging declarations\n");
3039
3040 timevar_push (TV_IPA_LTO_DECL_MERGE);
3041 /* Merge global decls. In ltrans mode we read merged cgraph, we do not
3042 need to care about resolving symbols again, we only need to replace
3043 duplicated declarations read from the callgraph and from function
3044 sections. */
3045 if (!flag_ltrans)
3046 {
3047 lto_symtab_merge_decls ();
3048
3049 /* If there were errors during symbol merging bail out, we have no
3050 good way to recover here. */
3051 if (seen_error ())
3052 fatal_error ("errors during merging of translation units");
3053
3054 /* Fixup all decls. */
3055 lto_fixup_decls (all_file_decl_data);
3056 }
3057 if (tree_with_vars)
3058 ggc_free (tree_with_vars);
3059 tree_with_vars = NULL;
3060 ggc_collect ();
3061
3062 timevar_pop (TV_IPA_LTO_DECL_MERGE);
3063 /* Each pass will set the appropriate timer. */
3064
3065 if (!quiet_flag)
3066 fprintf (stderr, "Reading summaries\n");
3067
3068 /* Read the IPA summary data. */
3069 if (flag_ltrans)
3070 ipa_read_optimization_summaries ();
3071 else
3072 ipa_read_summaries ();
3073
3074 for (i = 0; all_file_decl_data[i]; i++)
3075 {
3076 gcc_assert (all_file_decl_data[i]->symtab_node_encoder);
3077 lto_symtab_encoder_delete (all_file_decl_data[i]->symtab_node_encoder);
3078 all_file_decl_data[i]->symtab_node_encoder = NULL;
3079 lto_free_function_in_decl_state (all_file_decl_data[i]->global_decl_state);
3080 all_file_decl_data[i]->global_decl_state = NULL;
3081 all_file_decl_data[i]->current_decl_state = NULL;
3082 }
3083
3084 /* Finally merge the cgraph according to the decl merging decisions. */
3085 timevar_push (TV_IPA_LTO_CGRAPH_MERGE);
3086 if (cgraph_dump_file)
3087 {
3088 fprintf (cgraph_dump_file, "Before merging:\n");
3089 dump_symtab (cgraph_dump_file);
3090 }
3091 lto_symtab_merge_symbols ();
3092 ggc_collect ();
3093 cgraph_state = CGRAPH_STATE_IPA_SSA;
3094
3095 timevar_pop (TV_IPA_LTO_CGRAPH_MERGE);
3096
3097 timevar_push (TV_IPA_LTO_DECL_INIT_IO);
3098
3099 /* Indicate that the cgraph is built and ready. */
3100 cgraph_function_flags_ready = true;
3101
3102 timevar_pop (TV_IPA_LTO_DECL_INIT_IO);
3103 ggc_free (all_file_decl_data);
3104 all_file_decl_data = NULL;
3105 }
3106
3107
3108 /* Materialize all the bodies for all the nodes in the callgraph. */
3109
3110 static void
3111 materialize_cgraph (void)
3112 {
3113 struct cgraph_node *node;
3114 timevar_id_t lto_timer;
3115
3116 if (!quiet_flag)
3117 fprintf (stderr,
3118 flag_wpa ? "Materializing decls:" : "Reading function bodies:");
3119
3120 /* Now that we have input the cgraph, we need to clear all of the aux
3121 nodes and read the functions if we are not running in WPA mode. */
3122 timevar_push (TV_IPA_LTO_GIMPLE_IN);
3123
3124 FOR_EACH_FUNCTION (node)
3125 {
3126 if (node->lto_file_data)
3127 {
3128 lto_materialize_function (node);
3129 lto_stats.num_input_cgraph_nodes++;
3130 }
3131 }
3132
3133 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
3134
3135 /* Start the appropriate timer depending on the mode that we are
3136 operating in. */
3137 lto_timer = (flag_wpa) ? TV_WHOPR_WPA
3138 : (flag_ltrans) ? TV_WHOPR_LTRANS
3139 : TV_LTO;
3140 timevar_push (lto_timer);
3141
3142 current_function_decl = NULL;
3143 set_cfun (NULL);
3144
3145 if (!quiet_flag)
3146 fprintf (stderr, "\n");
3147
3148 timevar_pop (lto_timer);
3149 }
3150
3151
3152 /* Show various memory usage statistics related to LTO. */
3153 static void
3154 print_lto_report_1 (void)
3155 {
3156 const char *pfx = (flag_lto) ? "LTO" : (flag_wpa) ? "WPA" : "LTRANS";
3157 fprintf (stderr, "%s statistics\n", pfx);
3158
3159 fprintf (stderr, "[%s] read %lu SCCs of average size %f\n",
3160 pfx, num_sccs_read, total_scc_size / (double)num_sccs_read);
3161 fprintf (stderr, "[%s] %lu tree bodies read in total\n", pfx, total_scc_size);
3162 if (flag_wpa && tree_scc_hash.is_created ())
3163 {
3164 fprintf (stderr, "[%s] tree SCC table: size %ld, %ld elements, "
3165 "collision ratio: %f\n", pfx,
3166 (long) tree_scc_hash.size (),
3167 (long) tree_scc_hash.elements (),
3168 tree_scc_hash.collisions ());
3169 hash_table<tree_scc_hasher>::iterator hiter;
3170 tree_scc *scc, *max_scc = NULL;
3171 unsigned max_length = 0;
3172 FOR_EACH_HASH_TABLE_ELEMENT (tree_scc_hash, scc, x, hiter)
3173 {
3174 unsigned length = 0;
3175 tree_scc *s = scc;
3176 for (; s; s = s->next)
3177 length++;
3178 if (length > max_length)
3179 {
3180 max_length = length;
3181 max_scc = scc;
3182 }
3183 }
3184 fprintf (stderr, "[%s] tree SCC max chain length %u (size %u)\n",
3185 pfx, max_length, max_scc->len);
3186 fprintf (stderr, "[%s] Compared %lu SCCs, %lu collisions (%f)\n", pfx,
3187 num_scc_compares, num_scc_compare_collisions,
3188 num_scc_compare_collisions / (double) num_scc_compares);
3189 fprintf (stderr, "[%s] Merged %lu SCCs\n", pfx, num_sccs_merged);
3190 fprintf (stderr, "[%s] Merged %lu tree bodies\n", pfx,
3191 total_scc_size_merged);
3192 fprintf (stderr, "[%s] Merged %lu types\n", pfx, num_merged_types);
3193 fprintf (stderr, "[%s] %lu types prevailed (%lu associated trees)\n",
3194 pfx, num_prevailing_types, num_type_scc_trees);
3195 fprintf (stderr, "[%s] GIMPLE canonical type table: size %ld, "
3196 "%ld elements, %ld searches, %ld collisions (ratio: %f)\n", pfx,
3197 (long) htab_size (gimple_canonical_types),
3198 (long) htab_elements (gimple_canonical_types),
3199 (long) gimple_canonical_types->searches,
3200 (long) gimple_canonical_types->collisions,
3201 htab_collisions (gimple_canonical_types));
3202 fprintf (stderr, "[%s] GIMPLE canonical type pointer-map: "
3203 "%lu elements, %ld searches\n", pfx,
3204 num_canonical_type_hash_entries,
3205 num_canonical_type_hash_queries);
3206 }
3207
3208 print_lto_report (pfx);
3209 }
3210
3211 /* Perform whole program analysis (WPA) on the callgraph and write out the
3212 optimization plan. */
3213
3214 static void
3215 do_whole_program_analysis (void)
3216 {
3217 symtab_node *node;
3218
3219 lto_parallelism = 1;
3220
3221 /* TODO: jobserver communication is not supported yet. */
3222 if (!strcmp (flag_wpa, "jobserver"))
3223 lto_parallelism = -1;
3224 else
3225 {
3226 lto_parallelism = atoi (flag_wpa);
3227 if (lto_parallelism <= 0)
3228 lto_parallelism = 0;
3229 }
3230
3231 timevar_start (TV_PHASE_OPT_GEN);
3232
3233 /* Note that since we are in WPA mode, materialize_cgraph will not
3234 actually read in all the function bodies. It only materializes
3235 the decls and cgraph nodes so that analysis can be performed. */
3236 materialize_cgraph ();
3237
3238 /* Reading in the cgraph uses different timers, start timing WPA now. */
3239 timevar_push (TV_WHOPR_WPA);
3240
3241 if (pre_ipa_mem_report)
3242 {
3243 fprintf (stderr, "Memory consumption before IPA\n");
3244 dump_memory_report (false);
3245 }
3246
3247 cgraph_function_flags_ready = true;
3248
3249 if (cgraph_dump_file)
3250 dump_symtab (cgraph_dump_file);
3251 bitmap_obstack_initialize (NULL);
3252 cgraph_state = CGRAPH_STATE_IPA_SSA;
3253
3254 execute_ipa_pass_list (g->get_passes ()->all_regular_ipa_passes);
3255 symtab_remove_unreachable_nodes (false, dump_file);
3256
3257 if (cgraph_dump_file)
3258 {
3259 fprintf (cgraph_dump_file, "Optimized ");
3260 dump_symtab (cgraph_dump_file);
3261 }
3262 #ifdef ENABLE_CHECKING
3263 verify_cgraph ();
3264 #endif
3265 bitmap_obstack_release (NULL);
3266
3267 /* We are about to launch the final LTRANS phase, stop the WPA timer. */
3268 timevar_pop (TV_WHOPR_WPA);
3269
3270 timevar_push (TV_WHOPR_PARTITIONING);
3271 if (flag_lto_partition_1to1)
3272 lto_1_to_1_map ();
3273 else if (flag_lto_partition_max)
3274 lto_max_map ();
3275 else
3276 lto_balanced_map ();
3277
3278 /* AUX pointers are used by the partitioning code to bookkeep the number
3279 of partitions a symbol is in. This is no longer needed. */
3280 FOR_EACH_SYMBOL (node)
3281 node->aux = NULL;
3282
3283 lto_stats.num_cgraph_partitions += ltrans_partitions.length ();
3284 timevar_pop (TV_WHOPR_PARTITIONING);
3285
3286 timevar_stop (TV_PHASE_OPT_GEN);
3287 timevar_start (TV_PHASE_STREAM_OUT);
3288
3289 if (!quiet_flag)
3290 {
3291 fprintf (stderr, "\nStreaming out");
3292 fflush (stderr);
3293 }
3294 lto_wpa_write_files ();
3295 if (!quiet_flag)
3296 fprintf (stderr, "\n");
3297
3298 timevar_stop (TV_PHASE_STREAM_OUT);
3299
3300 ggc_collect ();
3301 if (post_ipa_mem_report)
3302 {
3303 fprintf (stderr, "Memory consumption after IPA\n");
3304 dump_memory_report (false);
3305 }
3306
3307 /* Show the LTO report before launching LTRANS. */
3308 if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
3309 print_lto_report_1 ();
3310 if (mem_report_wpa)
3311 dump_memory_report (true);
3312 }
3313
3314
3315 static GTY(()) tree lto_eh_personality_decl;
3316
3317 /* Return the LTO personality function decl. */
3318
3319 tree
3320 lto_eh_personality (void)
3321 {
3322 if (!lto_eh_personality_decl)
3323 {
3324 /* Use the first personality DECL for our personality if we don't
3325 support multiple ones. This ensures that we don't artificially
3326 create the need for them in a single-language program. */
3327 if (first_personality_decl && !dwarf2out_do_cfi_asm ())
3328 lto_eh_personality_decl = first_personality_decl;
3329 else
3330 lto_eh_personality_decl = lhd_gcc_personality ();
3331 }
3332
3333 return lto_eh_personality_decl;
3334 }
3335
3336 /* Set the process name based on the LTO mode. */
3337
3338 static void
3339 lto_process_name (void)
3340 {
3341 if (flag_lto)
3342 setproctitle ("lto1-lto");
3343 if (flag_wpa)
3344 setproctitle ("lto1-wpa");
3345 if (flag_ltrans)
3346 setproctitle ("lto1-ltrans");
3347 }
3348
3349
3350 /* Initialize the LTO front end. */
3351
3352 static void
3353 lto_init (void)
3354 {
3355 lto_process_name ();
3356 lto_streamer_hooks_init ();
3357 lto_reader_init ();
3358 lto_set_in_hooks (NULL, get_section_data, free_section_data);
3359 memset (&lto_stats, 0, sizeof (lto_stats));
3360 bitmap_obstack_initialize (NULL);
3361 gimple_register_cfg_hooks ();
3362 }
3363
3364
3365 /* Main entry point for the GIMPLE front end. This front end has
3366 three main personalities:
3367
3368 - LTO (-flto). All the object files on the command line are
3369 loaded in memory and processed as a single translation unit.
3370 This is the traditional link-time optimization behavior.
3371
3372 - WPA (-fwpa). Only the callgraph and summary information for
3373 files in the command file are loaded. A single callgraph
3374 (without function bodies) is instantiated for the whole set of
3375 files. IPA passes are only allowed to analyze the call graph
3376 and make transformation decisions. The callgraph is
3377 partitioned, each partition is written to a new object file
3378 together with the transformation decisions.
3379
3380 - LTRANS (-fltrans). Similar to -flto but it prevents the IPA
3381 summary passes from running again. Since WPA computed summary
3382 information and decided what transformations to apply, LTRANS
3383 simply applies them. */
3384
3385 void
3386 lto_main (void)
3387 {
3388 /* LTO is called as a front end, even though it is not a front end.
3389 Because it is called as a front end, TV_PHASE_PARSING and
3390 TV_PARSE_GLOBAL are active, and we need to turn them off while
3391 doing LTO. Later we turn them back on so they are active up in
3392 toplev.c. */
3393 timevar_pop (TV_PARSE_GLOBAL);
3394 timevar_stop (TV_PHASE_PARSING);
3395
3396 timevar_start (TV_PHASE_SETUP);
3397
3398 /* Initialize the LTO front end. */
3399 lto_init ();
3400
3401 timevar_stop (TV_PHASE_SETUP);
3402 timevar_start (TV_PHASE_STREAM_IN);
3403
3404 /* Read all the symbols and call graph from all the files in the
3405 command line. */
3406 read_cgraph_and_symbols (num_in_fnames, in_fnames);
3407
3408 timevar_stop (TV_PHASE_STREAM_IN);
3409
3410 if (!seen_error ())
3411 {
3412 /* If WPA is enabled analyze the whole call graph and create an
3413 optimization plan. Otherwise, read in all the function
3414 bodies and continue with optimization. */
3415 if (flag_wpa)
3416 do_whole_program_analysis ();
3417 else
3418 {
3419 timevar_start (TV_PHASE_OPT_GEN);
3420
3421 materialize_cgraph ();
3422 if (!flag_ltrans)
3423 lto_promote_statics_nonwpa ();
3424
3425 /* Let the middle end know that we have read and merged all of
3426 the input files. */
3427 compile ();
3428
3429 timevar_stop (TV_PHASE_OPT_GEN);
3430
3431 /* FIXME lto, if the processes spawned by WPA fail, we miss
3432 the chance to print WPA's report, so WPA will call
3433 print_lto_report before launching LTRANS. If LTRANS was
3434 launched directly by the driver we would not need to do
3435 this. */
3436 if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
3437 print_lto_report_1 ();
3438 }
3439 }
3440
3441 /* Here we make LTO pretend to be a parser. */
3442 timevar_start (TV_PHASE_PARSING);
3443 timevar_push (TV_PARSE_GLOBAL);
3444 }
3445
3446 #include "gt-lto-lto.h"