gimple.c (gimple_canonical_types, [...]): Move to lto/lto.c
[gcc.git] / gcc / lto / lto.c
1 /* Top-level LTO routines.
2 Copyright (C) 2009-2013 Free Software Foundation, Inc.
3 Contributed by CodeSourcery, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "opts.h"
25 #include "toplev.h"
26 #include "tree.h"
27 #include "tree-flow.h"
28 #include "diagnostic-core.h"
29 #include "tm.h"
30 #include "cgraph.h"
31 #include "ggc.h"
32 #include "tree-ssa-operands.h"
33 #include "tree-pass.h"
34 #include "langhooks.h"
35 #include "vec.h"
36 #include "bitmap.h"
37 #include "pointer-set.h"
38 #include "ipa-prop.h"
39 #include "common.h"
40 #include "debug.h"
41 #include "gimple.h"
42 #include "lto.h"
43 #include "lto-tree.h"
44 #include "lto-streamer.h"
45 #include "tree-streamer.h"
46 #include "splay-tree.h"
47 #include "lto-partition.h"
48 #include "data-streamer.h"
49 #include "context.h"
50 #include "pass_manager.h"
51
52 /* Vector to keep track of external variables we've seen so far. */
53 vec<tree, va_gc> *lto_global_var_decls;
54
55 static GTY(()) tree first_personality_decl;
56
57 /* Returns a hash code for P. */
58
59 static hashval_t
60 hash_name (const void *p)
61 {
62 const struct lto_section_slot *ds = (const struct lto_section_slot *) p;
63 return (hashval_t) htab_hash_string (ds->name);
64 }
65
66
67 /* Returns nonzero if P1 and P2 are equal. */
68
69 static int
70 eq_name (const void *p1, const void *p2)
71 {
72 const struct lto_section_slot *s1 =
73 (const struct lto_section_slot *) p1;
74 const struct lto_section_slot *s2 =
75 (const struct lto_section_slot *) p2;
76
77 return strcmp (s1->name, s2->name) == 0;
78 }
79
80 /* Free lto_section_slot */
81
82 static void
83 free_with_string (void *arg)
84 {
85 struct lto_section_slot *s = (struct lto_section_slot *)arg;
86
87 free (CONST_CAST (char *, s->name));
88 free (arg);
89 }
90
/* Create a hash table mapping section names to lto_section_slot
   entries.  The table owns its entries; htab_delete will call
   free_with_string on each.  */

htab_t
lto_obj_create_section_hash_table (void)
{
  /* 37 is just a small prime as the initial size hint.  */
  return htab_create (37, hash_name, eq_name, free_with_string);
}
98
/* Delete an allocated integer KEY in the splay tree.  Keys are
   heap-allocated HOST_WIDE_INTs (see lto_splay_tree_insert).  */

static void
lto_splay_tree_delete_id (splay_tree_key key)
{
  free ((void *) key);
}
106
107 /* Compare splay tree node ids A and B. */
108
109 static int
110 lto_splay_tree_compare_ids (splay_tree_key a, splay_tree_key b)
111 {
112 unsigned HOST_WIDE_INT ai;
113 unsigned HOST_WIDE_INT bi;
114
115 ai = *(unsigned HOST_WIDE_INT *) a;
116 bi = *(unsigned HOST_WIDE_INT *) b;
117
118 if (ai < bi)
119 return -1;
120 else if (ai > bi)
121 return 1;
122 return 0;
123 }
124
/* Look up splay tree node by ID in splay tree T.  Passing the address
   of the local ID as the key is safe because the comparison callback
   dereferences it only for the duration of the lookup.  */

static splay_tree_node
lto_splay_tree_lookup (splay_tree t, unsigned HOST_WIDE_INT id)
{
  return splay_tree_lookup (t, (splay_tree_key) &id);
}
132
133 /* Check if KEY has ID. */
134
135 static bool
136 lto_splay_tree_id_equal_p (splay_tree_key key, unsigned HOST_WIDE_INT id)
137 {
138 return *(unsigned HOST_WIDE_INT *) key == id;
139 }
140
/* Insert a splay tree node into tree T with ID as key and FILE_DATA as value.
   The ID is allocated separately because we need HOST_WIDE_INTs which may
   be wider than a splay_tree_key.  Ownership of the allocation passes to
   the tree, which frees it via lto_splay_tree_delete_id.  */

static void
lto_splay_tree_insert (splay_tree t, unsigned HOST_WIDE_INT id,
		       struct lto_file_decl_data *file_data)
{
  unsigned HOST_WIDE_INT *idp = XCNEW (unsigned HOST_WIDE_INT);
  *idp = id;
  splay_tree_insert (t, (splay_tree_key) idp, (splay_tree_value) file_data);
}
153
/* Create a splay tree keyed by allocated HOST_WIDE_INT ids.  Keys are
   freed on deletion; values (lto_file_decl_data pointers) are not owned
   by the tree, hence the NULL value-delete function.  */

static splay_tree
lto_splay_tree_new (void)
{
  return splay_tree_new (lto_splay_tree_compare_ids,
			 lto_splay_tree_delete_id,
			 NULL);
}
163
/* Return true when NODE has a clone that is analyzed (i.e. we need
   to load its body even if the node itself is not needed).

   Walks the clone tree rooted at NODE in pre-order without recursion:
   descend into NODE->clones, then advance via next_sibling_clone,
   backing up through clone_of when a subtree is exhausted.  */

static bool
has_analyzed_clone_p (struct cgraph_node *node)
{
  struct cgraph_node *orig = node;
  node = node->clones;
  if (node)
    while (node != orig)
      {
	if (node->symbol.analyzed)
	  return true;
	/* Descend first if there are children ...  */
	if (node->clones)
	  node = node->clones;
	/* ... otherwise try the next sibling ...  */
	else if (node->next_sibling_clone)
	  node = node->next_sibling_clone;
	else
	  {
	    /* ... and finally climb back up until a node with an
	       unvisited sibling (or the root) is found.  */
	    while (node != orig && !node->next_sibling_clone)
	      node = node->clone_of;
	    if (node != orig)
	      node = node->next_sibling_clone;
	  }
      }
  return false;
}
191
/* Read the function body for the function associated with NODE.
   Also records the first personality routine seen so it can be used
   as the default (see first_personality_decl).  Finally announces the
   decl to the middle end via rest_of_decl_compilation.  */

static void
lto_materialize_function (struct cgraph_node *node)
{
  tree decl;

  decl = node->symbol.decl;
  /* Read in functions with body (analyzed nodes)
     and also functions that are needed to produce virtual clones.  */
  if ((cgraph_function_with_gimple_body_p (node) && node->symbol.analyzed)
      || node->used_as_abstract_origin
      || has_analyzed_clone_p (node))
    {
      /* Clones don't need to be read.  */
      if (node->clone_of)
	return;
      if (DECL_FUNCTION_PERSONALITY (decl) && !first_personality_decl)
	first_personality_decl = DECL_FUNCTION_PERSONALITY (decl);
    }

  /* Let the middle end know about the function.  */
  rest_of_decl_compilation (decl, 1, 0);
}
216
217
/* Decode the content of memory pointed to by DATA in the in decl
   state object STATE.  DATA_IN points to a data_in structure for
   decoding.  Return the address after the decoded object in the
   input.

   Wire format: one cache index for the owning function decl (or
   void_type_node for the global state), then for each of the
   LTO_N_DECL_STREAMS streams a 32-bit size followed by that many
   tree-cache indices.  */

static const uint32_t *
lto_read_in_decl_state (struct data_in *data_in, const uint32_t *data,
			struct lto_in_decl_state *state)
{
  uint32_t ix;
  tree decl;
  uint32_t i, j;

  ix = *data++;
  decl = streamer_tree_cache_get_tree (data_in->reader_cache, ix);
  if (TREE_CODE (decl) != FUNCTION_DECL)
    {
      /* The global decl state is streamed with void_type_node as a
	 placeholder for the function decl.  */
      gcc_assert (decl == void_type_node);
      decl = NULL_TREE;
    }
  state->fn_decl = decl;

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      uint32_t size = *data++;
      /* GC-allocated so the trees stay reachable through the state.  */
      tree *decls = ggc_alloc_vec_tree (size);

      for (j = 0; j < size; j++)
	decls[j] = streamer_tree_cache_get_tree (data_in->reader_cache, data[j]);

      state->streams[i].size = size;
      state->streams[i].trees = decls;
      data += size;
    }

  return data;
}
255
256
/* Global canonical type table.  Maps a structural hash to the
   representative TYPE_CANONICAL; entries are weakly held (collected
   when no longer otherwise marked).  */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_canonical_types;
/* Cache of per-type canonical hash values, so the recursive hash in
   iterative_hash_canonical_type is computed only once per type.  */
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t canonical_type_hash_cache;
262
/* Returning a hash value for gimple type TYPE combined with VAL.

   The hash value returned is equal for types considered compatible
   by gimple_canonical_types_compatible_p.  Results are memoized in
   canonical_type_hash_cache, keyed by TYPE.  */

static hashval_t
iterative_hash_canonical_type (tree type, hashval_t val)
{
  hashval_t v;
  void **slot;
  struct tree_int_map *mp, m;

  /* Fast path: return the cached hash if we computed it before.  */
  m.base.from = type;
  if ((slot = htab_find_slot (canonical_type_hash_cache, &m, NO_INSERT)))
    return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, val);

  /* Combine a few common features of types so that types are grouped into
     smaller sets; when searching for existing matching types to merge,
     only existing types having the same features as the new type will be
     checked.  */
  v = iterative_hash_hashval_t (TREE_CODE (type), 0);
  v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
  v = iterative_hash_hashval_t (TYPE_ALIGN (type), v);
  v = iterative_hash_hashval_t (TYPE_MODE (type), v);

  /* Incorporate common features of numerical types.  */
  if (INTEGRAL_TYPE_P (type)
      || SCALAR_FLOAT_TYPE_P (type)
      || FIXED_POINT_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE
      || POINTER_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
      v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
    }

  if (VECTOR_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_VECTOR_SUBPARTS (type), v);
      v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
    }

  if (TREE_CODE (type) == COMPLEX_TYPE)
    v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);

  /* For pointer and reference types, fold in information about the type
     pointed to but do not recurse to the pointed-to type.  */
  if (POINTER_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_REF_CAN_ALIAS_ALL (type), v);
      v = iterative_hash_hashval_t (TYPE_ADDR_SPACE (TREE_TYPE (type)), v);
      v = iterative_hash_hashval_t (TYPE_RESTRICT (type), v);
      v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
    }

  /* For integer types hash only the string flag.  */
  if (TREE_CODE (type) == INTEGER_TYPE)
    v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);

  /* For array types hash the domain bounds and the string flag.  */
  if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
    {
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
      /* OMP lowering can introduce error_mark_node in place of
	 random local decls in types.  */
      if (TYPE_MIN_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
	v = iterative_hash_expr (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), v);
      if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
	v = iterative_hash_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), v);
    }

  /* Recurse for aggregates with a single element type.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    v = iterative_hash_canonical_type (TREE_TYPE (type), v);

  /* Incorporate function return and argument types.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      unsigned na;
      tree p;

      /* For method types also incorporate their parent class.  */
      if (TREE_CODE (type) == METHOD_TYPE)
	v = iterative_hash_canonical_type (TYPE_METHOD_BASETYPE (type), v);

      v = iterative_hash_canonical_type (TREE_TYPE (type), v);

      for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
	{
	  v = iterative_hash_canonical_type (TREE_VALUE (p), v);
	  na++;
	}

      v = iterative_hash_hashval_t (na, v);
    }

  if (RECORD_OR_UNION_TYPE_P (type))
    {
      unsigned nf;
      tree f;

      /* Only FIELD_DECLs matter for TBAA compatibility; hash their
	 types and count.  */
      for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
	if (TREE_CODE (f) == FIELD_DECL)
	  {
	    v = iterative_hash_canonical_type (TREE_TYPE (f), v);
	    nf++;
	  }

      v = iterative_hash_hashval_t (nf, v);
    }

  /* Cache the just computed hash value.  */
  mp = ggc_alloc_cleared_tree_int_map ();
  mp->base.from = type;
  mp->to = v;
  /* As we recurse the hashtable may expand between looking up the
     cached value (and not finding one) and here, so we have to
     re-lookup the slot.  */
  slot = htab_find_slot (canonical_type_hash_cache, &m, INSERT);
  *slot = (void *) mp;

  return iterative_hash_hashval_t (v, val);
}
388
389 static hashval_t
390 gimple_canonical_type_hash (const void *p)
391 {
392 return iterative_hash_canonical_type (CONST_CAST_TREE ((const_tree) p), 0);
393 }
394
395
396 /* The TYPE_CANONICAL merging machinery. It should closely resemble
397 the middle-end types_compatible_p function. It needs to avoid
398 claiming types are different for types that should be treated
399 the same with respect to TBAA. Canonical types are also used
400 for IL consistency checks via the useless_type_conversion_p
401 predicate which does not handle all type kinds itself but falls
402 back to pointer-comparison of TYPE_CANONICAL for aggregates
403 for example. */
404
/* Return true iff T1 and T2 are structurally identical for what
   TBAA is concerned.

   This deliberately avoids building SCCs: pointed-to types are only
   compared shallowly (same kind, qualifiers and address space), so the
   comparison always terminates without cycle detection.  */

static bool
gimple_canonical_types_compatible_p (tree t1, tree t2)
{
  /* Before starting to set up the SCC machinery handle simple cases.  */

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* If the types have been previously registered and found equal
     they still are.  */
  if (TYPE_CANONICAL (t1)
      && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
    return true;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Qualifiers do not matter for canonical type comparison purposes.  */

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment, or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Non-aggregate types can be handled cheaply.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
	return false;

      /* For canonical type comparisons we do not want to build SCCs
	 so we cannot compare pointed-to types.  But we can, for now,
	 require the same pointed-to type kind and match what
	 useless_type_conversion_p would do.  */
      if (POINTER_TYPE_P (t1))
	{
	  /* If the two pointers have different ref-all attributes,
	     they can't be the same type.  */
	  if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
	    return false;

	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
	    return false;

	  if (TYPE_RESTRICT (t1) != TYPE_RESTRICT (t2))
	    return false;

	  if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
	    return false;
	}

      /* Tail-recurse to components.  */
      if (TREE_CODE (t1) == VECTOR_TYPE
	  || TREE_CODE (t1) == COMPLEX_TYPE)
	return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
						    TREE_TYPE (t2));

      return true;
    }

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
	 the number of elements are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
	return false;
      else
	{
	  tree i1 = TYPE_DOMAIN (t1);
	  tree i2 = TYPE_DOMAIN (t2);

	  /* For an incomplete external array, the type domain can be
	     NULL_TREE.  Check this condition also.  */
	  if (i1 == NULL_TREE && i2 == NULL_TREE)
	    return true;
	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
	    return false;
	  else
	    {
	      tree min1 = TYPE_MIN_VALUE (i1);
	      tree min2 = TYPE_MIN_VALUE (i2);
	      tree max1 = TYPE_MAX_VALUE (i1);
	      tree max2 = TYPE_MAX_VALUE (i2);

	      /* The minimum/maximum values have to be the same.
		 PLACEHOLDER_EXPRs compare equal to each other since
		 their actual values are context-dependent.  */
	      if ((min1 == min2
		   || (min1 && min2
		       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
			    && TREE_CODE (min2) == PLACEHOLDER_EXPR)
			   || operand_equal_p (min1, min2, 0))))
		  && (max1 == max2
		      || (max1 && max2
			  && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
			       && TREE_CODE (max2) == PLACEHOLDER_EXPR)
			      || operand_equal_p (max1, max2, 0)))))
		return true;
	      else
		return false;
	    }
	}

    case METHOD_TYPE:
    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
	 are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
	return false;

      if (!comp_type_attributes (t1, t2))
	return false;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
	return true;
      else
	{
	  tree parms1, parms2;

	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
	       parms1 && parms2;
	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
	    {
	      if (!gimple_canonical_types_compatible_p
		     (TREE_VALUE (parms1), TREE_VALUE (parms2)))
		return false;
	    }

	  /* Different argument counts mean different types.  */
	  if (parms1 || parms2)
	    return false;

	  return true;
	}

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f1, f2;

	/* For aggregate types, all the fields must be the same.  */
	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
	     f1 || f2;
	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	  {
	    /* Skip non-fields.  */
	    while (f1 && TREE_CODE (f1) != FIELD_DECL)
	      f1 = TREE_CHAIN (f1);
	    while (f2 && TREE_CODE (f2) != FIELD_DECL)
	      f2 = TREE_CHAIN (f2);
	    if (!f1 || !f2)
	      break;
	    /* The fields must have the same name, offset and type.  */
	    if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
		|| !gimple_compare_field_offset (f1, f2)
		|| !gimple_canonical_types_compatible_p
		      (TREE_TYPE (f1), TREE_TYPE (f2)))
	      return false;
	  }

	/* If one aggregate has more fields than the other, they
	   are not the same.  */
	if (f1 || f2)
	  return false;

	return true;
      }

    default:
      gcc_unreachable ();
    }
}
609
610
611 /* Returns nonzero if P1 and P2 are equal. */
612
613 static int
614 gimple_canonical_type_eq (const void *p1, const void *p2)
615 {
616 const_tree t1 = (const_tree) p1;
617 const_tree t2 = (const_tree) p2;
618 return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
619 CONST_CAST_TREE (t2));
620 }
621
/* Register type T in the global type table gimple_types.
   If another type T', compatible with T, already existed in
   gimple_types then return T', otherwise return T.  This is used by
   LTO to merge identical types read from different TUs.

   ??? This merging does not exactly match how the tree.c middle-end
   functions will assign TYPE_CANONICAL when new types are created
   during optimization (which at least happens for pointer and array
   types).  */

static tree
gimple_register_canonical_type (tree t)
{
  void **slot;

  gcc_assert (TYPE_P (t));

  /* Already canonicalized: nothing to do.  */
  if (TYPE_CANONICAL (t))
    return TYPE_CANONICAL (t);

  slot = htab_find_slot (gimple_canonical_types, t, INSERT);
  if (*slot
      && *(tree *)slot != t)
    {
      /* A compatible type already exists; make it T's canonical
	 type and return the existing representative.  */
      tree new_type = (tree) *((tree *) slot);

      TYPE_CANONICAL (t) = new_type;
      t = new_type;
    }
  else
    {
      /* First of its kind: T becomes its own canonical type.  */
      TYPE_CANONICAL (t) = t;
      *slot = (void *) t;
    }

  return t;
}
659
/* Re-compute TYPE_CANONICAL for NODE and related types.

   Clears any stale TYPE_CANONICAL first so the registration sees the
   type as fresh, then recurses into the pointed-to/element type for
   pointers, complex and array types (whose canonical hash depends on
   their component type).  */

static void
lto_register_canonical_types (tree node)
{
  if (!node
      || !TYPE_P (node))
    return;

  TYPE_CANONICAL (node) = NULL_TREE;
  TYPE_CANONICAL (node) = gimple_register_canonical_type (node);

  if (POINTER_TYPE_P (node)
      || TREE_CODE (node) == COMPLEX_TYPE
      || TREE_CODE (node) == ARRAY_TYPE)
    lto_register_canonical_types (TREE_TYPE (node));
}
677
678
679 /* ??? Old hashing and merging code follows, we keep it for statistics
680 purposes for now. */
681
682 /* Global type table. FIXME, it should be possible to re-use some
683 of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
684 etc), but those assume that types were built with the various
685 build_*_type routines which is not the case with the streamer. */
686 static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
687 htab_t gimple_types;
688 static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
689 htab_t type_hash_cache;
690
691 static hashval_t gimple_type_hash (const void *);
692
693 /* Structure used to maintain a cache of some type pairs compared by
694 gimple_types_compatible_p when comparing aggregate types. There are
695 three possible values for SAME_P:
696
697 -2: The pair (T1, T2) has just been inserted in the table.
698 0: T1 and T2 are different types.
699 1: T1 and T2 are the same type. */
700
struct type_pair_d
{
  /* UIDs of the two types, stored with uid1 < uid2 so the pair is
     order-independent.  */
  unsigned int uid1;
  unsigned int uid2;
  /* Comparison state: -2 freshly inserted, 0 different, 1 same.  */
  signed char same_p;
};
typedef struct type_pair_d *type_pair_t;

/* Fixed-size direct-mapped cache of type pairs; collisions simply
   evict the previous entry (see lookup_type_pair).  */
#define GIMPLE_TYPE_PAIR_SIZE 16381
struct type_pair_d *type_pair_cache;
711
712
/* Lookup the pair of types T1 and T2 in the type-pair cache.  Insert a
   new entry (evicting any colliding one) if none existed.  */

static inline type_pair_t
lookup_type_pair (tree t1, tree t2)
{
  unsigned int index;
  unsigned int uid1, uid2;

  /* Normalize the pair so (T1, T2) and (T2, T1) map to the same slot.  */
  if (TYPE_UID (t1) < TYPE_UID (t2))
    {
      uid1 = TYPE_UID (t1);
      uid2 = TYPE_UID (t2);
    }
  else
    {
      uid1 = TYPE_UID (t2);
      uid2 = TYPE_UID (t1);
    }
  gcc_checking_assert (uid1 != uid2);

  /* iterative_hash_hashval_t would imply function calls.
     We know that UIDs are in a limited range, so combine them
     directly into a cache index instead.  */
  index = ((((unsigned HOST_WIDE_INT)uid1 << HOST_BITS_PER_WIDE_INT / 2) + uid2)
	   % GIMPLE_TYPE_PAIR_SIZE);
  if (type_pair_cache [index].uid1 == uid1
      && type_pair_cache [index].uid2 == uid2)
    return &type_pair_cache[index];

  /* Miss (or collision): take over the slot with an undecided state.  */
  type_pair_cache [index].uid1 = uid1;
  type_pair_cache [index].uid2 = uid2;
  type_pair_cache [index].same_p = -2;

  return &type_pair_cache[index];
}
748
/* Per pointer state for the SCC finding.  The on_sccstack flag
   is not strictly required, it is true when there is no hash value
   recorded for the type and false otherwise.  But querying that
   is slower.  */

struct sccs
{
  /* Tarjan SCC bookkeeping: DFS discovery number and the lowest
     dfsnum reachable from this node.  */
  unsigned int dfsnum;
  unsigned int low;
  bool on_sccstack;
  /* Payload depends on the walk: a hash value for type hashing, or
     the optimistic same_p lattice value for type comparison.  */
  union {
    hashval_t hash;
    signed char same_p;
  } u;
};

/* Monotonic DFS counters for the hashing and the comparison walks.  */
static unsigned int next_dfs_num;
static unsigned int gtc_next_dfs_num;
767
/* Return true if T1 and T2 have the same name.  If both types are
   anonymous they compare equal; if exactly one is anonymous they do
   not.  Names are reduced to IDENTIFIER_NODEs (through TYPE_DECLs)
   and compared by pointer identity.  */

static bool
compare_type_names_p (tree t1, tree t2)
{
  tree name1 = TYPE_NAME (t1);
  tree name2 = TYPE_NAME (t2);

  if ((name1 != NULL_TREE) != (name2 != NULL_TREE))
    return false;

  if (name1 == NULL_TREE)
    return true;

  /* Either both should be a TYPE_DECL or both an IDENTIFIER_NODE.  */
  if (TREE_CODE (name1) != TREE_CODE (name2))
    return false;

  if (TREE_CODE (name1) == TYPE_DECL)
    name1 = DECL_NAME (name1);
  gcc_checking_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);

  if (TREE_CODE (name2) == TYPE_DECL)
    name2 = DECL_NAME (name2);
  gcc_checking_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);

  /* Identifiers can be compared with pointer equality rather
     than a string comparison.  */
  if (name1 == name2)
    return true;

  return false;
}
803
804 static bool
805 gimple_types_compatible_p_1 (tree, tree, type_pair_t,
806 vec<type_pair_t> *,
807 struct pointer_map_t *, struct obstack *);
808
/* DFS visit the edge from the callers type pair with state *STATE to
   the pair T1, T2 while operating in FOR_MERGING_P mode.
   Update the merging status if it is not part of the SCC containing the
   callers pair and return it.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static bool
gtc_visit (tree t1, tree t2,
	   struct sccs *state,
	   vec<type_pair_t> *sccstack,
	   struct pointer_map_t *sccstate,
	   struct obstack *sccstate_obstack)
{
  struct sccs *cstate = NULL;
  type_pair_t p;
  void **slot;

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
	return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
	  || FIXED_POINT_TYPE_P (t1))
	return true;

      /* For other types fall through to more complex checks.  */
    }

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keeping the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash (t1) != gimple_type_hash (t2))
    return false;

  /* Allocate a new cache entry for this comparison.  */
  p = lookup_type_pair (t1, t2);
  if (p->same_p == 0 || p->same_p == 1)
    {
      /* We have already decided whether T1 and T2 are the
	 same, return the cached result.  */
      return p->same_p == 1;
    }

  if ((slot = pointer_map_contains (sccstate, p)) != NULL)
    cstate = (struct sccs *)*slot;
  /* Not yet visited.  DFS recurse.  */
  if (!cstate)
    {
      gimple_types_compatible_p_1 (t1, t2, p,
				   sccstack, sccstate, sccstate_obstack);
      cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
      state->low = MIN (state->low, cstate->low);
    }
  /* If the type is still on the SCC stack adjust the parents low.  */
  if (cstate->dfsnum < state->dfsnum
      && cstate->on_sccstack)
    state->low = MIN (cstate->dfsnum, state->low);

  /* Return the current lattice value.  We start with an equality
     assumption so types part of a SCC will be optimistically
     treated equal unless proven otherwise.  */
  return cstate->u.same_p;
}
916
917 /* Worker for gimple_types_compatible.
918 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
919
920 static bool
921 gimple_types_compatible_p_1 (tree t1, tree t2, type_pair_t p,
922 vec<type_pair_t> *sccstack,
923 struct pointer_map_t *sccstate,
924 struct obstack *sccstate_obstack)
925 {
926 struct sccs *state;
927
928 gcc_assert (p->same_p == -2);
929
930 state = XOBNEW (sccstate_obstack, struct sccs);
931 *pointer_map_insert (sccstate, p) = state;
932
933 sccstack->safe_push (p);
934 state->dfsnum = gtc_next_dfs_num++;
935 state->low = state->dfsnum;
936 state->on_sccstack = true;
937 /* Start with an equality assumption. As we DFS recurse into child
938 SCCs this assumption may get revisited. */
939 state->u.same_p = 1;
940
941 /* The struct tags shall compare equal. */
942 if (!compare_type_names_p (t1, t2))
943 goto different_types;
944
945 /* The main variant of both types should compare equal. */
946 if (TYPE_MAIN_VARIANT (t1) != t1
947 || TYPE_MAIN_VARIANT (t2) != t2)
948 {
949 if (!gtc_visit (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2),
950 state, sccstack, sccstate, sccstate_obstack))
951 goto different_types;
952 }
953
954 /* We may not merge typedef types to the same type in different
955 contexts. */
956 if (TYPE_NAME (t1)
957 && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
958 && DECL_CONTEXT (TYPE_NAME (t1))
959 && TYPE_P (DECL_CONTEXT (TYPE_NAME (t1))))
960 {
961 if (!gtc_visit (DECL_CONTEXT (TYPE_NAME (t1)),
962 DECL_CONTEXT (TYPE_NAME (t2)),
963 state, sccstack, sccstate, sccstate_obstack))
964 goto different_types;
965 }
966
967 /* If their attributes are not the same they can't be the same type. */
968 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
969 goto different_types;
970
971 /* Do type-specific comparisons. */
972 switch (TREE_CODE (t1))
973 {
974 case VECTOR_TYPE:
975 case COMPLEX_TYPE:
976 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
977 state, sccstack, sccstate, sccstate_obstack))
978 goto different_types;
979 goto same_types;
980
981 case ARRAY_TYPE:
982 /* Array types are the same if the element types are the same and
983 the number of elements are the same. */
984 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
985 state, sccstack, sccstate, sccstate_obstack)
986 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
987 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
988 goto different_types;
989 else
990 {
991 tree i1 = TYPE_DOMAIN (t1);
992 tree i2 = TYPE_DOMAIN (t2);
993
994 /* For an incomplete external array, the type domain can be
995 NULL_TREE. Check this condition also. */
996 if (i1 == NULL_TREE && i2 == NULL_TREE)
997 goto same_types;
998 else if (i1 == NULL_TREE || i2 == NULL_TREE)
999 goto different_types;
1000 else
1001 {
1002 tree min1 = TYPE_MIN_VALUE (i1);
1003 tree min2 = TYPE_MIN_VALUE (i2);
1004 tree max1 = TYPE_MAX_VALUE (i1);
1005 tree max2 = TYPE_MAX_VALUE (i2);
1006
1007 /* The minimum/maximum values have to be the same. */
1008 if ((min1 == min2
1009 || (min1 && min2
1010 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
1011 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
1012 || operand_equal_p (min1, min2, 0))))
1013 && (max1 == max2
1014 || (max1 && max2
1015 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
1016 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
1017 || operand_equal_p (max1, max2, 0)))))
1018 goto same_types;
1019 else
1020 goto different_types;
1021 }
1022 }
1023
1024 case METHOD_TYPE:
1025 /* Method types should belong to the same class. */
1026 if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
1027 state, sccstack, sccstate, sccstate_obstack))
1028 goto different_types;
1029
1030 /* Fallthru */
1031
1032 case FUNCTION_TYPE:
1033 /* Function types are the same if the return type and arguments types
1034 are the same. */
1035 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
1036 state, sccstack, sccstate, sccstate_obstack))
1037 goto different_types;
1038
1039 if (!comp_type_attributes (t1, t2))
1040 goto different_types;
1041
1042 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
1043 goto same_types;
1044 else
1045 {
1046 tree parms1, parms2;
1047
1048 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
1049 parms1 && parms2;
1050 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
1051 {
1052 if (!gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2),
1053 state, sccstack, sccstate, sccstate_obstack))
1054 goto different_types;
1055 }
1056
1057 if (parms1 || parms2)
1058 goto different_types;
1059
1060 goto same_types;
1061 }
1062
1063 case OFFSET_TYPE:
1064 {
1065 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
1066 state, sccstack, sccstate, sccstate_obstack)
1067 || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
1068 TYPE_OFFSET_BASETYPE (t2),
1069 state, sccstack, sccstate, sccstate_obstack))
1070 goto different_types;
1071
1072 goto same_types;
1073 }
1074
1075 case POINTER_TYPE:
1076 case REFERENCE_TYPE:
1077 {
1078 /* If the two pointers have different ref-all attributes,
1079 they can't be the same type. */
1080 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
1081 goto different_types;
1082
1083 /* Otherwise, pointer and reference types are the same if the
1084 pointed-to types are the same. */
1085 if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
1086 state, sccstack, sccstate, sccstate_obstack))
1087 goto same_types;
1088
1089 goto different_types;
1090 }
1091
1092 case INTEGER_TYPE:
1093 case BOOLEAN_TYPE:
1094 {
1095 tree min1 = TYPE_MIN_VALUE (t1);
1096 tree max1 = TYPE_MAX_VALUE (t1);
1097 tree min2 = TYPE_MIN_VALUE (t2);
1098 tree max2 = TYPE_MAX_VALUE (t2);
1099 bool min_equal_p = false;
1100 bool max_equal_p = false;
1101
1102 /* If either type has a minimum value, the other type must
1103 have the same. */
1104 if (min1 == NULL_TREE && min2 == NULL_TREE)
1105 min_equal_p = true;
1106 else if (min1 && min2 && operand_equal_p (min1, min2, 0))
1107 min_equal_p = true;
1108
1109 /* Likewise, if either type has a maximum value, the other
1110 type must have the same. */
1111 if (max1 == NULL_TREE && max2 == NULL_TREE)
1112 max_equal_p = true;
1113 else if (max1 && max2 && operand_equal_p (max1, max2, 0))
1114 max_equal_p = true;
1115
1116 if (!min_equal_p || !max_equal_p)
1117 goto different_types;
1118
1119 goto same_types;
1120 }
1121
1122 case ENUMERAL_TYPE:
1123 {
1124 /* FIXME lto, we cannot check bounds on enumeral types because
1125 different front ends will produce different values.
1126 In C, enumeral types are integers, while in C++ each element
1127 will have its own symbolic value. We should decide how enums
1128 are to be represented in GIMPLE and have each front end lower
1129 to that. */
1130 tree v1, v2;
1131
1132 /* For enumeral types, all the values must be the same. */
1133 if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
1134 goto same_types;
1135
1136 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
1137 v1 && v2;
1138 v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
1139 {
1140 tree c1 = TREE_VALUE (v1);
1141 tree c2 = TREE_VALUE (v2);
1142
1143 if (TREE_CODE (c1) == CONST_DECL)
1144 c1 = DECL_INITIAL (c1);
1145
1146 if (TREE_CODE (c2) == CONST_DECL)
1147 c2 = DECL_INITIAL (c2);
1148
1149 if (tree_int_cst_equal (c1, c2) != 1)
1150 goto different_types;
1151
1152 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
1153 goto different_types;
1154 }
1155
1156 /* If one enumeration has more values than the other, they
1157 are not the same. */
1158 if (v1 || v2)
1159 goto different_types;
1160
1161 goto same_types;
1162 }
1163
1164 case RECORD_TYPE:
1165 case UNION_TYPE:
1166 case QUAL_UNION_TYPE:
1167 {
1168 tree f1, f2;
1169
1170 /* For aggregate types, all the fields must be the same. */
1171 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
1172 f1 && f2;
1173 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1174 {
1175 /* Different field kinds are not compatible. */
1176 if (TREE_CODE (f1) != TREE_CODE (f2))
1177 goto different_types;
1178 /* Field decls must have the same name and offset. */
1179 if (TREE_CODE (f1) == FIELD_DECL
1180 && (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
1181 || !gimple_compare_field_offset (f1, f2)))
1182 goto different_types;
1183 /* All entities should have the same name and type. */
1184 if (DECL_NAME (f1) != DECL_NAME (f2)
1185 || !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2),
1186 state, sccstack, sccstate, sccstate_obstack))
1187 goto different_types;
1188 }
1189
1190 /* If one aggregate has more fields than the other, they
1191 are not the same. */
1192 if (f1 || f2)
1193 goto different_types;
1194
1195 goto same_types;
1196 }
1197
1198 default:
1199 gcc_unreachable ();
1200 }
1201
1202 /* Common exit path for types that are not compatible. */
1203 different_types:
1204 state->u.same_p = 0;
1205 goto pop;
1206
1207 /* Common exit path for types that are compatible. */
1208 same_types:
1209 gcc_assert (state->u.same_p == 1);
1210
1211 pop:
1212 if (state->low == state->dfsnum)
1213 {
1214 type_pair_t x;
1215
1216 /* Pop off the SCC and set its cache values to the final
1217 comparison result. */
1218 do
1219 {
1220 struct sccs *cstate;
1221 x = sccstack->pop ();
1222 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
1223 cstate->on_sccstack = false;
1224 x->same_p = state->u.same_p;
1225 }
1226 while (x != p);
1227 }
1228
1229 return state->u.same_p;
1230 }
1231
/* Return true iff T1 and T2 are structurally identical.  Performs a
   series of cheap shallow checks first and falls back to the SCC-based
   deep comparison (gimple_types_compatible_p_1) only for the remaining
   possibly-equal cases.  */

static bool
gimple_types_compatible_p (tree t1, tree t2)
{
  vec<type_pair_t> sccstack = vNULL;
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
  type_pair_t p = NULL;
  bool res;

  /* Before starting to set up the SCC machinery handle simple cases.  */

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
	return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
	  || FIXED_POINT_TYPE_P (t1))
	return true;

      /* For other types fall through to more complex checks.  */
    }

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keeping the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash (t1) != gimple_type_hash (t2))
    return false;

  /* If we've visited this type pair before (in the case of aggregates
     with self-referential types), and we made a decision, return it.  */
  p = lookup_type_pair (t1, t2);
  if (p->same_p == 0 || p->same_p == 1)
    {
      /* We have already decided whether T1 and T2 are the
	 same, return the cached result.  */
      return p->same_p == 1;
    }

  /* Now set up the SCC machinery for the comparison.  */
  gtc_next_dfs_num = 1;
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  res = gimple_types_compatible_p_1 (t1, t2, p,
				     &sccstack, sccstate, &sccstate_obstack);
  sccstack.release ();
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);

  return res;
}
1330
1331 static hashval_t
1332 iterative_hash_gimple_type (tree, hashval_t, vec<tree> *,
1333 struct pointer_map_t *, struct obstack *);
1334
1335 /* DFS visit the edge from the callers type with state *STATE to T.
1336 Update the callers type hash V with the hash for T if it is not part
1337 of the SCC containing the callers type and return it.
1338 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
1339
1340 static hashval_t
1341 visit (tree t, struct sccs *state, hashval_t v,
1342 vec<tree> *sccstack,
1343 struct pointer_map_t *sccstate,
1344 struct obstack *sccstate_obstack)
1345 {
1346 struct sccs *cstate = NULL;
1347 struct tree_int_map m;
1348 void **slot;
1349
1350 /* If there is a hash value recorded for this type then it can't
1351 possibly be part of our parent SCC. Simply mix in its hash. */
1352 m.base.from = t;
1353 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
1354 && *slot)
1355 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);
1356
1357 if ((slot = pointer_map_contains (sccstate, t)) != NULL)
1358 cstate = (struct sccs *)*slot;
1359 if (!cstate)
1360 {
1361 hashval_t tem;
1362 /* Not yet visited. DFS recurse. */
1363 tem = iterative_hash_gimple_type (t, v,
1364 sccstack, sccstate, sccstate_obstack);
1365 if (!cstate)
1366 cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
1367 state->low = MIN (state->low, cstate->low);
1368 /* If the type is no longer on the SCC stack and thus is not part
1369 of the parents SCC mix in its hash value. Otherwise we will
1370 ignore the type for hashing purposes and return the unaltered
1371 hash value. */
1372 if (!cstate->on_sccstack)
1373 return tem;
1374 }
1375 if (cstate->dfsnum < state->dfsnum
1376 && cstate->on_sccstack)
1377 state->low = MIN (cstate->dfsnum, state->low);
1378
1379 /* We are part of our parents SCC, skip this type during hashing
1380 and return the unaltered hash value. */
1381 return v;
1382 }
1383
1384 /* Hash NAME with the previous hash value V and return it. */
1385
1386 static hashval_t
1387 iterative_hash_name (tree name, hashval_t v)
1388 {
1389 if (!name)
1390 return v;
1391 v = iterative_hash_hashval_t (TREE_CODE (name), v);
1392 if (TREE_CODE (name) == TYPE_DECL)
1393 name = DECL_NAME (name);
1394 if (!name)
1395 return v;
1396 gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
1397 return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
1398 }
1399
1400 /* A type, hashvalue pair for sorting SCC members. */
1401
struct type_hash_pair {
  /* A member type of the SCC being processed.  */
  tree type;
  /* The individual hash value computed for TYPE.  */
  hashval_t hash;
};
1406
1407 /* Compare two type, hashvalue pairs. */
1408
1409 static int
1410 type_hash_pair_compare (const void *p1_, const void *p2_)
1411 {
1412 const struct type_hash_pair *p1 = (const struct type_hash_pair *) p1_;
1413 const struct type_hash_pair *p2 = (const struct type_hash_pair *) p2_;
1414 if (p1->hash < p2->hash)
1415 return -1;
1416 else if (p1->hash > p2->hash)
1417 return 1;
1418 return 0;
1419 }
1420
1421 /* Returning a hash value for gimple type TYPE combined with VAL.
1422 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.
1423
1424 To hash a type we end up hashing in types that are reachable.
1425 Through pointers we can end up with cycles which messes up the
1426 required property that we need to compute the same hash value
1427 for structurally equivalent types. To avoid this we have to
1428 hash all types in a cycle (the SCC) in a commutative way. The
1429 easiest way is to not mix in the hashes of the SCC members at
1430 all. To make this work we have to delay setting the hash
1431 values of the SCC until it is complete. */
1432
static hashval_t
iterative_hash_gimple_type (tree type, hashval_t val,
			    vec<tree> *sccstack,
			    struct pointer_map_t *sccstate,
			    struct obstack *sccstate_obstack)
{
  hashval_t v;
  void **slot;
  struct sccs *state;

  /* Not visited during this DFS walk.  */
  gcc_checking_assert (!pointer_map_contains (sccstate, type));
  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, type) = state;

  /* Standard Tarjan-SCC bookkeeping: push TYPE and record its DFS
     number as its own initial low-link.  */
  sccstack->safe_push (type);
  state->dfsnum = next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;

  /* Combine a few common features of types so that types are grouped into
     smaller sets; when searching for existing matching types to merge,
     only existing types having the same features as the new type will be
     checked.  */
  v = iterative_hash_name (TYPE_NAME (type), 0);
  if (TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_CONTEXT (TYPE_NAME (type))
      && TYPE_P (DECL_CONTEXT (TYPE_NAME (type))))
    v = visit (DECL_CONTEXT (TYPE_NAME (type)), state, v,
	       sccstack, sccstate, sccstate_obstack);

  /* Factor in the variant structure.  */
  if (TYPE_MAIN_VARIANT (type) != type)
    v = visit (TYPE_MAIN_VARIANT (type), state, v,
	       sccstack, sccstate, sccstate_obstack);

  v = iterative_hash_hashval_t (TREE_CODE (type), v);
  v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
  v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);

  /* Do not hash the types size as this will cause differences in
     hash values for the complete vs. the incomplete type variant.  */

  /* Incorporate common features of numerical types.  */
  if (INTEGRAL_TYPE_P (type)
      || SCALAR_FLOAT_TYPE_P (type)
      || FIXED_POINT_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
      v = iterative_hash_hashval_t (TYPE_MODE (type), v);
      v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
    }

  /* For pointer and reference types, fold in information about the type
     pointed to.  */
  if (POINTER_TYPE_P (type))
    v = visit (TREE_TYPE (type), state, v,
	       sccstack, sccstate, sccstate_obstack);

  /* For integer types hash the types min/max values and the string flag.  */
  if (TREE_CODE (type) == INTEGER_TYPE)
    {
      /* OMP lowering can introduce error_mark_node in place of
	 random local decls in types.  */
      if (TYPE_MIN_VALUE (type) != error_mark_node)
	v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
      if (TYPE_MAX_VALUE (type) != error_mark_node)
	v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
    }

  /* For array types hash the domain and the string flag.  */
  if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
    {
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
      v = visit (TYPE_DOMAIN (type), state, v,
		 sccstack, sccstate, sccstate_obstack);
    }

  /* Recurse for aggregates with a single element type.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    v = visit (TREE_TYPE (type), state, v,
	       sccstack, sccstate, sccstate_obstack);

  /* Incorporate function return and argument types.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      unsigned na;
      tree p;

      /* For method types also incorporate their parent class.  */
      if (TREE_CODE (type) == METHOD_TYPE)
	v = visit (TYPE_METHOD_BASETYPE (type), state, v,
		   sccstack, sccstate, sccstate_obstack);

      /* Check result and argument types.  */
      v = visit (TREE_TYPE (type), state, v,
		 sccstack, sccstate, sccstate_obstack);
      for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
	{
	  v = visit (TREE_VALUE (p), state, v,
		     sccstack, sccstate, sccstate_obstack);
	  na++;
	}

      /* The argument count distinguishes prototypes of differing arity.  */
      v = iterative_hash_hashval_t (na, v);
    }

  if (RECORD_OR_UNION_TYPE_P (type))
    {
      unsigned nf;
      tree f;

      /* Hash each field's name and type; the count is mixed in last.  */
      for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
	{
	  v = iterative_hash_name (DECL_NAME (f), v);
	  v = visit (TREE_TYPE (f), state, v,
		     sccstack, sccstate, sccstate_obstack);
	  nf++;
	}

      v = iterative_hash_hashval_t (nf, v);
    }

  /* Record hash for us.  */
  state->u.hash = v;

  /* See if we found an SCC.  */
  if (state->low == state->dfsnum)
    {
      tree x;
      struct tree_int_map *m;

      /* Pop off the SCC and set its hash values.  */
      x = sccstack->pop ();
      /* Optimize SCC size one.  */
      if (x == type)
	{
	  state->on_sccstack = false;
	  /* Cache the final hash for TYPE in type_hash_cache.  */
	  m = ggc_alloc_cleared_tree_int_map ();
	  m->base.from = x;
	  m->to = v;
	  slot = htab_find_slot (type_hash_cache, m, INSERT);
	  gcc_assert (!*slot);
	  *slot = (void *) m;
	}
      else
	{
	  struct sccs *cstate;
	  unsigned first, i, size, j;
	  struct type_hash_pair *pairs;
	  /* Pop off the SCC and build an array of type, hash pairs.  */
	  first = sccstack->length () - 1;
	  while ((*sccstack)[first] != type)
	    --first;
	  size = sccstack->length () - first + 1;
	  pairs = XALLOCAVEC (struct type_hash_pair, size);
	  i = 0;
	  /* The first popped element X is recorded before the loop since
	     the loop's exit test compares against TYPE.  */
	  cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	  cstate->on_sccstack = false;
	  pairs[i].type = x;
	  pairs[i].hash = cstate->u.hash;
	  do
	    {
	      x = sccstack->pop ();
	      cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	      cstate->on_sccstack = false;
	      ++i;
	      pairs[i].type = x;
	      pairs[i].hash = cstate->u.hash;
	    }
	  while (x != type);
	  gcc_assert (i + 1 == size);
	  /* Sort the arrays of type, hash pairs so that when we mix in
	     all members of the SCC the hash value becomes independent on
	     the order we visited the SCC.  Disregard hashes equal to
	     the hash of the type we mix into because we cannot guarantee
	     a stable sort for those across different TUs.  */
	  qsort (pairs, size, sizeof (struct type_hash_pair),
		 type_hash_pair_compare);
	  for (i = 0; i < size; ++i)
	    {
	      hashval_t hash;
	      m = ggc_alloc_cleared_tree_int_map ();
	      m->base.from = pairs[i].type;
	      hash = pairs[i].hash;
	      /* Skip same hashes.  */
	      for (j = i + 1; j < size && pairs[j].hash == pairs[i].hash; ++j)
		;
	      /* Mix in the hashes of all other SCC members, starting after
		 the run equal to our own and wrapping around.  */
	      for (; j < size; ++j)
		hash = iterative_hash_hashval_t (pairs[j].hash, hash);
	      for (j = 0; pairs[j].hash != pairs[i].hash; ++j)
		hash = iterative_hash_hashval_t (pairs[j].hash, hash);
	      m->to = hash;
	      if (pairs[i].type == type)
		v = hash;
	      slot = htab_find_slot (type_hash_cache, m, INSERT);
	      gcc_assert (!*slot);
	      *slot = (void *) m;
	    }
	}
    }

  return iterative_hash_hashval_t (v, val);
}
1641
1642 /* Returns a hash value for P (assumed to be a type). The hash value
1643 is computed using some distinguishing features of the type. Note
1644 that we cannot use pointer hashing here as we may be dealing with
1645 two distinct instances of the same type.
1646
1647 This function should produce the same hash value for two compatible
1648 types according to gimple_types_compatible_p. */
1649
1650 static hashval_t
1651 gimple_type_hash (const void *p)
1652 {
1653 const_tree t = (const_tree) p;
1654 vec<tree> sccstack = vNULL;
1655 struct pointer_map_t *sccstate;
1656 struct obstack sccstate_obstack;
1657 hashval_t val;
1658 void **slot;
1659 struct tree_int_map m;
1660
1661 m.base.from = CONST_CAST_TREE (t);
1662 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
1663 && *slot)
1664 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);
1665
1666 /* Perform a DFS walk and pre-hash all reachable types. */
1667 next_dfs_num = 1;
1668 sccstate = pointer_map_create ();
1669 gcc_obstack_init (&sccstate_obstack);
1670 val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
1671 &sccstack, sccstate, &sccstate_obstack);
1672 sccstack.release ();
1673 pointer_map_destroy (sccstate);
1674 obstack_free (&sccstate_obstack, NULL);
1675
1676 return val;
1677 }
1678
1679 /* Returns nonzero if P1 and P2 are equal. */
1680
1681 static int
1682 gimple_type_eq (const void *p1, const void *p2)
1683 {
1684 const_tree t1 = (const_tree) p1;
1685 const_tree t2 = (const_tree) p2;
1686 return gimple_types_compatible_p (CONST_CAST_TREE (t1),
1687 CONST_CAST_TREE (t2));
1688 }
1689
1690 /* Register type T in the global type table gimple_types. */
1691
1692 static tree
1693 gimple_register_type (tree t)
1694 {
1695 void **slot;
1696
1697 /* See if we already have an equivalent type registered. */
1698 slot = htab_find_slot (gimple_types, t, INSERT);
1699 if (*slot
1700 && *(tree *)slot != t)
1701 return (tree) *((tree *) slot);
1702
1703 /* If not, insert it to the cache and the hash. */
1704 *slot = (void *) t;
1705 return t;
1706 }
1707
1708 /* End of old merging code. */
1709
1710 /* Remember trees that contains references to declarations. */
1711 static GTY(()) vec <tree, va_gc> *tree_with_vars;
1712
/* If TT is a VAR_DECL or FUNCTION_DECL with public or external linkage,
   make the containing function return true: such decls may be replaced
   by a prevailing decl and need later fixup.  Note the hidden return.  */
#define CHECK_VAR(tt) \
  do \
    { \
      if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \
	  && (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \
	return true; \
    } while (0)

/* Assert (in checking builds) that TT is not a VAR_DECL or
   FUNCTION_DECL; used on fields that must never point at one.  */
#define CHECK_NO_VAR(tt) \
  gcc_checking_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
1723
1724 /* Check presence of pointers to decls in fields of a tree_typed T. */
1725
static inline bool
mentions_vars_p_typed (tree t)
{
  /* A tree_typed node only carries TREE_TYPE, which must not be a
     var/function decl.  */
  CHECK_NO_VAR (TREE_TYPE (t));
  return false;
}
1732
1733 /* Check presence of pointers to decls in fields of a tree_common T. */
1734
static inline bool
mentions_vars_p_common (tree t)
{
  /* Check the tree_typed fields first (may return true via CHECK_VAR
     in callees of further derived checkers).  */
  if (mentions_vars_p_typed (t))
    return true;
  CHECK_NO_VAR (TREE_CHAIN (t));
  return false;
}
1743
1744 /* Check presence of pointers to decls in fields of a decl_minimal T. */
1745
static inline bool
mentions_vars_p_decl_minimal (tree t)
{
  if (mentions_vars_p_common (t))
    return true;
  CHECK_NO_VAR (DECL_NAME (t));
  /* DECL_CONTEXT may legitimately be a function decl (a local decl's
     containing function) and then needs fixup.  */
  CHECK_VAR (DECL_CONTEXT (t));
  return false;
}
1755
1756 /* Check presence of pointers to decls in fields of a decl_common T. */
1757
static inline bool
mentions_vars_p_decl_common (tree t)
{
  if (mentions_vars_p_decl_minimal (t))
    return true;
  /* Size and initializer expressions can reference global decls.  */
  CHECK_VAR (DECL_SIZE (t));
  CHECK_VAR (DECL_SIZE_UNIT (t));
  CHECK_VAR (DECL_INITIAL (t));
  CHECK_NO_VAR (DECL_ATTRIBUTES (t));
  CHECK_VAR (DECL_ABSTRACT_ORIGIN (t));
  return false;
}
1770
1771 /* Check presence of pointers to decls in fields of a decl_with_vis T. */
1772
static inline bool
mentions_vars_p_decl_with_vis (tree t)
{
  if (mentions_vars_p_decl_common (t))
    return true;

  /* Accessor macro has side-effects, use field-name here. */
  CHECK_NO_VAR (t->decl_with_vis.assembler_name);
  CHECK_NO_VAR (DECL_SECTION_NAME (t));
  return false;
}
1784
1785 /* Check presence of pointers to decls in fields of a decl_non_common T. */
1786
static inline bool
mentions_vars_p_decl_non_common (tree t)
{
  if (mentions_vars_p_decl_with_vis (t))
    return true;
  /* None of the decl_non_common fields may point at a var/function.  */
  CHECK_NO_VAR (DECL_ARGUMENT_FLD (t));
  CHECK_NO_VAR (DECL_RESULT_FLD (t));
  CHECK_NO_VAR (DECL_VINDEX (t));
  return false;
}
1797
/* Check presence of pointers to decls in fields of a FUNCTION_DECL T.  */

static bool
mentions_vars_p_function (tree t)
{
  if (mentions_vars_p_decl_non_common (t))
    return true;
  /* The personality routine is itself a function decl.  */
  CHECK_VAR (DECL_FUNCTION_PERSONALITY (t));
  return false;
}
1808
1809 /* Check presence of pointers to decls in fields of a field_decl T. */
1810
static bool
mentions_vars_p_field_decl (tree t)
{
  if (mentions_vars_p_decl_common (t))
    return true;
  /* The byte offset expression may reference a variable.  */
  CHECK_VAR (DECL_FIELD_OFFSET (t));
  CHECK_NO_VAR (DECL_BIT_FIELD_TYPE (t));
  CHECK_NO_VAR (DECL_QUALIFIER (t));
  CHECK_NO_VAR (DECL_FIELD_BIT_OFFSET (t));
  CHECK_NO_VAR (DECL_FCONTEXT (t));
  return false;
}
1823
1824 /* Check presence of pointers to decls in fields of a type T. */
1825
static bool
mentions_vars_p_type (tree t)
{
  if (mentions_vars_p_common (t))
    return true;
  CHECK_NO_VAR (TYPE_CACHED_VALUES (t));
  /* Variable-sized types reference decls from their size expressions.  */
  CHECK_VAR (TYPE_SIZE (t));
  CHECK_VAR (TYPE_SIZE_UNIT (t));
  CHECK_NO_VAR (TYPE_ATTRIBUTES (t));
  CHECK_NO_VAR (TYPE_NAME (t));

  CHECK_VAR (TYPE_MINVAL (t));
  CHECK_VAR (TYPE_MAXVAL (t));

  /* Accessor is for derived node types only. */
  CHECK_NO_VAR (t->type_non_common.binfo);

  CHECK_VAR (TYPE_CONTEXT (t));
  CHECK_NO_VAR (TYPE_CANONICAL (t));
  CHECK_NO_VAR (TYPE_MAIN_VARIANT (t));
  CHECK_NO_VAR (TYPE_NEXT_VARIANT (t));
  return false;
}
1849
1850 /* Check presence of pointers to decls in fields of a BINFO T. */
1851
static bool
mentions_vars_p_binfo (tree t)
{
  unsigned HOST_WIDE_INT i, n;

  if (mentions_vars_p_common (t))
    return true;
  /* The vtable slot is a VAR_DECL reference that needs fixup.  */
  CHECK_VAR (BINFO_VTABLE (t));
  CHECK_NO_VAR (BINFO_OFFSET (t));
  CHECK_NO_VAR (BINFO_VIRTUALS (t));
  CHECK_NO_VAR (BINFO_VPTR_FIELD (t));
  n = vec_safe_length (BINFO_BASE_ACCESSES (t));
  for (i = 0; i < n; i++)
    CHECK_NO_VAR (BINFO_BASE_ACCESS (t, i));
  /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
     and BINFO_VPTR_INDEX; these are used by C++ FE only. */
  n = BINFO_N_BASE_BINFOS (t);
  for (i = 0; i < n; i++)
    CHECK_NO_VAR (BINFO_BASE_BINFO (t, i));
  return false;
}
1873
1874 /* Check presence of pointers to decls in fields of a CONSTRUCTOR T. */
1875
static bool
mentions_vars_p_constructor (tree t)
{
  unsigned HOST_WIDE_INT idx;
  constructor_elt *ce;

  if (mentions_vars_p_typed (t))
    return true;

  /* Element values (but not indices) may reference global decls.  */
  for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
    {
      CHECK_NO_VAR (ce->index);
      CHECK_VAR (ce->value);
    }
  return false;
}
1892
1893 /* Check presence of pointers to decls in fields of an expression tree T. */
1894
static bool
mentions_vars_p_expr (tree t)
{
  int i;
  if (mentions_vars_p_typed (t))
    return true;
  /* Any operand may be a reference to a global decl.  */
  for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
    CHECK_VAR (TREE_OPERAND (t, i));
  return false;
}
1905
1906 /* Check presence of pointers to decls that needs later fixup in T. */
1907
1908 static bool
1909 mentions_vars_p (tree t)
1910 {
1911 switch (TREE_CODE (t))
1912 {
1913 case IDENTIFIER_NODE:
1914 break;
1915
1916 case TREE_LIST:
1917 CHECK_VAR (TREE_VALUE (t));
1918 CHECK_VAR (TREE_PURPOSE (t));
1919 CHECK_NO_VAR (TREE_CHAIN (t));
1920 break;
1921
1922 case FIELD_DECL:
1923 return mentions_vars_p_field_decl (t);
1924 break;
1925
1926 case LABEL_DECL:
1927 case CONST_DECL:
1928 case PARM_DECL:
1929 case RESULT_DECL:
1930 case IMPORTED_DECL:
1931 case NAMESPACE_DECL:
1932 return mentions_vars_p_decl_common (t);
1933 break;
1934
1935 case VAR_DECL:
1936 return mentions_vars_p_decl_with_vis (t);
1937 break;
1938
1939 case TYPE_DECL:
1940 return mentions_vars_p_decl_non_common (t);
1941 break;
1942
1943 case FUNCTION_DECL:
1944 return mentions_vars_p_function (t);
1945 break;
1946
1947 case TREE_BINFO:
1948 return mentions_vars_p_binfo (t);
1949 break;
1950
1951 case PLACEHOLDER_EXPR:
1952 return mentions_vars_p_common (t);
1953 break;
1954
1955 case BLOCK:
1956 case TRANSLATION_UNIT_DECL:
1957 case OPTIMIZATION_NODE:
1958 case TARGET_OPTION_NODE:
1959 break;
1960
1961 case CONSTRUCTOR:
1962 return mentions_vars_p_constructor (t);
1963 break;
1964
1965 default:
1966 if (TYPE_P (t))
1967 {
1968 if (mentions_vars_p_type (t))
1969 return true;
1970 }
1971 else if (EXPR_P (t))
1972 {
1973 if (mentions_vars_p_expr (t))
1974 return true;
1975 }
1976 else if (CONSTANT_CLASS_P (t))
1977 CHECK_NO_VAR (TREE_TYPE (t));
1978 else
1979 gcc_unreachable ();
1980 }
1981 return false;
1982 }
1983
1984
1985 /* Return the resolution for the decl with index INDEX from DATA_IN. */
1986
1987 static enum ld_plugin_symbol_resolution
1988 get_resolution (struct data_in *data_in, unsigned index)
1989 {
1990 if (data_in->globals_resolution.exists ())
1991 {
1992 ld_plugin_symbol_resolution_t ret;
1993 /* We can have references to not emitted functions in
1994 DECL_FUNCTION_PERSONALITY at least. So we can and have
1995 to indeed return LDPR_UNKNOWN in some cases. */
1996 if (data_in->globals_resolution.length () <= index)
1997 return LDPR_UNKNOWN;
1998 ret = data_in->globals_resolution[index];
1999 return ret;
2000 }
2001 else
2002 /* Delay resolution finding until decl merging. */
2003 return LDPR_UNKNOWN;
2004 }
2005
2006 /* We need to record resolutions until symbol table is read. */
2007 static void
2008 register_resolution (struct lto_file_decl_data *file_data, tree decl,
2009 enum ld_plugin_symbol_resolution resolution)
2010 {
2011 if (resolution == LDPR_UNKNOWN)
2012 return;
2013 if (!file_data->resolution_map)
2014 file_data->resolution_map = pointer_map_create ();
2015 *pointer_map_insert (file_data->resolution_map, decl) = (void *)(size_t)resolution;
2016 }
2017
2018 /* Register DECL with the global symbol table and change its
2019 name if necessary to avoid name clashes for static globals across
2020 different files. */
2021
2022 static void
2023 lto_register_var_decl_in_symtab (struct data_in *data_in, tree decl,
2024 unsigned ix)
2025 {
2026 tree context;
2027
2028 /* Variable has file scope, not local. */
2029 if (!TREE_PUBLIC (decl)
2030 && !((context = decl_function_context (decl))
2031 && auto_var_in_fn_p (decl, context)))
2032 rest_of_decl_compilation (decl, 1, 0);
2033
2034 /* If this variable has already been declared, queue the
2035 declaration for merging. */
2036 if (TREE_PUBLIC (decl))
2037 register_resolution (data_in->file_data,
2038 decl, get_resolution (data_in, ix));
2039 }
2040
2041
2042 /* Register DECL with the global symbol table and change its
2043 name if necessary to avoid name clashes for static globals across
2044 different files. DATA_IN contains descriptors and tables for the
2045 file being read. */
2046
static void
lto_register_function_decl_in_symtab (struct data_in *data_in, tree decl,
				      unsigned ix)
{
  /* If this function has already been declared, queue the
     declaration for merging.  Abstract (debug-only) functions are
     never merged.  */
  if (TREE_PUBLIC (decl) && !DECL_ABSTRACT (decl))
    register_resolution (data_in->file_data,
			 decl, get_resolution (data_in, ix));
}
2057
2058
2059 /* For the type T re-materialize it in the type variant list and
2060 the pointer/reference-to chains. */
2061
static void
lto_fixup_prevailing_type (tree t)
{
  /* The following re-creates proper variant lists while fixing up
     the variant leaders.  We do not stream TYPE_NEXT_VARIANT so the
     variant list state before fixup is broken.  */

  /* If we are not our own variant leader link us into our new leaders
     variant list.  */
  if (TYPE_MAIN_VARIANT (t) != t)
    {
      tree mv = TYPE_MAIN_VARIANT (t);
      /* Splice T at the head of MV's variant list.  */
      TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
      TYPE_NEXT_VARIANT (mv) = t;
    }

  /* The following reconstructs the pointer chains
     of the new pointed-to type if we are a main variant.  We do
     not stream those so they are broken before fixup.  */
  if (TREE_CODE (t) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (t) == t)
    {
      /* Splice T at the head of the pointed-to type's pointer chain.  */
      TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (TREE_TYPE (t));
      TYPE_POINTER_TO (TREE_TYPE (t)) = t;
    }
  else if (TREE_CODE (t) == REFERENCE_TYPE
	   && TYPE_MAIN_VARIANT (t) == t)
    {
      /* Likewise for the reference-to chain.  */
      TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (TREE_TYPE (t));
      TYPE_REFERENCE_TO (TREE_TYPE (t)) = t;
    }
}
2094
2095
/* We keep prevailing tree SCCs in a hashtable with manual collision
   handling (in case all hashes compare the same) and keep the colliding
   entries in the tree_scc->next chain.  */

struct tree_scc
{
  /* Next SCC in the chain of entries whose whole-SCC hash collided
     with ours.  */
  tree_scc *next;
  /* Hash of the whole SCC.  */
  hashval_t hash;
  /* Number of trees in the SCC.  */
  unsigned len;
  /* Number of possible entries into the SCC (tree nodes [0..entry_len-1]
     which share the same individual tree hash).  */
  unsigned entry_len;
  /* The members of the SCC.
     We only need to remember the first entry node candidate for prevailing
     SCCs (but of course have access to all entries for SCCs we are
     processing).
     ??? For prevailing SCCs we really only need hash and the first
     entry candidate, but that's too awkward to implement.
     NOTE: pre-C99 trailing-array idiom; allocations over-allocate by
     (len - 1) * sizeof (tree) — see unify_scc.  */
  tree entries[1];
};
2118
/* Hash-table traits for tree_scc.  Hashing/equality only consult the
   cached whole-SCC hash and the length fields; the expensive structural
   comparison is done separately (see compare_tree_sccs) by walking the
   tree_scc->next collision chain.  */

struct tree_scc_hasher : typed_noop_remove <tree_scc>
{
  typedef tree_scc value_type;
  typedef tree_scc compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
2126
/* Return the pre-computed whole-SCC hash stored in SCC.  */

hashval_t
tree_scc_hasher::hash (const value_type *scc)
{
  return scc->hash;
}
2132
2133 bool
2134 tree_scc_hasher::equal (const value_type *scc1, const compare_type *scc2)
2135 {
2136 if (scc1->hash != scc2->hash
2137 || scc1->len != scc2->len
2138 || scc1->entry_len != scc2->entry_len)
2139 return false;
2140 return true;
2141 }
2142
/* Hash table of prevailing tree SCCs, keyed by whole-SCC hash; colliding
   entries are chained through tree_scc::next.  */
static hash_table <tree_scc_hasher> tree_scc_hash;
/* Obstack holding the permanent copies of prevailing tree_scc records
   (see unify_scc).  */
static struct obstack tree_scc_hash_obstack;

/* Statistics counters for SCC reading and merging, updated in
   unify_scc and lto_read_decls.  */
static unsigned long num_merged_types;
static unsigned long num_prevailing_types;
static unsigned long num_not_merged_types;
static unsigned long num_not_merged_types_in_same_scc;
static unsigned long num_not_merged_types_trees;
static unsigned long num_not_merged_types_in_same_scc_trees;
static unsigned long num_type_scc_trees;
static unsigned long total_scc_size;
static unsigned long num_sccs_read;
static unsigned long total_scc_size_merged;
static unsigned long num_sccs_merged;
static unsigned long num_scc_compares;
static unsigned long num_scc_compare_collisions;
2159
2160
/* Compare the two entries T1 and T2 of two SCCs that are possibly equal,
   recursing through in-SCC tree edges.  Returns true if the SCCs entered
   through T1 and T2 are equal and fills in *MAP with the pairs of
   SCC entries we visited, starting with (*MAP)[0] = T1 and (*MAP)[1] = T2.  */

static bool
compare_tree_sccs_1 (tree t1, tree t2, tree **map)
{
  enum tree_code code;

  /* Mark already visited nodes.  TREE_ASM_WRITTEN doubles as the
     visited-during-this-compare bit; unify_scc asserts it is clear on
     entry and compare_tree_sccs resets it between candidates.  */
  TREE_ASM_WRITTEN (t2) = 1;

  /* Push the pair onto map.  */
  (*map)[0] = t1;
  (*map)[1] = t2;
  *map = *map + 2;

  /* Compare value-fields.  Note the macro expands to an early
     "return false" from this function when a field differs.  */
#define compare_values(X) \
  do { \
    if (X(t1) != X(t2)) \
      return false; \
  } while (0)

  compare_values (TREE_CODE);
  code = TREE_CODE (t1);

  if (!TYPE_P (t1))
    {
      compare_values (TREE_SIDE_EFFECTS);
      compare_values (TREE_CONSTANT);
      compare_values (TREE_READONLY);
      compare_values (TREE_PUBLIC);
    }
  compare_values (TREE_ADDRESSABLE);
  compare_values (TREE_THIS_VOLATILE);
  if (DECL_P (t1))
    compare_values (DECL_UNSIGNED);
  else if (TYPE_P (t1))
    compare_values (TYPE_UNSIGNED);
  if (TYPE_P (t1))
    compare_values (TYPE_ARTIFICIAL);
  else
    compare_values (TREE_NO_WARNING);
  compare_values (TREE_NOTHROW);
  compare_values (TREE_STATIC);
  if (code != TREE_BINFO)
    compare_values (TREE_PRIVATE);
  compare_values (TREE_PROTECTED);
  compare_values (TREE_DEPRECATED);
  if (TYPE_P (t1))
    {
      compare_values (TYPE_SATURATING);
      compare_values (TYPE_ADDR_SPACE);
    }
  else if (code == SSA_NAME)
    compare_values (SSA_NAME_IS_DEFAULT_DEF);

  if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
    {
      compare_values (TREE_INT_CST_LOW);
      compare_values (TREE_INT_CST_HIGH);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
    {
      /* ??? No suitable compare routine available.  Compare the
	 REAL_VALUE_TYPE field by field instead.  */
      REAL_VALUE_TYPE r1 = TREE_REAL_CST (t1);
      REAL_VALUE_TYPE r2 = TREE_REAL_CST (t2);
      if (r1.cl != r2.cl
	  || r1.decimal != r2.decimal
	  || r1.sign != r2.sign
	  || r1.signalling != r2.signalling
	  || r1.canonical != r2.canonical
	  || r1.uexp != r2.uexp)
	return false;
      for (unsigned i = 0; i < SIGSZ; ++i)
	if (r1.sig[i] != r2.sig[i])
	  return false;
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
    if (!fixed_compare (EQ_EXPR,
			TREE_FIXED_CST_PTR (t1), TREE_FIXED_CST_PTR (t2)))
      return false;


  /* We don't want to compare locations, so there is nothing do compare
     for TS_DECL_MINIMAL.  */

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      compare_values (DECL_MODE);
      compare_values (DECL_NONLOCAL);
      compare_values (DECL_VIRTUAL_P);
      compare_values (DECL_IGNORED_P);
      compare_values (DECL_ABSTRACT);
      compare_values (DECL_ARTIFICIAL);
      compare_values (DECL_USER_ALIGN);
      compare_values (DECL_PRESERVE_P);
      compare_values (DECL_EXTERNAL);
      compare_values (DECL_GIMPLE_REG_P);
      compare_values (DECL_ALIGN);
      if (code == LABEL_DECL)
	{
	  compare_values (EH_LANDING_PAD_NR);
	  compare_values (LABEL_DECL_UID);
	}
      else if (code == FIELD_DECL)
	{
	  compare_values (DECL_PACKED);
	  compare_values (DECL_NONADDRESSABLE_P);
	  compare_values (DECL_OFFSET_ALIGN);
	}
      else if (code == VAR_DECL)
	{
	  compare_values (DECL_HAS_DEBUG_EXPR_P);
	  compare_values (DECL_NONLOCAL_FRAME);
	}
      if (code == RESULT_DECL
	  || code == PARM_DECL
	  || code == VAR_DECL)
	{
	  compare_values (DECL_BY_REFERENCE);
	  if (code == VAR_DECL
	      || code == PARM_DECL)
	    compare_values (DECL_HAS_VALUE_EXPR_P);
	}
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
    compare_values (DECL_REGISTER);

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      compare_values (DECL_COMMON);
      compare_values (DECL_DLLIMPORT_P);
      compare_values (DECL_WEAK);
      compare_values (DECL_SEEN_IN_BIND_EXPR_P);
      compare_values (DECL_COMDAT);
      compare_values (DECL_VISIBILITY);
      compare_values (DECL_VISIBILITY_SPECIFIED);
      if (code == VAR_DECL)
	{
	  compare_values (DECL_HARD_REGISTER);
	  /* DECL_IN_TEXT_SECTION is set during final asm output only.  */
	  compare_values (DECL_IN_CONSTANT_POOL);
	  compare_values (DECL_TLS_MODEL);
	}
      if (VAR_OR_FUNCTION_DECL_P (t1))
	compare_values (DECL_INIT_PRIORITY);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      compare_values (DECL_BUILT_IN_CLASS);
      compare_values (DECL_STATIC_CONSTRUCTOR);
      compare_values (DECL_STATIC_DESTRUCTOR);
      compare_values (DECL_UNINLINABLE);
      compare_values (DECL_POSSIBLY_INLINED);
      compare_values (DECL_IS_NOVOPS);
      compare_values (DECL_IS_RETURNS_TWICE);
      compare_values (DECL_IS_MALLOC);
      compare_values (DECL_IS_OPERATOR_NEW);
      compare_values (DECL_DECLARED_INLINE_P);
      compare_values (DECL_STATIC_CHAIN);
      compare_values (DECL_NO_INLINE_WARNING_P);
      compare_values (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT);
      compare_values (DECL_NO_LIMIT_STACK);
      compare_values (DECL_DISREGARD_INLINE_LIMITS);
      compare_values (DECL_PURE_P);
      compare_values (DECL_LOOPING_CONST_OR_PURE_P);
      compare_values (DECL_FINAL_P);
      compare_values (DECL_CXX_CONSTRUCTOR_P);
      compare_values (DECL_CXX_DESTRUCTOR_P);
      if (DECL_BUILT_IN_CLASS (t1) != NOT_BUILT_IN)
	compare_values (DECL_FUNCTION_CODE);
      if (DECL_STATIC_DESTRUCTOR (t1))
	compare_values (DECL_FINI_PRIORITY);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      compare_values (TYPE_MODE);
      compare_values (TYPE_STRING_FLAG);
      compare_values (TYPE_NO_FORCE_BLK);
      compare_values (TYPE_NEEDS_CONSTRUCTING);
      if (RECORD_OR_UNION_TYPE_P (t1))
	{
	  compare_values (TYPE_TRANSPARENT_AGGR);
	  compare_values (TYPE_FINAL_P);
	}
      else if (code == ARRAY_TYPE)
	compare_values (TYPE_NONALIASED_COMPONENT);
      compare_values (TYPE_PACKED);
      compare_values (TYPE_RESTRICT);
      compare_values (TYPE_USER_ALIGN);
      compare_values (TYPE_READONLY);
      compare_values (TYPE_PRECISION);
      compare_values (TYPE_ALIGN);
      compare_values (TYPE_ALIAS_SET);
    }

  /* We don't want to compare locations, so there is nothing do compare
     for TS_EXP.  */

  /* BLOCKs are function local and we don't merge anything there, so
     simply refuse to merge.  */
  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    return false;

  if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
    if (strcmp (TRANSLATION_UNIT_LANGUAGE (t1),
		TRANSLATION_UNIT_LANGUAGE (t2)) != 0)
      return false;

  if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
    if (memcmp (TREE_TARGET_OPTION (t1), TREE_TARGET_OPTION (t2),
		sizeof (struct cl_target_option)) != 0)
      return false;

  if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
    if (memcmp (TREE_OPTIMIZATION (t1), TREE_OPTIMIZATION (t2),
		sizeof (struct cl_optimization)) != 0)
      return false;

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    if (vec_safe_length (BINFO_BASE_ACCESSES (t1))
	!= vec_safe_length (BINFO_BASE_ACCESSES (t2)))
      return false;

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    compare_values (CONSTRUCTOR_NELTS);

  if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
    if (IDENTIFIER_LENGTH (t1) != IDENTIFIER_LENGTH (t2)
	|| memcmp (IDENTIFIER_POINTER (t1), IDENTIFIER_POINTER (t2),
		   IDENTIFIER_LENGTH (t1)) != 0)
      return false;

  if (CODE_CONTAINS_STRUCT (code, TS_STRING))
    if (TREE_STRING_LENGTH (t1) != TREE_STRING_LENGTH (t2)
	|| memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
		   TREE_STRING_LENGTH (t1)) != 0)
      return false;

#undef compare_values


  /* Compare pointer fields.  */

  /* Recurse.  Search & Replaced from DFS_write_tree_body.
     Folding the early checks into the compare_tree_edges recursion
     macro makes debugging way quicker as you are able to break on
     compare_tree_sccs_1 and simply finish until a call returns false
     to spot the SCC members with the difference.
     Edges compare equal when they are the identical node, or when the
     destination is outside the SCC (TREE_VISITED clear), or when the
     recursive compare of two in-SCC destinations succeeds.  */
#define compare_tree_edges(E1, E2) \
  do { \
    tree t1_ = (E1), t2_ = (E2); \
    if (t1_ != t2_ \
	&& (!t1_ || !t2_ \
	    || !TREE_VISITED (t2_) \
	    || (!TREE_ASM_WRITTEN (t2_) \
		&& !compare_tree_sccs_1 (t1_, t2_, map)))) \
      return false; \
    /* Only non-NULL trees outside of the SCC may compare equal.  */ \
    gcc_checking_assert (t1_ != t2_ || (!t2_ || !TREE_VISITED (t2_))); \
  } while (0)

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (code != IDENTIFIER_NODE)
	compare_tree_edges (TREE_TYPE (t1), TREE_TYPE (t2));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      unsigned i;
      /* Note that the number of elements for EXPR has already been emitted
	 in EXPR's header (see streamer_write_tree_header).  */
      for (i = 0; i < VECTOR_CST_NELTS (t1); ++i)
	compare_tree_edges (VECTOR_CST_ELT (t1, i), VECTOR_CST_ELT (t2, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      compare_tree_edges (TREE_REALPART (t1), TREE_REALPART (t2));
      compare_tree_edges (TREE_IMAGPART (t1), TREE_IMAGPART (t2));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      compare_tree_edges (DECL_NAME (t1), DECL_NAME (t2));
      /* ??? Global decls from different TUs have non-matching
	 TRANSLATION_UNIT_DECLs.  Only consider a small set of
	 decls equivalent, we should not end up merging others.  */
      if ((code == TYPE_DECL
	   || code == NAMESPACE_DECL
	   || code == IMPORTED_DECL
	   || code == CONST_DECL
	   || (VAR_OR_FUNCTION_DECL_P (t1)
	       && (TREE_PUBLIC (t1) || DECL_EXTERNAL (t1))))
	  && DECL_FILE_SCOPE_P (t1) && DECL_FILE_SCOPE_P (t2))
	;
      else
	compare_tree_edges (DECL_CONTEXT (t1), DECL_CONTEXT (t2));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      compare_tree_edges (DECL_SIZE (t1), DECL_SIZE (t2));
      compare_tree_edges (DECL_SIZE_UNIT (t1), DECL_SIZE_UNIT (t2));
      compare_tree_edges (DECL_ATTRIBUTES (t1), DECL_ATTRIBUTES (t2));
      if ((code == VAR_DECL
	   || code == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (t1))
	compare_tree_edges (DECL_VALUE_EXPR (t1), DECL_VALUE_EXPR (t2));
      if (code == VAR_DECL
	  && DECL_HAS_DEBUG_EXPR_P (t1))
	compare_tree_edges (DECL_DEBUG_EXPR (t1), DECL_DEBUG_EXPR (t2));
      /* LTO specific edges.  */
      if (code != FUNCTION_DECL
	  && code != TRANSLATION_UNIT_DECL)
	compare_tree_edges (DECL_INITIAL (t1), DECL_INITIAL (t2));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (code == FUNCTION_DECL)
	{
	  tree a1, a2;
	  /* NOTE(review): if one argument chain is shorter the loop
	     passes a NULL against a non-NULL node and compare_tree_edges
	     rejects the pair, so unequal lengths compare unequal.  */
	  for (a1 = DECL_ARGUMENTS (t1), a2 = DECL_ARGUMENTS (t2);
	       a1 || a2;
	       a1 = TREE_CHAIN (a1), a2 = TREE_CHAIN (a2))
	    compare_tree_edges (a1, a2);
	  compare_tree_edges (DECL_RESULT (t1), DECL_RESULT (t2));
	}
      else if (code == TYPE_DECL)
	compare_tree_edges (DECL_ORIGINAL_TYPE (t1), DECL_ORIGINAL_TYPE (t2));
      compare_tree_edges (DECL_VINDEX (t1), DECL_VINDEX (t2));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* Make sure we don't inadvertently set the assembler name.  */
      if (DECL_ASSEMBLER_NAME_SET_P (t1))
	compare_tree_edges (DECL_ASSEMBLER_NAME (t1),
			    DECL_ASSEMBLER_NAME (t2));
      compare_tree_edges (DECL_SECTION_NAME (t1), DECL_SECTION_NAME (t2));
      compare_tree_edges (DECL_COMDAT_GROUP (t1), DECL_COMDAT_GROUP (t2));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      compare_tree_edges (DECL_FIELD_OFFSET (t1), DECL_FIELD_OFFSET (t2));
      compare_tree_edges (DECL_BIT_FIELD_TYPE (t1), DECL_BIT_FIELD_TYPE (t2));
      compare_tree_edges (DECL_BIT_FIELD_REPRESENTATIVE (t1),
			  DECL_BIT_FIELD_REPRESENTATIVE (t2));
      compare_tree_edges (DECL_FIELD_BIT_OFFSET (t1),
			  DECL_FIELD_BIT_OFFSET (t2));
      compare_tree_edges (DECL_FCONTEXT (t1), DECL_FCONTEXT (t2));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      compare_tree_edges (DECL_FUNCTION_PERSONALITY (t1),
			  DECL_FUNCTION_PERSONALITY (t2));
      compare_tree_edges (DECL_FUNCTION_SPECIFIC_TARGET (t1),
			  DECL_FUNCTION_SPECIFIC_TARGET (t2));
      compare_tree_edges (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t1),
			  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t2));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      compare_tree_edges (TYPE_SIZE (t1), TYPE_SIZE (t2));
      compare_tree_edges (TYPE_SIZE_UNIT (t1), TYPE_SIZE_UNIT (t2));
      compare_tree_edges (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2));
      compare_tree_edges (TYPE_NAME (t1), TYPE_NAME (t2));
      /* Do not compare TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
	 reconstructed during fixup.  */
      /* Do not compare TYPE_NEXT_VARIANT, we reconstruct the variant lists
	 during fixup.  */
      compare_tree_edges (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2));
      /* ??? Global types from different TUs have non-matching
	 TRANSLATION_UNIT_DECLs.  Still merge them if they are otherwise
	 equal.  */
      if (TYPE_FILE_SCOPE_P (t1) && TYPE_FILE_SCOPE_P (t2))
	;
      else
	compare_tree_edges (TYPE_CONTEXT (t1), TYPE_CONTEXT (t2));
      /* TYPE_CANONICAL is re-computed during type merging, so do not
	 compare it here.  */
      compare_tree_edges (TYPE_STUB_DECL (t1), TYPE_STUB_DECL (t2));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (code == ENUMERAL_TYPE)
	compare_tree_edges (TYPE_VALUES (t1), TYPE_VALUES (t2));
      else if (code == ARRAY_TYPE)
	compare_tree_edges (TYPE_DOMAIN (t1), TYPE_DOMAIN (t2));
      else if (RECORD_OR_UNION_TYPE_P (t1))
	{
	  tree f1, f2;
	  for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
	       f1 || f2;
	       f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	    compare_tree_edges (f1, f2);
	  compare_tree_edges (TYPE_BINFO (t1), TYPE_BINFO (t2));
	}
      else if (code == FUNCTION_TYPE
	       || code == METHOD_TYPE)
	compare_tree_edges (TYPE_ARG_TYPES (t1), TYPE_ARG_TYPES (t2));
      if (!POINTER_TYPE_P (t1))
	compare_tree_edges (TYPE_MINVAL (t1), TYPE_MINVAL (t2));
      compare_tree_edges (TYPE_MAXVAL (t1), TYPE_MAXVAL (t2));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      compare_tree_edges (TREE_PURPOSE (t1), TREE_PURPOSE (t2));
      compare_tree_edges (TREE_VALUE (t1), TREE_VALUE (t2));
      compare_tree_edges (TREE_CHAIN (t1), TREE_CHAIN (t2));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    for (int i = 0; i < TREE_VEC_LENGTH (t1); i++)
      compare_tree_edges (TREE_VEC_ELT (t1, i), TREE_VEC_ELT (t2, i));

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      for (int i = 0; i < TREE_OPERAND_LENGTH (t1); i++)
	compare_tree_edges (TREE_OPERAND (t1, i),
			    TREE_OPERAND (t2, i));

      /* BLOCKs are function local and we don't merge anything there.  */
      if (TREE_BLOCK (t1) || TREE_BLOCK (t2))
	return false;
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree t;
      /* Lengths have already been compared above.  */
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t1), i, t)
	compare_tree_edges (t, BINFO_BASE_BINFO (t2, i));
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t1), i, t)
	compare_tree_edges (t, BINFO_BASE_ACCESS (t2, i));
      compare_tree_edges (BINFO_OFFSET (t1), BINFO_OFFSET (t2));
      compare_tree_edges (BINFO_VTABLE (t1), BINFO_VTABLE (t2));
      compare_tree_edges (BINFO_VPTR_FIELD (t1), BINFO_VPTR_FIELD (t2));
      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;
      /* Lengths have already been compared above.  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t1), i, index, value)
	{
	  compare_tree_edges (index, CONSTRUCTOR_ELT (t2, i)->index);
	  compare_tree_edges (value, CONSTRUCTOR_ELT (t2, i)->value);
	}
    }

#undef compare_tree_edges

  return true;
}
2635
2636 /* Compare the tree scc SCC to the prevailing candidate PSCC, filling
2637 out MAP if they are equal. */
2638
2639 static bool
2640 compare_tree_sccs (tree_scc *pscc, tree_scc *scc,
2641 tree *map)
2642 {
2643 /* Assume SCC entry hashes are sorted after their cardinality. Which
2644 means we can simply take the first n-tuple of equal hashes
2645 (which is recorded as entry_len) and do n SCC entry candidate
2646 comparisons. */
2647 for (unsigned i = 0; i < pscc->entry_len; ++i)
2648 {
2649 tree *mapp = map;
2650 num_scc_compare_collisions++;
2651 if (compare_tree_sccs_1 (pscc->entries[0], scc->entries[i], &mapp))
2652 {
2653 /* Equal - no need to reset TREE_VISITED or TREE_ASM_WRITTEN
2654 on the scc as all trees will be freed. */
2655 return true;
2656 }
2657 /* Reset TREE_ASM_WRITTEN on scc for the next compare or in case
2658 the SCC prevails. */
2659 for (unsigned j = 0; j < scc->len; ++j)
2660 TREE_ASM_WRITTEN (scc->entries[j]) = 0;
2661 }
2662
2663 return false;
2664 }
2665
2666 /* QSort sort function to sort a map of two pointers after the 2nd
2667 pointer. */
2668
2669 static int
2670 cmp_tree (const void *p1_, const void *p2_)
2671 {
2672 tree *p1 = (tree *)(const_cast<void *>(p1_));
2673 tree *p2 = (tree *)(const_cast<void *>(p2_));
2674 if (p1[1] == p2[1])
2675 return 0;
2676 return ((uintptr_t)p1[1] < (uintptr_t)p2[1]) ? -1 : 1;
2677 }
2678
/* Try to unify the SCC with nodes FROM to FROM + LEN in CACHE and
   hash value SCC_HASH with an already recorded SCC.  Return true if
   that was successful, otherwise return false.  */

static bool
unify_scc (struct streamer_tree_cache_d *cache, unsigned from,
	   unsigned len, unsigned scc_entry_len, hashval_t scc_hash)
{
  bool unified_p = false;
  /* Build a temporary descriptor on the stack; entries[] is a trailing
     array so over-allocate by LEN - 1 elements.  */
  tree_scc *scc
    = (tree_scc *) alloca (sizeof (tree_scc) + (len - 1) * sizeof (tree));
  scc->next = NULL;
  scc->hash = scc_hash;
  scc->len = len;
  scc->entry_len = scc_entry_len;
  for (unsigned i = 0; i < len; ++i)
    {
      tree t = streamer_tree_cache_get_tree (cache, from + i);
      scc->entries[i] = t;
      /* Do not merge SCCs with local entities inside them.  Also do
	 not merge TRANSLATION_UNIT_DECLs.  */
      if (TREE_CODE (t) == TRANSLATION_UNIT_DECL
	  || (VAR_OR_FUNCTION_DECL_P (t)
	      && !(TREE_PUBLIC (t) || DECL_EXTERNAL (t)))
	  || TREE_CODE (t) == LABEL_DECL)
	{
	  /* Avoid doing any work for these cases and do not worry to
	     record the SCCs for further merging.  */
	  return false;
	}
    }

  /* Look for the list of candidate SCCs to compare against.  */
  tree_scc **slot;
  slot = tree_scc_hash.find_slot_with_hash (scc, scc_hash, INSERT);
  if (*slot)
    {
      /* Try unifying against each candidate.  */
      num_scc_compares++;

      /* Set TREE_VISITED on the scc so we can easily identify tree nodes
	 outside of the scc when following tree edges.  Make sure
	 that TREE_ASM_WRITTEN is unset so we can use it as 2nd bit
	 to track whether we visited the SCC member during the compare.
	 We cannot use TREE_VISITED on the pscc members as the extended
	 scc and pscc can overlap.  */
      for (unsigned i = 0; i < scc->len; ++i)
	{
	  TREE_VISITED (scc->entries[i]) = 1;
	  gcc_checking_assert (!TREE_ASM_WRITTEN (scc->entries[i]));
	}

      /* MAP receives (prevailing, to-be-freed) node pairs from
	 compare_tree_sccs on success.  */
      tree *map = XALLOCAVEC (tree, 2 * len);
      for (tree_scc *pscc = *slot; pscc; pscc = pscc->next)
	{
	  if (!compare_tree_sccs (pscc, scc, map))
	    continue;

	  /* Found an equal SCC.  */
	  unified_p = true;
	  num_scc_compare_collisions--;
	  num_sccs_merged++;
	  total_scc_size_merged += len;

#ifdef ENABLE_CHECKING
	  for (unsigned i = 0; i < len; ++i)
	    {
	      tree t = map[2*i+1];
	      enum tree_code code = TREE_CODE (t);
	      /* IDENTIFIER_NODEs should be singletons and are merged by the
		 streamer.  The others should be singletons, too, and we
		 should not merge them in any way.  */
	      gcc_assert (code != TRANSLATION_UNIT_DECL
			  && code != IDENTIFIER_NODE
			  && !streamer_handle_as_builtin_p (t));
	    }
#endif

	  /* Fixup the streamer cache with the prevailing nodes according
	     to the tree node mapping computed by compare_tree_sccs.  */
	  if (len == 1)
	    streamer_tree_cache_replace_tree (cache, pscc->entries[0], from);
	  else
	    {
	      /* Pair up each cache index with its read node, then sort
		 both that table and MAP by node pointer so corresponding
		 rows line up and map[2*i] gives the prevailing node for
		 cache slot map2[2*i].  */
	      tree *map2 = XALLOCAVEC (tree, 2 * len);
	      for (unsigned i = 0; i < len; ++i)
		{
		  map2[i*2] = (tree)(uintptr_t)(from + i);
		  map2[i*2+1] = scc->entries[i];
		}
	      qsort (map2, len, 2 * sizeof (tree), cmp_tree);
	      qsort (map, len, 2 * sizeof (tree), cmp_tree);
	      for (unsigned i = 0; i < len; ++i)
		streamer_tree_cache_replace_tree (cache, map[2*i],
						  (uintptr_t)map2[2*i]);
	    }

	  /* Free the tree nodes from the read SCC.  */
	  for (unsigned i = 0; i < len; ++i)
	    {
	      if (TYPE_P (scc->entries[i]))
		num_merged_types++;
	      ggc_free (scc->entries[i]);
	    }

	  break;
	}

      /* Reset TREE_VISITED if we didn't unify the SCC with another.  */
      if (!unified_p)
	for (unsigned i = 0; i < scc->len; ++i)
	  TREE_VISITED (scc->entries[i]) = 0;
    }

  /* If we didn't unify it to any candidate duplicate the relevant
     pieces to permanent storage and link it into the chain.  Only
     sizeof (tree_scc) is copied, i.e. just the first entry candidate
     is kept for prevailing SCCs (see the tree_scc comment).  */
  if (!unified_p)
    {
      tree_scc *pscc
	= XOBNEWVAR (&tree_scc_hash_obstack, tree_scc, sizeof (tree_scc));
      memcpy (pscc, scc, sizeof (tree_scc));
      pscc->next = (*slot);
      *slot = pscc;
    }
  return unified_p;
}
2805
2806
/* Read all the symbols from buffer DATA, using descriptors in DECL_DATA.
   RESOLUTIONS is the set of symbols picked by the linker (read from the
   resolution file when the linker plugin is being used).  */

static void
lto_read_decls (struct lto_file_decl_data *decl_data, const void *data,
		vec<ld_plugin_symbol_resolution_t> resolutions)
{
  /* Section layout implied by the offsets below:
     [lto_decl_header][decl states][main tree stream][string table].  */
  const struct lto_decl_header *header = (const struct lto_decl_header *) data;
  const int decl_offset = sizeof (struct lto_decl_header);
  const int main_offset = decl_offset + header->decl_state_size;
  const int string_offset = main_offset + header->main_size;
  struct lto_input_block ib_main;
  struct data_in *data_in;
  unsigned int i;
  const uint32_t *data_ptr, *data_end;
  uint32_t num_decl_states;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
			header->main_size);

  data_in = lto_data_in_create (decl_data, (const char *) data + string_offset,
				header->string_size, resolutions);

  /* We do not uniquify the pre-loaded cache entries, those are middle-end
     internal types that should not be merged.  */

  /* Read the global declarations and types.  */
  while (ib_main.p < ib_main.len)
    {
      tree t;
      /* Index of the first node this record will add to the cache.  */
      unsigned from = data_in->reader_cache->nodes.length ();
      /* Read and uniquify SCCs as in the input stream.  */
      enum LTO_tags tag = streamer_read_record_start (&ib_main);
      if (tag == LTO_tree_scc)
	{
	  unsigned len_;
	  unsigned scc_entry_len;
	  hashval_t scc_hash = lto_input_scc (&ib_main, data_in, &len_,
					      &scc_entry_len);
	  unsigned len = data_in->reader_cache->nodes.length () - from;
	  gcc_assert (len == len_);

	  total_scc_size += len;
	  num_sccs_read++;

	  /* We have the special case of size-1 SCCs that are pre-merged
	     by means of identifier and string sharing for example.
	     ??? Maybe we should avoid streaming those as SCCs.  */
	  tree first = streamer_tree_cache_get_tree (data_in->reader_cache,
						     from);
	  if (len == 1
	      && (TREE_CODE (first) == IDENTIFIER_NODE
		  || TREE_CODE (first) == INTEGER_CST
		  || TREE_CODE (first) == TRANSLATION_UNIT_DECL
		  || streamer_handle_as_builtin_p (first)))
	    continue;

	  /* Try to unify the SCC with already existing ones.  Merging
	     only happens at WPA time; LTRANS reads are left alone.  */
	  if (!flag_ltrans
	      && unify_scc (data_in->reader_cache, from,
			    len, scc_entry_len, scc_hash))
	    continue;

	  /* Do remaining fixup tasks for prevailing nodes.  */
	  bool seen_type = false;
	  bool not_merged_type_same_scc = false;
	  bool not_merged_type_not_same_scc = false;
	  for (unsigned i = 0; i < len; ++i)
	    {
	      tree t = streamer_tree_cache_get_tree (data_in->reader_cache,
						     from + i);
	      /* For statistics, see if the old code would have merged
		 the type.  */
	      if (TYPE_P (t)
		  && (flag_lto_report || (flag_wpa && flag_lto_report_wpa)))
		{
		  tree newt = gimple_register_type (t);
		  if (newt != t)
		    {
		      num_not_merged_types++;
		      unsigned j;
		      /* Check if we can never merge the types because
			 they are in the same SCC and thus the old
			 code was broken.  */
		      for (j = 0; j < len; ++j)
			if (i != j
			    && streamer_tree_cache_get_tree
				 (data_in->reader_cache, from + j) == newt)
			  {
			    num_not_merged_types_in_same_scc++;
			    not_merged_type_same_scc = true;
			    break;
			  }
		      if (j == len)
			not_merged_type_not_same_scc = true;
		    }
		}
	      /* Reconstruct the type variant and pointer-to/reference-to
		 chains.  */
	      if (TYPE_P (t))
		{
		  seen_type = true;
		  num_prevailing_types++;
		  lto_fixup_prevailing_type (t);
		}
	      /* Compute the canonical type of all types.
		 ??? Should be able to assert that !TYPE_CANONICAL.  */
	      if (TYPE_P (t) && !TYPE_CANONICAL (t))
		TYPE_CANONICAL (t) = gimple_register_canonical_type (t);
	      /* Link shared INTEGER_CSTs into TYPE_CACHED_VALUEs of its
		 type which is also member of this SCC.  */
	      if (TREE_CODE (t) == INTEGER_CST
		  && !TREE_OVERFLOW (t))
		cache_integer_cst (t);
	      /* Register TYPE_DECLs with the debuginfo machinery.  */
	      if (!flag_wpa
		  && TREE_CODE (t) == TYPE_DECL)
		debug_hooks->type_decl (t, !DECL_FILE_SCOPE_P (t));
	      if (!flag_ltrans)
		{
		  /* Register variables and functions with the
		     symbol table.  */
		  if (TREE_CODE (t) == VAR_DECL)
		    lto_register_var_decl_in_symtab (data_in, t, from + i);
		  else if (TREE_CODE (t) == FUNCTION_DECL
			   && !DECL_BUILT_IN (t))
		    lto_register_function_decl_in_symtab (data_in, t, from + i);
		  /* Scan the tree for references to global functions or
		     variables and record those for later fixup.  */
		  if (mentions_vars_p (t))
		    vec_safe_push (tree_with_vars, t);
		}
	    }
	  /* Account whole-SCC tree counts to the statistics counters.  */
	  if (not_merged_type_same_scc)
	    {
	      num_not_merged_types_in_same_scc_trees += len;
	      num_not_merged_types_trees += len;
	    }
	  else if (not_merged_type_not_same_scc)
	    num_not_merged_types_trees += len;
	  if (seen_type)
	    num_type_scc_trees += len;
	}
      else
	{
	  /* Pickle stray references.  */
	  t = lto_input_tree_1 (&ib_main, data_in, tag, 0);
	  gcc_assert (t && data_in->reader_cache->nodes.length () == from);
	}
    }

  /* Read in lto_in_decl_state objects.  */
  data_ptr = (const uint32_t *) ((const char*) data + decl_offset);
  data_end =
     (const uint32_t *) ((const char*) data_ptr + header->decl_state_size);
  num_decl_states = *data_ptr++;

  /* The first decl state is always the global one.  */
  gcc_assert (num_decl_states > 0);
  decl_data->global_decl_state = lto_new_in_decl_state ();
  data_ptr = lto_read_in_decl_state (data_in, data_ptr,
				     decl_data->global_decl_state);

  /* Read in per-function decl states and enter them in hash table.  */
  decl_data->function_decl_states =
    htab_create_ggc (37, lto_hash_in_decl_state, lto_eq_in_decl_state, NULL);

  for (i = 1; i < num_decl_states; i++)
    {
      struct lto_in_decl_state *state = lto_new_in_decl_state ();
      void **slot;

      data_ptr = lto_read_in_decl_state (data_in, data_ptr, state);
      slot = htab_find_slot (decl_data->function_decl_states, state, INSERT);
      gcc_assert (*slot == NULL);
      *slot = state;
    }

  if (data_ptr != data_end)
    internal_error ("bytecode stream: garbage at the end of symbols section");

  /* Set the current decl state to be the global state. */
  decl_data->current_decl_state = decl_data->global_decl_state;

  lto_data_in_delete (data_in);
}
2993
2994 /* Custom version of strtoll, which is not portable. */
2995
2996 static HOST_WIDEST_INT
2997 lto_parse_hex (const char *p)
2998 {
2999 HOST_WIDEST_INT ret = 0;
3000
3001 for (; *p != '\0'; ++p)
3002 {
3003 char c = *p;
3004 unsigned char part;
3005 ret <<= 4;
3006 if (c >= '0' && c <= '9')
3007 part = c - '0';
3008 else if (c >= 'a' && c <= 'f')
3009 part = c - 'a' + 10;
3010 else if (c >= 'A' && c <= 'F')
3011 part = c - 'A' + 10;
3012 else
3013 internal_error ("could not parse hex number");
3014 ret |= part;
3015 }
3016
3017 return ret;
3018 }
3019
3020 /* Read resolution for file named FILE_NAME. The resolution is read from
3021 RESOLUTION. */
3022
3023 static void
3024 lto_resolution_read (splay_tree file_ids, FILE *resolution, lto_file *file)
3025 {
3026 /* We require that objects in the resolution file are in the same
3027 order as the lto1 command line. */
3028 unsigned int name_len;
3029 char *obj_name;
3030 unsigned int num_symbols;
3031 unsigned int i;
3032 struct lto_file_decl_data *file_data;
3033 splay_tree_node nd = NULL;
3034
3035 if (!resolution)
3036 return;
3037
3038 name_len = strlen (file->filename);
3039 obj_name = XNEWVEC (char, name_len + 1);
3040 fscanf (resolution, " "); /* Read white space. */
3041
3042 fread (obj_name, sizeof (char), name_len, resolution);
3043 obj_name[name_len] = '\0';
3044 if (filename_cmp (obj_name, file->filename) != 0)
3045 internal_error ("unexpected file name %s in linker resolution file. "
3046 "Expected %s", obj_name, file->filename);
3047 if (file->offset != 0)
3048 {
3049 int t;
3050 char offset_p[17];
3051 HOST_WIDEST_INT offset;
3052 t = fscanf (resolution, "@0x%16s", offset_p);
3053 if (t != 1)
3054 internal_error ("could not parse file offset");
3055 offset = lto_parse_hex (offset_p);
3056 if (offset != file->offset)
3057 internal_error ("unexpected offset");
3058 }
3059
3060 free (obj_name);
3061
3062 fscanf (resolution, "%u", &num_symbols);
3063
3064 for (i = 0; i < num_symbols; i++)
3065 {
3066 int t;
3067 unsigned index;
3068 unsigned HOST_WIDE_INT id;
3069 char r_str[27];
3070 enum ld_plugin_symbol_resolution r = (enum ld_plugin_symbol_resolution) 0;
3071 unsigned int j;
3072 unsigned int lto_resolution_str_len =
3073 sizeof (lto_resolution_str) / sizeof (char *);
3074 res_pair rp;
3075
3076 t = fscanf (resolution, "%u " HOST_WIDE_INT_PRINT_HEX_PURE " %26s %*[^\n]\n",
3077 &index, &id, r_str);
3078 if (t != 3)
3079 internal_error ("invalid line in the resolution file");
3080
3081 for (j = 0; j < lto_resolution_str_len; j++)
3082 {
3083 if (strcmp (lto_resolution_str[j], r_str) == 0)
3084 {
3085 r = (enum ld_plugin_symbol_resolution) j;
3086 break;
3087 }
3088 }
3089 if (j == lto_resolution_str_len)
3090 internal_error ("invalid resolution in the resolution file");
3091
3092 if (!(nd && lto_splay_tree_id_equal_p (nd->key, id)))
3093 {
3094 nd = lto_splay_tree_lookup (file_ids, id);
3095 if (nd == NULL)
3096 internal_error ("resolution sub id %wx not in object file", id);
3097 }
3098
3099 file_data = (struct lto_file_decl_data *)nd->value;
3100 /* The indexes are very sparse. To save memory save them in a compact
3101 format that is only unpacked later when the subfile is processed. */
3102 rp.res = r;
3103 rp.index = index;
3104 file_data->respairs.safe_push (rp);
3105 if (file_data->max_index < index)
3106 file_data->max_index = index;
3107 }
3108 }
3109
/* List of file_decl_datas.  Holds one lto_file_decl_data per sub module
   found in an object, in linker order; elements are chained through
   their NEXT field.  */
struct file_data_list
{
  /* Head and tail of the chain; LAST allows O(1) append.  */
  struct lto_file_decl_data *first, *last;
};
3115
3116 /* Is the name for a id'ed LTO section? */
3117
3118 static int
3119 lto_section_with_id (const char *name, unsigned HOST_WIDE_INT *id)
3120 {
3121 const char *s;
3122
3123 if (strncmp (name, LTO_SECTION_NAME_PREFIX, strlen (LTO_SECTION_NAME_PREFIX)))
3124 return 0;
3125 s = strrchr (name, '.');
3126 return s && sscanf (s, "." HOST_WIDE_INT_PRINT_HEX_PURE, id) == 1;
3127 }
3128
3129 /* Create file_data of each sub file id */
3130
3131 static int
3132 create_subid_section_table (struct lto_section_slot *ls, splay_tree file_ids,
3133 struct file_data_list *list)
3134 {
3135 struct lto_section_slot s_slot, *new_slot;
3136 unsigned HOST_WIDE_INT id;
3137 splay_tree_node nd;
3138 void **hash_slot;
3139 char *new_name;
3140 struct lto_file_decl_data *file_data;
3141
3142 if (!lto_section_with_id (ls->name, &id))
3143 return 1;
3144
3145 /* Find hash table of sub module id */
3146 nd = lto_splay_tree_lookup (file_ids, id);
3147 if (nd != NULL)
3148 {
3149 file_data = (struct lto_file_decl_data *)nd->value;
3150 }
3151 else
3152 {
3153 file_data = ggc_alloc_lto_file_decl_data ();
3154 memset(file_data, 0, sizeof (struct lto_file_decl_data));
3155 file_data->id = id;
3156 file_data->section_hash_table = lto_obj_create_section_hash_table ();;
3157 lto_splay_tree_insert (file_ids, id, file_data);
3158
3159 /* Maintain list in linker order */
3160 if (!list->first)
3161 list->first = file_data;
3162 if (list->last)
3163 list->last->next = file_data;
3164 list->last = file_data;
3165 }
3166
3167 /* Copy section into sub module hash table */
3168 new_name = XDUPVEC (char, ls->name, strlen (ls->name) + 1);
3169 s_slot.name = new_name;
3170 hash_slot = htab_find_slot (file_data->section_hash_table, &s_slot, INSERT);
3171 gcc_assert (*hash_slot == NULL);
3172
3173 new_slot = XDUP (struct lto_section_slot, ls);
3174 new_slot->name = new_name;
3175 *hash_slot = new_slot;
3176 return 1;
3177 }
3178
/* Read declarations and other initializations for a FILE_DATA.
   Expands the compact respairs recorded by lto_resolution_read into a
   dense resolution vector, then reads and frees the decls section.  */

static void
lto_file_finalize (struct lto_file_decl_data *file_data, lto_file *file)
{
  const char *data;
  size_t len;
  vec<ld_plugin_symbol_resolution_t>
	resolutions = vNULL;
  int i;
  res_pair *rp;

  /* Create vector for fast access of resolution.  We do this lazily
     to save memory. */
  resolutions.safe_grow_cleared (file_data->max_index + 1);
  for (i = 0; file_data->respairs.iterate (i, &rp); i++)
    resolutions[rp->index] = rp->res;
  file_data->respairs.release ();

  file_data->renaming_hash_table = lto_create_renaming_table ();
  file_data->file_name = file->filename;
  data = lto_get_section_data (file_data, LTO_section_decls, NULL, &len);
  if (data == NULL)
    {
      /* internal_error does not return; the return below is defensive.  */
      internal_error ("cannot read LTO decls from %s", file_data->file_name);
      return;
    }
  /* Frees resolutions -- ownership of the vector passes to
     lto_read_decls.  */
  lto_read_decls (file_data, data, resolutions);
  lto_free_section_data (file_data, LTO_section_decls, NULL, data, len);
}
3210
3211 /* Finalize FILE_DATA in FILE and increase COUNT. */
3212
3213 static int
3214 lto_create_files_from_ids (lto_file *file, struct lto_file_decl_data *file_data,
3215 int *count)
3216 {
3217 lto_file_finalize (file_data, file);
3218 if (cgraph_dump_file)
3219 fprintf (cgraph_dump_file, "Creating file %s with sub id " HOST_WIDE_INT_PRINT_HEX "\n",
3220 file_data->file_name, file_data->id);
3221 (*count)++;
3222 return 0;
3223 }
3224
/* Generate a TREE representation for all types and external decls
   entities in FILE.

   Read all of the globals out of the file.  Then read the cgraph
   and process the .o index into the cgraph nodes so that it can open
   the .o file to load the functions and ipa information.

   Returns the head of the chain of per-sub-module file_decl_datas
   (linked via their NEXT field, in linker order), or NULL when the
   object contained no id'ed LTO sections.  COUNT is incremented once
   per sub module finalized.  */

static struct lto_file_decl_data *
lto_file_read (lto_file *file, FILE *resolution_file, int *count)
{
  struct lto_file_decl_data *file_data = NULL;
  splay_tree file_ids;
  htab_t section_hash_table;
  struct lto_section_slot *section;
  struct file_data_list file_list;
  struct lto_section_list section_list;

  memset (&section_list, 0, sizeof (struct lto_section_list));
  section_hash_table = lto_obj_build_section_table (file, &section_list);

  /* Find all sub modules in the object and put their sections into new hash
     tables in a splay tree. */
  file_ids = lto_splay_tree_new ();
  memset (&file_list, 0, sizeof (struct file_data_list));
  for (section = section_list.first; section != NULL; section = section->next)
    create_subid_section_table (section, file_ids, &file_list);

  /* Add resolutions to file ids */
  lto_resolution_read (file_ids, resolution_file, file);

  /* Finalize each lto file for each submodule in the merged object */
  for (file_data = file_list.first; file_data != NULL; file_data = file_data->next)
    lto_create_files_from_ids (file, file_data, count);

  /* The splay tree and the per-object section table are only needed
     during this routine; the per-sub-module tables live on.  */
  splay_tree_delete (file_ids);
  htab_delete (section_hash_table);

  return file_list.first;
}
3264
3265 #if HAVE_MMAP_FILE && HAVE_SYSCONF && defined _SC_PAGE_SIZE
3266 #define LTO_MMAP_IO 1
3267 #endif
3268
3269 #if LTO_MMAP_IO
3270 /* Page size of machine is used for mmap and munmap calls. */
3271 static size_t page_mask;
3272 #endif
3273
/* Get the section data of length LEN from FILENAME starting at
   OFFSET.  The data segment must be freed by the caller when the
   caller is finished.  Returns NULL if all was not well.

   Under LTO_MMAP_IO the returned pointer lies inside an mmap'ed page
   range; free_section_data recovers the page-aligned base from it.
   Otherwise the pointer is a plain xmalloc'ed buffer.  */

static char *
lto_read_section_data (struct lto_file_decl_data *file_data,
		       intptr_t offset, size_t len)
{
  char *result;
  static int fd = -1;
  static char *fd_name;
#if LTO_MMAP_IO
  intptr_t computed_len;
  intptr_t computed_offset;
  intptr_t diff;
#endif

  /* Keep a single-entry file-descriptor cache.  The last file we
     touched will get closed at exit.
     ??? Eventually we want to add a more sophisticated larger cache
     or rather fix function body streaming to not stream them in
     practically random order.  */
  if (fd != -1
      && filename_cmp (fd_name, file_data->file_name) != 0)
    {
      free (fd_name);
      close (fd);
      fd = -1;
    }
  if (fd == -1)
    {
      fd = open (file_data->file_name, O_RDONLY|O_BINARY);
      if (fd == -1)
	{
	  /* fatal_error exits; the return is defensive.  */
	  fatal_error ("Cannot open %s", file_data->file_name);
	  return NULL;
	}
      fd_name = xstrdup (file_data->file_name);
    }

#if LTO_MMAP_IO
  /* Lazily compute the page mask used to align mmap offsets.  */
  if (!page_mask)
    {
      size_t page_size = sysconf (_SC_PAGE_SIZE);
      page_mask = ~(page_size - 1);
    }

  /* mmap requires a page-aligned offset; map from the enclosing page
     boundary and return a pointer DIFF bytes into the mapping.  */
  computed_offset = offset & page_mask;
  diff = offset - computed_offset;
  computed_len = len + diff;

  result = (char *) mmap (NULL, computed_len, PROT_READ, MAP_PRIVATE,
			  fd, computed_offset);
  if (result == MAP_FAILED)
    {
      fatal_error ("Cannot map %s", file_data->file_name);
      return NULL;
    }

  return result + diff;
#else
  result = (char *) xmalloc (len);
  if (lseek (fd, offset, SEEK_SET) != offset
      || read (fd, result, len) != (ssize_t) len)
    {
      free (result);
      fatal_error ("Cannot read %s", file_data->file_name);
      result = NULL;
    }
#ifdef __MINGW32__
  /* Native windows doesn't supports delayed unlink on opened file. So
     we close file here again. This produces higher I/O load, but at least
     it prevents to have dangling file handles preventing unlink. */
  free (fd_name);
  fd_name = NULL;
  close (fd);
  fd = -1;
#endif
  return result;
#endif
}
3355
3356
3357 /* Get the section data from FILE_DATA of SECTION_TYPE with NAME.
3358 NAME will be NULL unless the section type is for a function
3359 body. */
3360
3361 static const char *
3362 get_section_data (struct lto_file_decl_data *file_data,
3363 enum lto_section_type section_type,
3364 const char *name,
3365 size_t *len)
3366 {
3367 htab_t section_hash_table = file_data->section_hash_table;
3368 struct lto_section_slot *f_slot;
3369 struct lto_section_slot s_slot;
3370 const char *section_name = lto_get_section_name (section_type, name, file_data);
3371 char *data = NULL;
3372
3373 *len = 0;
3374 s_slot.name = section_name;
3375 f_slot = (struct lto_section_slot *) htab_find (section_hash_table, &s_slot);
3376 if (f_slot)
3377 {
3378 data = lto_read_section_data (file_data, f_slot->start, f_slot->len);
3379 *len = f_slot->len;
3380 }
3381
3382 free (CONST_CAST (char *, section_name));
3383 return data;
3384 }
3385
3386
/* Free the section data from FILE_DATA of SECTION_TYPE with NAME that
   starts at OFFSET and has LEN bytes.

   OFFSET is the pointer previously returned by get_section_data; under
   LTO_MMAP_IO it points DIFF bytes past a page boundary, so the
   page-aligned mapping base and total length are recomputed here
   before munmap.  Without mmap it is simply freed.  */

static void
free_section_data (struct lto_file_decl_data *file_data ATTRIBUTE_UNUSED,
		   enum lto_section_type section_type ATTRIBUTE_UNUSED,
		   const char *name ATTRIBUTE_UNUSED,
		   const char *offset, size_t len ATTRIBUTE_UNUSED)
{
#if LTO_MMAP_IO
  intptr_t computed_len;
  intptr_t computed_offset;
  intptr_t diff;
#endif

#if LTO_MMAP_IO
  /* Mirror the alignment arithmetic of lto_read_section_data.  */
  computed_offset = ((intptr_t) offset) & page_mask;
  diff = (intptr_t) offset - computed_offset;
  computed_len = len + diff;

  munmap ((caddr_t) computed_offset, computed_len);
#else
  free (CONST_CAST(char *, offset));
#endif
}
3412
3413 static lto_file *current_lto_file;
3414
3415 /* Helper for qsort; compare partitions and return one with smaller size.
3416 We sort from greatest to smallest so parallel build doesn't stale on the
3417 longest compilation being executed too late. */
3418
3419 static int
3420 cmp_partitions_size (const void *a, const void *b)
3421 {
3422 const struct ltrans_partition_def *pa
3423 = *(struct ltrans_partition_def *const *)a;
3424 const struct ltrans_partition_def *pb
3425 = *(struct ltrans_partition_def *const *)b;
3426 return pb->insns - pa->insns;
3427 }
3428
3429 /* Helper for qsort; compare partitions and return one with smaller order. */
3430
3431 static int
3432 cmp_partitions_order (const void *a, const void *b)
3433 {
3434 const struct ltrans_partition_def *pa
3435 = *(struct ltrans_partition_def *const *)a;
3436 const struct ltrans_partition_def *pb
3437 = *(struct ltrans_partition_def *const *)b;
3438 int ordera = -1, orderb = -1;
3439
3440 if (lto_symtab_encoder_size (pa->encoder))
3441 ordera = lto_symtab_encoder_deref (pa->encoder, 0)->symbol.order;
3442 if (lto_symtab_encoder_size (pb->encoder))
3443 orderb = lto_symtab_encoder_deref (pb->encoder, 0)->symbol.order;
3444 return orderb - ordera;
3445 }
3446
/* Write all output files in WPA mode and the file with the list of
   LTRANS units.  Each partition is streamed to its own object file
   named after LTRANS_OUTPUT_LIST, and the file names are appended to
   the output-list stream for the driver to pick up.  */

static void
lto_wpa_write_files (void)
{
  unsigned i, n_sets;
  lto_file *file;
  ltrans_partition part;
  FILE *ltrans_output_list_stream;
  char *temp_filename;
  size_t blen;

  /* Open the LTRANS output list.  */
  if (!ltrans_output_list)
    fatal_error ("no LTRANS output list filename provided");
  ltrans_output_list_stream = fopen (ltrans_output_list, "w");
  if (ltrans_output_list_stream == NULL)
    fatal_error ("opening LTRANS output list %s: %m", ltrans_output_list);

  timevar_push (TV_WHOPR_WPA);

  FOR_EACH_VEC_ELT (ltrans_partitions, i, part)
    lto_stats.num_output_symtab_nodes += lto_symtab_encoder_size (part->encoder);

  /* Find out statics that need to be promoted
     to globals with hidden visibility because they are accessed from multiple
     partitions.  */
  lto_promote_cross_file_statics ();

  timevar_pop (TV_WHOPR_WPA);

  timevar_push (TV_WHOPR_WPA_IO);

  /* Generate a prefix for the LTRANS unit files.  The buffer leaves
     room for the largest "%u.o" suffix; a trailing ".out" on the list
     name is stripped first.  */
  blen = strlen (ltrans_output_list);
  temp_filename = (char *) xmalloc (blen + sizeof ("2147483648.o"));
  strcpy (temp_filename, ltrans_output_list);
  if (blen > sizeof (".out")
      && strcmp (temp_filename + blen - sizeof (".out") + 1,
		 ".out") == 0)
    temp_filename[blen - sizeof (".out") + 1] = '\0';
  blen = strlen (temp_filename);

  n_sets = ltrans_partitions.length ();

  /* Sort partitions by size so small ones are compiled last.
     FIXME: Even when not reordering we may want to output one list for parallel make
     and other for final link command.  */
  ltrans_partitions.qsort (flag_toplevel_reorder
			   ? cmp_partitions_size
			   : cmp_partitions_order);
  for (i = 0; i < n_sets; i++)
    {
      size_t len;
      ltrans_partition part = ltrans_partitions[i];

      /* Write all the nodes in SET.  */
      sprintf (temp_filename + blen, "%u.o", i);
      file = lto_obj_file_open (temp_filename, true);
      if (!file)
	fatal_error ("lto_obj_file_open() failed");

      if (!quiet_flag)
	fprintf (stderr, " %s (%s %i insns)", temp_filename, part->name, part->insns);
      if (cgraph_dump_file)
	{
	  lto_symtab_encoder_iterator lsei;

	  /* Dump the partition's own symbols, then those only in its
	     boundary (encoded but not placed in this partition).  */
	  fprintf (cgraph_dump_file, "Writing partition %s to file %s, %i insns\n",
		   part->name, temp_filename, part->insns);
	  fprintf (cgraph_dump_file, " Symbols in partition: ");
	  for (lsei = lsei_start_in_partition (part->encoder); !lsei_end_p (lsei);
	       lsei_next_in_partition (&lsei))
	    {
	      symtab_node node = lsei_node (lsei);
	      fprintf (cgraph_dump_file, "%s ", symtab_node_asm_name (node));
	    }
	  fprintf (cgraph_dump_file, "\n Symbols in boundary: ");
	  for (lsei = lsei_start (part->encoder); !lsei_end_p (lsei);
	       lsei_next (&lsei))
	    {
	      symtab_node node = lsei_node (lsei);
	      if (!lto_symtab_encoder_in_partition_p (part->encoder, node))
		{
		  fprintf (cgraph_dump_file, "%s ", symtab_node_asm_name (node));
		  cgraph_node *cnode = dyn_cast <cgraph_node> (node);
		  if (cnode
		      && lto_symtab_encoder_encode_body_p (part->encoder, cnode))
		    fprintf (cgraph_dump_file, "(body included)");
		  else
		    {
		      varpool_node *vnode = dyn_cast <varpool_node> (node);
		      if (vnode
			  && lto_symtab_encoder_encode_initializer_p (part->encoder, vnode))
			fprintf (cgraph_dump_file, "(initializer included)");
		    }
		}
	    }
	  fprintf (cgraph_dump_file, "\n");
	}
      /* Only the first partition may legitimately be empty.  */
      gcc_checking_assert (lto_symtab_encoder_size (part->encoder) || !i);

      lto_set_current_out_file (file);

      ipa_write_optimization_summaries (part->encoder);

      lto_set_current_out_file (NULL);
      lto_obj_file_close (file);
      free (file);
      part->encoder = NULL;

      len = strlen (temp_filename);
      if (fwrite (temp_filename, 1, len, ltrans_output_list_stream) < len
	  || fwrite ("\n", 1, 1, ltrans_output_list_stream) < 1)
	fatal_error ("writing to LTRANS output list %s: %m",
		     ltrans_output_list);
    }

  lto_stats.num_output_files += n_sets;

  /* Close the LTRANS output list.  */
  if (fclose (ltrans_output_list_stream))
    fatal_error ("closing LTRANS output list %s: %m", ltrans_output_list);

  free_ltrans_partitions();
  free (temp_filename);

  timevar_pop (TV_WHOPR_WPA_IO);
}
3577
3578
/* If TT is a variable or function decl replace it with its
   prevailing variant.  Requires a local "bool fixed" in the expansion
   context; it is set to true whenever a replacement is made.  */
#define LTO_SET_PREVAIL(tt) \
  do {\
    if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \
	&& (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \
      { \
	tt = lto_symtab_prevailing_decl (tt); \
	fixed = true; \
      } \
  } while (0)

/* Ensure that TT, if set, is not a variable or function decl -- i.e. a
   field that never needs prevailing-decl replacement.  */
#define LTO_NO_PREVAIL(tt) \
  gcc_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
3594
/* Given a tree T replace all fields referring to variables or functions
   with their prevailing variant.  Asserts (via LTO_NO_PREVAIL) on
   fields that must never hold a var/function decl, and asserts at the
   end that at least one replacement happened -- T is expected to have
   been collected by mentions_vars_p.  */
static void
lto_fixup_prevailing_decls (tree t)
{
  enum tree_code code = TREE_CODE (t);
  bool fixed = false;

  gcc_checking_assert (code != CONSTRUCTOR && code != TREE_BINFO);
  LTO_NO_PREVAIL (TREE_TYPE (t));
  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    LTO_NO_PREVAIL (TREE_CHAIN (t));
  if (DECL_P (t))
    {
      LTO_NO_PREVAIL (DECL_NAME (t));
      LTO_SET_PREVAIL (DECL_CONTEXT (t));
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
	{
	  LTO_SET_PREVAIL (DECL_SIZE (t));
	  LTO_SET_PREVAIL (DECL_SIZE_UNIT (t));
	  LTO_SET_PREVAIL (DECL_INITIAL (t));
	  LTO_NO_PREVAIL (DECL_ATTRIBUTES (t));
	  LTO_SET_PREVAIL (DECL_ABSTRACT_ORIGIN (t));
	}
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
	{
	  LTO_NO_PREVAIL (t->decl_with_vis.assembler_name);
	  LTO_NO_PREVAIL (DECL_SECTION_NAME (t));
	}
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
	{
	  LTO_NO_PREVAIL (DECL_ARGUMENT_FLD (t));
	  LTO_NO_PREVAIL (DECL_RESULT_FLD (t));
	  LTO_NO_PREVAIL (DECL_VINDEX (t));
	}
      if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
	LTO_SET_PREVAIL (DECL_FUNCTION_PERSONALITY (t));
      if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
	{
	  LTO_SET_PREVAIL (DECL_FIELD_OFFSET (t));
	  LTO_NO_PREVAIL (DECL_BIT_FIELD_TYPE (t));
	  LTO_NO_PREVAIL (DECL_QUALIFIER (t));
	  LTO_NO_PREVAIL (DECL_FIELD_BIT_OFFSET (t));
	  LTO_NO_PREVAIL (DECL_FCONTEXT (t));
	}
    }
  else if (TYPE_P (t))
    {
      LTO_NO_PREVAIL (TYPE_CACHED_VALUES (t));
      LTO_SET_PREVAIL (TYPE_SIZE (t));
      LTO_SET_PREVAIL (TYPE_SIZE_UNIT (t));
      LTO_NO_PREVAIL (TYPE_ATTRIBUTES (t));
      LTO_NO_PREVAIL (TYPE_NAME (t));

      LTO_SET_PREVAIL (TYPE_MINVAL (t));
      LTO_SET_PREVAIL (TYPE_MAXVAL (t));
      LTO_NO_PREVAIL (t->type_non_common.binfo);

      LTO_SET_PREVAIL (TYPE_CONTEXT (t));

      LTO_NO_PREVAIL (TYPE_CANONICAL (t));
      LTO_NO_PREVAIL (TYPE_MAIN_VARIANT (t));
      LTO_NO_PREVAIL (TYPE_NEXT_VARIANT (t));
    }
  else if (EXPR_P (t))
    {
      int i;
      for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
	LTO_SET_PREVAIL (TREE_OPERAND (t, i));
    }
  else
    {
      switch (code)
	{
	case TREE_LIST:
	  LTO_SET_PREVAIL (TREE_VALUE (t));
	  /* NOTE(review): SET_PREVAIL immediately followed by NO_PREVAIL
	     on TREE_PURPOSE looks contradictory -- if TREE_PURPOSE were
	     a public var/function decl the assert would still trip after
	     replacement.  Presumably TREE_PURPOSE never holds such a
	     decl here; confirm intent.  */
	  LTO_SET_PREVAIL (TREE_PURPOSE (t));
	  LTO_NO_PREVAIL (TREE_PURPOSE (t));
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  /* If we fixed nothing, then we missed something seen by
     mentions_vars_p.  */
  gcc_checking_assert (fixed);
}
#undef LTO_SET_PREVAIL
#undef LTO_NO_PREVAIL
3684
3685 /* Helper function of lto_fixup_decls. Walks the var and fn streams in STATE,
3686 replaces var and function decls with the corresponding prevailing def. */
3687
3688 static void
3689 lto_fixup_state (struct lto_in_decl_state *state)
3690 {
3691 unsigned i, si;
3692 struct lto_tree_ref_table *table;
3693
3694 /* Although we only want to replace FUNCTION_DECLs and VAR_DECLs,
3695 we still need to walk from all DECLs to find the reachable
3696 FUNCTION_DECLs and VAR_DECLs. */
3697 for (si = 0; si < LTO_N_DECL_STREAMS; si++)
3698 {
3699 table = &state->streams[si];
3700 for (i = 0; i < table->size; i++)
3701 {
3702 tree *tp = table->trees + i;
3703 if (VAR_OR_FUNCTION_DECL_P (*tp)
3704 && (TREE_PUBLIC (*tp) || DECL_EXTERNAL (*tp)))
3705 *tp = lto_symtab_prevailing_decl (*tp);
3706 }
3707 }
3708 }
3709
3710 /* A callback of htab_traverse. Just extracts a state from SLOT
3711 and calls lto_fixup_state. */
3712
3713 static int
3714 lto_fixup_state_aux (void **slot, void *aux ATTRIBUTE_UNUSED)
3715 {
3716 struct lto_in_decl_state *state = (struct lto_in_decl_state *) *slot;
3717 lto_fixup_state (state);
3718 return 1;
3719 }
3720
3721 /* Fix the decls from all FILES. Replaces each decl with the corresponding
3722 prevailing one. */
3723
3724 static void
3725 lto_fixup_decls (struct lto_file_decl_data **files)
3726 {
3727 unsigned int i;
3728 tree t;
3729
3730 if (tree_with_vars)
3731 FOR_EACH_VEC_ELT ((*tree_with_vars), i, t)
3732 lto_fixup_prevailing_decls (t);
3733
3734 for (i = 0; files[i]; i++)
3735 {
3736 struct lto_file_decl_data *file = files[i];
3737 struct lto_in_decl_state *state = file->global_decl_state;
3738 lto_fixup_state (state);
3739
3740 htab_traverse (file->function_decl_states, lto_fixup_state_aux, NULL);
3741 }
3742 }
3743
3744 static GTY((length ("lto_stats.num_input_files + 1"))) struct lto_file_decl_data **all_file_decl_data;
3745
3746 /* Turn file datas for sub files into a single array, so that they look
3747 like separate files for further passes. */
3748
3749 static void
3750 lto_flatten_files (struct lto_file_decl_data **orig, int count, int last_file_ix)
3751 {
3752 struct lto_file_decl_data *n, *next;
3753 int i, k;
3754
3755 lto_stats.num_input_files = count;
3756 all_file_decl_data
3757 = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (count + 1);
3758 /* Set the hooks so that all of the ipa passes can read in their data. */
3759 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
3760 for (i = 0, k = 0; i < last_file_ix; i++)
3761 {
3762 for (n = orig[i]; n != NULL; n = next)
3763 {
3764 all_file_decl_data[k++] = n;
3765 next = n->next;
3766 n->next = NULL;
3767 }
3768 }
3769 all_file_decl_data[k] = NULL;
3770 gcc_assert (k == count);
3771 }
3772
3773 /* Input file data before flattening (i.e. splitting them to subfiles to support
3774 incremental linking. */
3775 static int real_file_count;
3776 static GTY((length ("real_file_count + 1"))) struct lto_file_decl_data **real_file_decl_data;
3777
3778 static void print_lto_report_1 (void);
3779
/* Read all the symbols from the input files FNAMES.  NFILES is the
   number of files requested in the command line.  Instantiate a
   global call graph by aggregating all the sub-graphs found in each
   file.

   Phases: read decls from every object (with the linker resolution
   file, when present), flatten sub files, read the symtab, merge
   declarations and symbols, then read the IPA summaries.  Temporary
   type-merging tables are created up front and torn down as soon as
   decl reading is done.  */

static void
read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
{
  unsigned int i, last_file_ix;
  FILE *resolution;
  int count = 0;
  struct lto_file_decl_data **decl_data;
  void **res;
  symtab_node snode;

  init_cgraph ();

  timevar_push (TV_IPA_LTO_DECL_IN);

  real_file_decl_data
    = decl_data = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (nfiles + 1);
  real_file_count = nfiles;

  /* Read the resolution file.  */
  resolution = NULL;
  if (resolution_file_name)
    {
      int t;
      unsigned num_objects;

      resolution = fopen (resolution_file_name, "r");
      if (resolution == NULL)
	fatal_error ("could not open symbol resolution file: %m");

      t = fscanf (resolution, "%u", &num_objects);
      gcc_assert (t == 1);

      /* True, since the plugin splits the archives. */
      gcc_assert (num_objects == nfiles);
    }
  cgraph_state = CGRAPH_LTO_STREAMING;

  /* Set up the (temporary) hash tables used while merging types read
     from the input files.  */
  canonical_type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
					       tree_int_map_eq, NULL);
  gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
					    gimple_canonical_type_eq, 0);
  type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
				     tree_int_map_eq, NULL);
  type_pair_cache = XCNEWVEC (struct type_pair_d, GIMPLE_TYPE_PAIR_SIZE);
  gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);
  gcc_obstack_init (&tree_scc_hash_obstack);
  tree_scc_hash.create (4096);

  /* Register the common node types with the canonical type machinery so
     we properly share alias-sets across languages and TUs.  Do not
     expose the common nodes as type merge target - those that should be
     are already exposed so by pre-loading the LTO streamer caches.  */
  for (i = 0; i < itk_none; ++i)
    lto_register_canonical_types (integer_types[i]);
  /* The sizetypes are not used to access data so we do not need to
     do anything about them.  */
  for (i = 0; i < TI_MAX; ++i)
    lto_register_canonical_types (global_trees[i]);

  if (!quiet_flag)
    fprintf (stderr, "Reading object files:");

  /* Read all of the object files specified on the command line.  */
  for (i = 0, last_file_ix = 0; i < nfiles; ++i)
    {
      struct lto_file_decl_data *file_data = NULL;
      if (!quiet_flag)
	{
	  fprintf (stderr, " %s", fnames[i]);
	  fflush (stderr);
	}

      current_lto_file = lto_obj_file_open (fnames[i], false);
      if (!current_lto_file)
	break;

      file_data = lto_file_read (current_lto_file, resolution, &count);
      if (!file_data)
	{
	  lto_obj_file_close (current_lto_file);
	  free (current_lto_file);
	  current_lto_file = NULL;
	  break;
	}

      decl_data[last_file_ix++] = file_data;

      lto_obj_file_close (current_lto_file);
      free (current_lto_file);
      current_lto_file = NULL;
    }

  lto_flatten_files (decl_data, count, last_file_ix);
  lto_stats.num_input_files = count;
  ggc_free(decl_data);
  real_file_decl_data = NULL;

  if (resolution_file_name)
    fclose (resolution);

  /* Show the LTO report before launching LTRANS.  */
  if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
    print_lto_report_1 ();

  /* Free gimple type merging datastructures.  */
  htab_delete (gimple_types);
  gimple_types = NULL;
  htab_delete (type_hash_cache);
  type_hash_cache = NULL;
  free (type_pair_cache);
  type_pair_cache = NULL;
  tree_scc_hash.dispose ();
  obstack_free (&tree_scc_hash_obstack, NULL);
  htab_delete (gimple_canonical_types);
  gimple_canonical_types = NULL;
  htab_delete (canonical_type_hash_cache);
  canonical_type_hash_cache = NULL;
  ggc_collect ();

  /* Set the hooks so that all of the ipa passes can read in their data.  */
  lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);

  timevar_pop (TV_IPA_LTO_DECL_IN);

  if (!quiet_flag)
    fprintf (stderr, "\nReading the callgraph\n");

  timevar_push (TV_IPA_LTO_CGRAPH_IO);
  /* Read the symtab.  */
  input_symtab ();

  /* Store resolutions into the symbol table.  */

  FOR_EACH_SYMBOL (snode)
    if (symtab_real_symbol_p (snode)
	&& snode->symbol.lto_file_data
	&& snode->symbol.lto_file_data->resolution_map
	&& (res = pointer_map_contains (snode->symbol.lto_file_data->resolution_map,
					snode->symbol.decl)))
      snode->symbol.resolution
	= (enum ld_plugin_symbol_resolution)(size_t)*res;
  /* The per-file resolution maps are no longer needed once copied into
     the symbol table.  */
  for (i = 0; all_file_decl_data[i]; i++)
    if (all_file_decl_data[i]->resolution_map)
      {
	pointer_map_destroy (all_file_decl_data[i]->resolution_map);
	all_file_decl_data[i]->resolution_map = NULL;
      }

  timevar_pop (TV_IPA_LTO_CGRAPH_IO);

  if (!quiet_flag)
    fprintf (stderr, "Merging declarations\n");

  timevar_push (TV_IPA_LTO_DECL_MERGE);
  /* Merge global decls.  In ltrans mode we read merged cgraph, we do not
     need to care about resolving symbols again, we only need to replace
     duplicated declarations read from the callgraph and from function
     sections.  */
  if (!flag_ltrans)
    {
      lto_symtab_merge_decls ();

      /* If there were errors during symbol merging bail out, we have no
	 good way to recover here.  */
      if (seen_error ())
	fatal_error ("errors during merging of translation units");

      /* Fixup all decls.  */
      lto_fixup_decls (all_file_decl_data);
    }
  if (tree_with_vars)
    ggc_free (tree_with_vars);
  tree_with_vars = NULL;
  ggc_collect ();

  timevar_pop (TV_IPA_LTO_DECL_MERGE);
  /* Each pass will set the appropriate timer.  */

  if (!quiet_flag)
    fprintf (stderr, "Reading summaries\n");

  /* Read the IPA summary data.  */
  if (flag_ltrans)
    ipa_read_optimization_summaries ();
  else
    ipa_read_summaries ();

  /* Release per-file encoders and decl states now that summaries are in.  */
  for (i = 0; all_file_decl_data[i]; i++)
    {
      gcc_assert (all_file_decl_data[i]->symtab_node_encoder);
      lto_symtab_encoder_delete (all_file_decl_data[i]->symtab_node_encoder);
      all_file_decl_data[i]->symtab_node_encoder = NULL;
      lto_free_function_in_decl_state (all_file_decl_data[i]->global_decl_state);
      all_file_decl_data[i]->global_decl_state = NULL;
      all_file_decl_data[i]->current_decl_state = NULL;
    }

  /* Finally merge the cgraph according to the decl merging decisions.  */
  timevar_push (TV_IPA_LTO_CGRAPH_MERGE);
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Before merging:\n");
      dump_symtab (cgraph_dump_file);
    }
  lto_symtab_merge_symbols ();
  ggc_collect ();
  cgraph_state = CGRAPH_STATE_IPA_SSA;

  timevar_pop (TV_IPA_LTO_CGRAPH_MERGE);

  timevar_push (TV_IPA_LTO_DECL_INIT_IO);

  /* Indicate that the cgraph is built and ready.  */
  cgraph_function_flags_ready = true;

  timevar_pop (TV_IPA_LTO_DECL_INIT_IO);
  ggc_free (all_file_decl_data);
  all_file_decl_data = NULL;
}
4004
4005
/* Materialize all the bodies for all the nodes in the callgraph.
   Reads each function with LTO data, then informs the middle end about
   every global variable collected in lto_global_var_decls.  */

static void
materialize_cgraph (void)
{
  tree decl;
  struct cgraph_node *node;
  unsigned i;
  timevar_id_t lto_timer;

  if (!quiet_flag)
    fprintf (stderr,
	     flag_wpa ? "Materializing decls:" : "Reading function bodies:");

  /* Now that we have input the cgraph, we need to clear all of the aux
     nodes and read the functions if we are not running in WPA mode.  */
  timevar_push (TV_IPA_LTO_GIMPLE_IN);

  FOR_EACH_FUNCTION (node)
    {
      /* Only nodes streamed from an LTO file have a body to read.  */
      if (node->symbol.lto_file_data)
	{
	  lto_materialize_function (node);
	  lto_stats.num_input_cgraph_nodes++;
	}
    }

  timevar_pop (TV_IPA_LTO_GIMPLE_IN);

  /* Start the appropriate timer depending on the mode that we are
     operating in.  */
  lto_timer = (flag_wpa) ? TV_WHOPR_WPA
	      : (flag_ltrans) ? TV_WHOPR_LTRANS
	      : TV_LTO;
  timevar_push (lto_timer);

  current_function_decl = NULL;
  set_cfun (NULL);

  /* Inform the middle end about the global variables we have seen.  */
  FOR_EACH_VEC_ELT (*lto_global_var_decls, i, decl)
    rest_of_decl_compilation (decl, 1, 0);

  if (!quiet_flag)
    fprintf (stderr, "\n");

  timevar_pop (lto_timer);
}
4054
4055
4056 /* Show various memory usage statistics related to LTO. */
4057 static void
4058 print_lto_report_1 (void)
4059 {
4060 const char *pfx = (flag_lto) ? "LTO" : (flag_wpa) ? "WPA" : "LTRANS";
4061 fprintf (stderr, "%s statistics\n", pfx);
4062
4063 fprintf (stderr, "[%s] read %lu SCCs of average size %f\n",
4064 pfx, num_sccs_read, total_scc_size / (double)num_sccs_read);
4065 fprintf (stderr, "[%s] %lu tree bodies read in total\n", pfx, total_scc_size);
4066 if (flag_wpa && tree_scc_hash.is_created ())
4067 {
4068 fprintf (stderr, "[%s] tree SCC table: size %ld, %ld elements, "
4069 "collision ratio: %f\n", pfx,
4070 (long) tree_scc_hash.size (),
4071 (long) tree_scc_hash.elements (),
4072 tree_scc_hash.collisions ());
4073 hash_table<tree_scc_hasher>::iterator hiter;
4074 tree_scc *scc, *max_scc = NULL;
4075 unsigned max_length = 0;
4076 FOR_EACH_HASH_TABLE_ELEMENT (tree_scc_hash, scc, x, hiter)
4077 {
4078 unsigned length = 0;
4079 tree_scc *s = scc;
4080 for (; s; s = s->next)
4081 length++;
4082 if (length > max_length)
4083 {
4084 max_length = length;
4085 max_scc = scc;
4086 }
4087 }
4088 fprintf (stderr, "[%s] tree SCC max chain length %u (size %u)\n",
4089 pfx, max_length, max_scc->len);
4090 fprintf (stderr, "[%s] Compared %lu SCCs, %lu collisions (%f)\n", pfx,
4091 num_scc_compares, num_scc_compare_collisions,
4092 num_scc_compare_collisions / (double) num_scc_compares);
4093 fprintf (stderr, "[%s] Merged %lu SCCs\n", pfx, num_sccs_merged);
4094 fprintf (stderr, "[%s] Merged %lu tree bodies\n", pfx,
4095 total_scc_size_merged);
4096 fprintf (stderr, "[%s] Merged %lu types\n", pfx, num_merged_types);
4097 fprintf (stderr, "[%s] %lu types prevailed (%lu associated trees)\n",
4098 pfx, num_prevailing_types, num_type_scc_trees);
4099 fprintf (stderr, "[%s] Old merging code merges an additional %lu types"
4100 " of which %lu are in the same SCC with their "
4101 "prevailing variant (%lu and %lu associated trees)\n",
4102 pfx, num_not_merged_types, num_not_merged_types_in_same_scc,
4103 num_not_merged_types_trees,
4104 num_not_merged_types_in_same_scc_trees);
4105 fprintf (stderr, "[%s] GIMPLE canonical type table: size %ld, "
4106 "%ld elements, %ld searches, %ld collisions (ratio: %f)\n", pfx,
4107 (long) htab_size (gimple_canonical_types),
4108 (long) htab_elements (gimple_canonical_types),
4109 (long) gimple_canonical_types->searches,
4110 (long) gimple_canonical_types->collisions,
4111 htab_collisions (gimple_canonical_types));
4112 fprintf (stderr, "[%s] GIMPLE canonical type hash table: size %ld, "
4113 "%ld elements, %ld searches, %ld collisions (ratio: %f)\n", pfx,
4114 (long) htab_size (canonical_type_hash_cache),
4115 (long) htab_elements (canonical_type_hash_cache),
4116 (long) canonical_type_hash_cache->searches,
4117 (long) canonical_type_hash_cache->collisions,
4118 htab_collisions (canonical_type_hash_cache));
4119 }
4120
4121 print_lto_report (pfx);
4122 }
4123
/* Perform whole program analysis (WPA) on the callgraph and write out the
   optimization plan.  */

static void
do_whole_program_analysis (void)
{
  symtab_node node;

  timevar_start (TV_PHASE_OPT_GEN);

  /* Note that since we are in WPA mode, materialize_cgraph will not
     actually read in all the function bodies.  It only materializes
     the decls and cgraph nodes so that analysis can be performed.  */
  materialize_cgraph ();

  /* Reading in the cgraph uses different timers, start timing WPA now.  */
  timevar_push (TV_WHOPR_WPA);

  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }

  cgraph_function_flags_ready = true;

  if (cgraph_dump_file)
    dump_symtab (cgraph_dump_file);
  /* The obstack is released again below, after the IPA passes have run.  */
  bitmap_obstack_initialize (NULL);
  cgraph_state = CGRAPH_STATE_IPA_SSA;

  /* Run the regular IPA passes over the merged callgraph, then remove
     nodes the analysis proved unreachable.  */
  execute_ipa_pass_list (g->get_passes ()->all_regular_ipa_passes);
  symtab_remove_unreachable_nodes (false, dump_file);

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_symtab (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  bitmap_obstack_release (NULL);

  /* We are about to launch the final LTRANS phase, stop the WPA timer.  */
  timevar_pop (TV_WHOPR_WPA);

  timevar_push (TV_WHOPR_PARTITIONING);
  /* Assign symbols to LTRANS partitions using the scheme selected on
     the command line (default is the balanced map).  */
  if (flag_lto_partition_1to1)
    lto_1_to_1_map ();
  else if (flag_lto_partition_max)
    lto_max_map ();
  else
    lto_balanced_map ();

  /* AUX pointers are used by partitioning code to bookkeep number of
     partitions symbol is in.  This is no longer needed.  */
  FOR_EACH_SYMBOL (node)
    node->symbol.aux = NULL;

  lto_stats.num_cgraph_partitions += ltrans_partitions.length ();
  timevar_pop (TV_WHOPR_PARTITIONING);

  timevar_stop (TV_PHASE_OPT_GEN);
  timevar_start (TV_PHASE_STREAM_OUT);

  if (!quiet_flag)
    {
      fprintf (stderr, "\nStreaming out");
      fflush (stderr);
    }
  /* Stream each partition out to its own object file together with
     the transformation decisions.  */
  lto_wpa_write_files ();
  if (!quiet_flag)
    fprintf (stderr, "\n");

  timevar_stop (TV_PHASE_STREAM_OUT);

  ggc_collect ();
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }

  /* Show the LTO report before launching LTRANS.  */
  if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
    print_lto_report_1 ();
  if (mem_report_wpa)
    dump_memory_report (true);
}
4214
4215
4216 static GTY(()) tree lto_eh_personality_decl;
4217
4218 /* Return the LTO personality function decl. */
4219
4220 tree
4221 lto_eh_personality (void)
4222 {
4223 if (!lto_eh_personality_decl)
4224 {
4225 /* Use the first personality DECL for our personality if we don't
4226 support multiple ones. This ensures that we don't artificially
4227 create the need for them in a single-language program. */
4228 if (first_personality_decl && !dwarf2out_do_cfi_asm ())
4229 lto_eh_personality_decl = first_personality_decl;
4230 else
4231 lto_eh_personality_decl = lhd_gcc_personality ();
4232 }
4233
4234 return lto_eh_personality_decl;
4235 }
4236
4237 /* Set the process name based on the LTO mode. */
4238
4239 static void
4240 lto_process_name (void)
4241 {
4242 if (flag_lto)
4243 setproctitle ("lto1-lto");
4244 if (flag_wpa)
4245 setproctitle ("lto1-wpa");
4246 if (flag_ltrans)
4247 setproctitle ("lto1-ltrans");
4248 }
4249
4250
/* Initialize the LTO front end.  */

static void
lto_init (void)
{
  /* Retitle the process to reflect the LTO mode (lto/wpa/ltrans).  */
  lto_process_name ();
  /* Install the LTO-specific streamer hooks.  */
  lto_streamer_hooks_init ();
  /* Initialize the LTO stream reader.  */
  lto_reader_init ();
  /* Register the callbacks used to fetch and release section data
     while reading IL.  */
  lto_set_in_hooks (NULL, get_section_data, free_section_data);
  /* Reset the statistics accumulated during stream-in.  */
  memset (&lto_stats, 0, sizeof (lto_stats));
  bitmap_obstack_initialize (NULL);
  /* Use the GIMPLE CFG hooks while manipulating function bodies.  */
  gimple_register_cfg_hooks ();
}
4264
4265
/* Main entry point for the GIMPLE front end.  This front end has
   three main personalities:

   - LTO (-flto).  All the object files on the command line are
     loaded in memory and processed as a single translation unit.
     This is the traditional link-time optimization behavior.

   - WPA (-fwpa).  Only the callgraph and summary information for
     files in the command file are loaded.  A single callgraph
     (without function bodies) is instantiated for the whole set of
     files.  IPA passes are only allowed to analyze the call graph
     and make transformation decisions.  The callgraph is
     partitioned, each partition is written to a new object file
     together with the transformation decisions.

   - LTRANS (-fltrans).  Similar to -flto but it prevents the IPA
     summary files from running again.  Since WPA computed summary
     information and decided what transformations to apply, LTRANS
     simply applies them.  */

void
lto_main (void)
{
  /* LTO is called as a front end, even though it is not a front end.
     Because it is called as a front end, TV_PHASE_PARSING and
     TV_PARSE_GLOBAL are active, and we need to turn them off while
     doing LTO.  Later we turn them back on so they are active up in
     toplev.c.  */
  timevar_pop (TV_PARSE_GLOBAL);
  timevar_stop (TV_PHASE_PARSING);

  timevar_start (TV_PHASE_SETUP);

  /* Initialize the LTO front end.  */
  lto_init ();

  timevar_stop (TV_PHASE_SETUP);
  timevar_start (TV_PHASE_STREAM_IN);

  /* Read all the symbols and call graph from all the files in the
     command line.  */
  read_cgraph_and_symbols (num_in_fnames, in_fnames);

  timevar_stop (TV_PHASE_STREAM_IN);

  if (!seen_error ())
    {
      /* If WPA is enabled analyze the whole call graph and create an
	 optimization plan.  Otherwise, read in all the function
	 bodies and continue with optimization.  */
      if (flag_wpa)
	do_whole_program_analysis ();
      else
	{
	  struct varpool_node *vnode;

	  timevar_start (TV_PHASE_OPT_GEN);

	  materialize_cgraph ();
	  /* Skipped in LTRANS mode — presumably the WPA stage already
	     made the promotion decisions; verify against
	     lto-partition.c if this changes.  */
	  if (!flag_ltrans)
	    lto_promote_statics_nonwpa ();

	  /* Let the middle end know that we have read and merged all of
	     the input files.  */
	  compile ();

	  timevar_stop (TV_PHASE_OPT_GEN);

	  /* FIXME lto, if the processes spawned by WPA fail, we miss
	     the chance to print WPA's report, so WPA will call
	     print_lto_report before launching LTRANS.  If LTRANS was
	     launched directly by the driver we would not need to do
	     this.  */
	  if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
	    print_lto_report_1 ();

	  /* Record the global variables.  */
	  FOR_EACH_DEFINED_VARIABLE (vnode)
	    vec_safe_push (lto_global_var_decls, vnode->symbol.decl);
	}
    }

  /* Here we make LTO pretend to be a parser, restoring the timers
     that toplev.c expects to still be running on return.  */
  timevar_start (TV_PHASE_PARSING);
  timevar_push (TV_PARSE_GLOBAL);
}
4352
4353 #include "gt-lto-lto.h"