cgraph.h (symtab_node_base): Add definition, alias and analyzed flags...
[gcc.git] / gcc / lto / lto.c
1 /* Top-level LTO routines.
2 Copyright (C) 2009-2013 Free Software Foundation, Inc.
3 Contributed by CodeSourcery, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "opts.h"
25 #include "toplev.h"
26 #include "tree.h"
27 #include "tree-flow.h"
28 #include "diagnostic-core.h"
29 #include "tm.h"
30 #include "cgraph.h"
31 #include "ggc.h"
32 #include "tree-ssa-operands.h"
33 #include "tree-pass.h"
34 #include "langhooks.h"
35 #include "vec.h"
36 #include "bitmap.h"
37 #include "pointer-set.h"
38 #include "ipa-prop.h"
39 #include "common.h"
40 #include "debug.h"
41 #include "gimple.h"
42 #include "lto.h"
43 #include "lto-tree.h"
44 #include "lto-streamer.h"
45 #include "tree-streamer.h"
46 #include "splay-tree.h"
47 #include "lto-partition.h"
48
49 static GTY(()) tree first_personality_decl;
50
51 /* Returns a hash code for P. */
52
53 static hashval_t
54 hash_name (const void *p)
55 {
56 const struct lto_section_slot *ds = (const struct lto_section_slot *) p;
57 return (hashval_t) htab_hash_string (ds->name);
58 }
59
60
61 /* Returns nonzero if P1 and P2 are equal. */
62
63 static int
64 eq_name (const void *p1, const void *p2)
65 {
66 const struct lto_section_slot *s1 =
67 (const struct lto_section_slot *) p1;
68 const struct lto_section_slot *s2 =
69 (const struct lto_section_slot *) p2;
70
71 return strcmp (s1->name, s2->name) == 0;
72 }
73
74 /* Free lto_section_slot */
75
76 static void
77 free_with_string (void *arg)
78 {
79 struct lto_section_slot *s = (struct lto_section_slot *)arg;
80
81 free (CONST_CAST (char *, s->name));
82 free (arg);
83 }
84
/* Create section hash table.  The table maps section names to
   struct lto_section_slot entries; entries and their name strings
   are released by free_with_string when removed or when the table
   is deleted.  */

htab_t
lto_obj_create_section_hash_table (void)
{
  return htab_create (37, hash_name, eq_name, free_with_string);
}
92
/* Delete an allocated integer KEY in the splay tree.  KEY was
   XCNEWed by lto_splay_tree_insert, so freeing it here is the
   matching release.  */

static void
lto_splay_tree_delete_id (splay_tree_key key)
{
  free ((void *) key);
}
100
101 /* Compare splay tree node ids A and B. */
102
103 static int
104 lto_splay_tree_compare_ids (splay_tree_key a, splay_tree_key b)
105 {
106 unsigned HOST_WIDE_INT ai;
107 unsigned HOST_WIDE_INT bi;
108
109 ai = *(unsigned HOST_WIDE_INT *) a;
110 bi = *(unsigned HOST_WIDE_INT *) b;
111
112 if (ai < bi)
113 return -1;
114 else if (ai > bi)
115 return 1;
116 return 0;
117 }
118
/* Look up splay tree node by ID in splay tree T.  Passing the
   address of the local ID is safe because the tree's comparison
   function dereferences keys and the pointer is only live for the
   duration of the lookup.  */

static splay_tree_node
lto_splay_tree_lookup (splay_tree t, unsigned HOST_WIDE_INT id)
{
  return splay_tree_lookup (t, (splay_tree_key) &id);
}
126
/* Check if KEY has ID.  KEY points to the heap-allocated
   unsigned HOST_WIDE_INT stored by lto_splay_tree_insert.  */

static bool
lto_splay_tree_id_equal_p (splay_tree_key key, unsigned HOST_WIDE_INT id)
{
  return *(unsigned HOST_WIDE_INT *) key == id;
}
134
/* Insert a splay tree node into tree T with ID as key and FILE_DATA as value.
   The ID is allocated separately because we need HOST_WIDE_INTs which may
   be wider than a splay_tree_key.  The allocation is reclaimed by
   lto_splay_tree_delete_id when the node is removed.  */

static void
lto_splay_tree_insert (splay_tree t, unsigned HOST_WIDE_INT id,
		       struct lto_file_decl_data *file_data)
{
  unsigned HOST_WIDE_INT *idp = XCNEW (unsigned HOST_WIDE_INT);
  *idp = id;
  splay_tree_insert (t, (splay_tree_key) idp, (splay_tree_value) file_data);
}
147
/* Create a splay tree mapping resolution ids to lto_file_decl_data.
   Keys are freed via lto_splay_tree_delete_id; values are not owned
   by the tree (NULL delete function).  */

static splay_tree
lto_splay_tree_new (void)
{
  return splay_tree_new (lto_splay_tree_compare_ids,
			 lto_splay_tree_delete_id,
			 NULL);
}
157
/* Return true when NODE has a clone that is analyzed (i.e. we need
   to load its body even if the node itself is not needed).

   This is an iterative pre-order walk over the clone tree rooted at
   NODE: descend into a node's clones first, otherwise advance to the
   next sibling clone, and when a subtree is exhausted climb back up
   via clone_of until an ancestor with an unvisited sibling is found
   (or the walk returns to ORIG and terminates).  */

static bool
has_analyzed_clone_p (struct cgraph_node *node)
{
  struct cgraph_node *orig = node;
  node = node->clones;
  if (node)
    while (node != orig)
      {
	if (node->symbol.analyzed)
	  return true;
	if (node->clones)
	  node = node->clones;
	else if (node->next_sibling_clone)
	  node = node->next_sibling_clone;
	else
	  {
	    /* Leaf with no further siblings: climb towards ORIG looking
	       for the next unvisited sibling subtree.  */
	    while (node != orig && !node->next_sibling_clone)
	      node = node->clone_of;
	    if (node != orig)
	      node = node->next_sibling_clone;
	  }
      }
  return false;
}
185
/* Read the function body for the function associated with NODE.

   A body is read when NODE is an analyzed node with a gimple body, or
   when an analyzed clone needs it.  Clones themselves share the body
   of their clone_of origin and are skipped.  In WPA mode no body is
   read at all.  In every case the decl is finally handed to
   rest_of_decl_compilation so the middle end knows about it.  */

static void
lto_materialize_function (struct cgraph_node *node)
{
  tree decl;
  struct lto_file_decl_data *file_data;
  const char *data, *name;
  size_t len;

  decl = node->symbol.decl;
  /* Read in functions with body (analyzed nodes)
     and also functions that are needed to produce virtual clones.  */
  if ((cgraph_function_with_gimple_body_p (node) && node->symbol.analyzed)
      || has_analyzed_clone_p (node))
    {
      /* Clones don't need to be read.  */
      if (node->clone_of)
	return;

      /* Load the function body only if not operating in WPA mode.  In
	 WPA mode, the body of the function is not needed.  */
      if (!flag_wpa)
	{
	  file_data = node->symbol.lto_file_data;
	  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));

	  /* We may have renamed the declaration, e.g., a static function.  */
	  name = lto_get_decl_name_mapping (file_data, name);

	  data = lto_get_section_data (file_data, LTO_section_function_body,
				       name, &len);
	  if (!data)
	    fatal_error ("%s: section %s is missing",
			 file_data->file_name,
			 name);

	  gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);

	  /* Switch cfun to the function being materialized and stream
	     its body in.  */
	  push_struct_function (decl);
	  announce_function (decl);
	  lto_input_function_body (file_data, decl, data);
	  /* Remember the first personality routine seen; used as the
	     default personality later on.  */
	  if (DECL_FUNCTION_PERSONALITY (decl) && !first_personality_decl)
	    first_personality_decl = DECL_FUNCTION_PERSONALITY (decl);
	  lto_stats.num_function_bodies++;
	  lto_free_section_data (file_data, LTO_section_function_body, name,
				 data, len);
	  pop_cfun ();
	  ggc_collect ();
	}
    }

  /* Let the middle end know about the function.  */
  rest_of_decl_compilation (decl, 1, 0);
}
241
242
/* Decode the content of memory pointed to by DATA in the in decl
   state object STATE.  DATA_IN points to a data_in structure for
   decoding.  Return the address after the decoded object in the
   input.

   The encoding is: one word holding the reader-cache index of the
   state's function decl (void_type_node encodes the global state),
   then for each of the LTO_N_DECL_STREAMS streams a size word
   followed by that many cache indices.  */

static const uint32_t *
lto_read_in_decl_state (struct data_in *data_in, const uint32_t *data,
			struct lto_in_decl_state *state)
{
  uint32_t ix;
  tree decl;
  uint32_t i, j;

  /* First word: the state's function context.  */
  ix = *data++;
  decl = streamer_tree_cache_get (data_in->reader_cache, ix);
  if (TREE_CODE (decl) != FUNCTION_DECL)
    {
      /* void_type_node marks the file-global decl state.  */
      gcc_assert (decl == void_type_node);
      decl = NULL_TREE;
    }
  state->fn_decl = decl;

  /* Materialize each decl stream from the reader cache.  */
  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      uint32_t size = *data++;
      tree *decls = ggc_alloc_vec_tree (size);

      for (j = 0; j < size; j++)
	decls[j] = streamer_tree_cache_get (data_in->reader_cache, data[j]);

      state->streams[i].size = size;
      state->streams[i].trees = decls;
      data += size;
    }

  return data;
}
280
281
282
/* Global type table.  FIXME, it should be possible to re-use some
   of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
   etc), but those assume that types were built with the various
   build_*_type routines which is not the case with the streamer.  */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
htab_t gimple_types;
/* Cache mapping a type to its computed hash value, so each type is
   hashed at most once.  */
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
htab_t type_hash_cache;

static hashval_t gimple_type_hash (const void *);

/* Structure used to maintain a cache of some type pairs compared by
   gimple_types_compatible_p when comparing aggregate types.  There are
   three possible values for SAME_P:

   -2: The pair (T1, T2) has just been inserted in the table.
    0: T1 and T2 are different types.
    1: T1 and T2 are the same type.  */

struct type_pair_d
{
  unsigned int uid1;
  unsigned int uid2;
  signed char same_p;
};
typedef struct type_pair_d *type_pair_t;

/* Direct-mapped cache of type pairs, indexed by a hash of the two
   TYPE_UIDs.  NOTE(review): not marked static — presumably no other
   file references it; confirm before tightening linkage.  */
#define GIMPLE_TYPE_PAIR_SIZE 16381
struct type_pair_d *type_pair_cache;
312
313
/* Lookup the pair of types T1 and T2 in the direct-mapped pair cache.
   If the slot holds a different pair, evict it and insert a fresh
   entry with SAME_P set to -2 ("just inserted").  */

static inline type_pair_t
lookup_type_pair (tree t1, tree t2)
{
  unsigned int index;
  unsigned int uid1, uid2;

  /* Canonicalize the order so (T1, T2) and (T2, T1) map to the same
     cache entry.  */
  if (TYPE_UID (t1) < TYPE_UID (t2))
    {
      uid1 = TYPE_UID (t1);
      uid2 = TYPE_UID (t2);
    }
  else
    {
      uid1 = TYPE_UID (t2);
      uid2 = TYPE_UID (t1);
    }
  gcc_checking_assert (uid1 != uid2);

  /* Using iterative_hash_hashval_t would imply function calls.
     We know that UIDs are in a limited range, so a cheap shift-and-add
     hash is good enough here.  */
  index = ((((unsigned HOST_WIDE_INT)uid1 << HOST_BITS_PER_WIDE_INT / 2) + uid2)
	   % GIMPLE_TYPE_PAIR_SIZE);
  if (type_pair_cache [index].uid1 == uid1
      && type_pair_cache [index].uid2 == uid2)
    return &type_pair_cache[index];

  /* Miss: overwrite the slot and mark the pair as freshly inserted.  */
  type_pair_cache [index].uid1 = uid1;
  type_pair_cache [index].uid2 = uid2;
  type_pair_cache [index].same_p = -2;

  return &type_pair_cache[index];
}
349
/* Per pointer state for the SCC finding.  The on_sccstack flag
   is not strictly required, it is true when there is no hash value
   recorded for the type and false otherwise.  But querying that
   is slower.  */

struct sccs
{
  unsigned int dfsnum;		/* DFS discovery number (Tarjan).  */
  unsigned int low;		/* Lowest dfsnum reachable from here.  */
  bool on_sccstack;		/* Still on the SCC stack?  */
  union {
    hashval_t hash;		/* Used by the type-hashing walk.  */
    signed char same_p;		/* Used by the compatibility walk.  */
  } u;
};

/* DFS counters for the hashing walk and the compatibility walk,
   respectively.  */
static unsigned int next_dfs_num;
static unsigned int gtc_next_dfs_num;
368
/* GIMPLE type merging cache.  A direct-mapped cache based on TYPE_UID.
   Each entry records a type and the leader it was merged to; a slot is
   only valid for lookup when its TYPE field matches the queried type.  */

typedef struct GTY(()) gimple_type_leader_entry_s {
  tree type;
  tree leader;
} gimple_type_leader_entry;

#define GIMPLE_TYPE_LEADER_SIZE 16381
static GTY((length("GIMPLE_TYPE_LEADER_SIZE")))
  gimple_type_leader_entry *gimple_type_leader;
379
380 /* Lookup an existing leader for T and return it or NULL_TREE, if
381 there is none in the cache. */
382
383 static inline tree
384 gimple_lookup_type_leader (tree t)
385 {
386 gimple_type_leader_entry *leader;
387
388 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
389 if (leader->type != t)
390 return NULL_TREE;
391
392 return leader->leader;
393 }
394
395
/* Return true if T1 and T2 have the same name.  Two nameless types
   compare equal; a named and a nameless type do not.  (An earlier
   FOR_COMPLETION_P parameter is gone; this is the remaining
   behavior.)  */

static bool
compare_type_names_p (tree t1, tree t2)
{
  tree name1 = TYPE_NAME (t1);
  tree name2 = TYPE_NAME (t2);

  /* A named type never matches a nameless one.  */
  if ((name1 != NULL_TREE) != (name2 != NULL_TREE))
    return false;

  if (name1 == NULL_TREE)
    return true;

  /* Either both should be a TYPE_DECL or both an IDENTIFIER_NODE.  */
  if (TREE_CODE (name1) != TREE_CODE (name2))
    return false;

  if (TREE_CODE (name1) == TYPE_DECL)
    name1 = DECL_NAME (name1);
  gcc_checking_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);

  if (TREE_CODE (name2) == TYPE_DECL)
    name2 = DECL_NAME (name2);
  gcc_checking_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);

  /* Identifiers can be compared with pointer equality rather
     than a string comparison.  */
  if (name1 == name2)
    return true;

  return false;
}
431
432 static bool
433 gimple_types_compatible_p_1 (tree, tree, type_pair_t,
434 vec<type_pair_t> *,
435 struct pointer_map_t *, struct obstack *);
436
/* DFS visit the edge from the callers type pair with state *STATE to
   the pair T1, T2.  Update the comparison status if it is not part of
   the SCC containing the callers pair and return it.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static bool
gtc_visit (tree t1, tree t2,
	   struct sccs *state,
	   vec<type_pair_t> *sccstack,
	   struct pointer_map_t *sccstate,
	   struct obstack *sccstate_obstack)
{
  struct sccs *cstate = NULL;
  type_pair_t p;
  void **slot;
  tree leader1, leader2;

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
	return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
	  || FIXED_POINT_TYPE_P (t1))
	return true;

      /* For other types fall through to more complex checks.  */
    }

  /* If the types have been previously registered and found equal
     they still are.  */
  leader1 = gimple_lookup_type_leader (t1);
  leader2 = gimple_lookup_type_leader (t2);
  if (leader1 == t2
      || t1 == leader2
      || (leader1 && leader1 == leader2))
    return true;

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keeping the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash (t1) != gimple_type_hash (t2))
    return false;

  /* Allocate a new cache entry for this comparison.  */
  p = lookup_type_pair (t1, t2);
  if (p->same_p == 0 || p->same_p == 1)
    {
      /* We have already decided whether T1 and T2 are the
	 same, return the cached result.  */
      return p->same_p == 1;
    }

  if ((slot = pointer_map_contains (sccstate, p)) != NULL)
    cstate = (struct sccs *)*slot;
  /* Not yet visited.  DFS recurse.  */
  if (!cstate)
    {
      gimple_types_compatible_p_1 (t1, t2, p,
				   sccstack, sccstate, sccstate_obstack);
      /* The recursion registered a state for P; fetch it and fold its
	 low-link into ours.  */
      cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
      state->low = MIN (state->low, cstate->low);
    }
  /* If the type is still on the SCC stack adjust the parents low.  */
  if (cstate->dfsnum < state->dfsnum
      && cstate->on_sccstack)
    state->low = MIN (cstate->dfsnum, state->low);

  /* Return the current lattice value.  We start with an equality
     assumption so types part of a SCC will be optimistically
     treated equal unless proven otherwise.  */
  return cstate->u.same_p;
}
554
/* Worker for gimple_types_compatible_p: structurally compare T1 and T2
   as the pair P, performing Tarjan's SCC detection so that mutually
   recursive types are decided as a unit.  On SCC completion every pair
   popped off the stack receives the final SAME_P verdict.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static bool
gimple_types_compatible_p_1 (tree t1, tree t2, type_pair_t p,
			     vec<type_pair_t> *sccstack,
			     struct pointer_map_t *sccstate,
			     struct obstack *sccstate_obstack)
{
  struct sccs *state;

  /* P must be a freshly inserted cache entry (see lookup_type_pair).  */
  gcc_assert (p->same_p == -2);

  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, p) = state;

  sccstack->safe_push (p);
  state->dfsnum = gtc_next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;
  /* Start with an equality assumption.  As we DFS recurse into child
     SCCs this assumption may get revisited.  */
  state->u.same_p = 1;

  /* The struct tags shall compare equal.  */
  if (!compare_type_names_p (t1, t2))
    goto different_types;

  /* The main variant of both types should compare equal.  */
  if (TYPE_MAIN_VARIANT (t1) != t1
      || TYPE_MAIN_VARIANT (t2) != t2)
    {
      if (!gtc_visit (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;
    }

  /* We may not merge typedef types to the same type in different
     contexts.  */
  if (TYPE_NAME (t1)
      && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
      && DECL_CONTEXT (TYPE_NAME (t1))
      && TYPE_P (DECL_CONTEXT (TYPE_NAME (t1))))
    {
      if (!gtc_visit (DECL_CONTEXT (TYPE_NAME (t1)),
		      DECL_CONTEXT (TYPE_NAME (t2)),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;
    }

  /* If their attributes are not the same they can't be the same type.  */
  if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
    goto different_types;

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case VECTOR_TYPE:
    case COMPLEX_TYPE:
      /* Element types must be compatible.  */
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;
      goto same_types;

    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
	 the number of elements are the same.  */
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
		      state, sccstack, sccstate, sccstate_obstack)
	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
	goto different_types;
      else
	{
	  tree i1 = TYPE_DOMAIN (t1);
	  tree i2 = TYPE_DOMAIN (t2);

	  /* For an incomplete external array, the type domain can be
	     NULL_TREE.  Check this condition also.  */
	  if (i1 == NULL_TREE && i2 == NULL_TREE)
	    goto same_types;
	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
	    goto different_types;
	  else
	    {
	      tree min1 = TYPE_MIN_VALUE (i1);
	      tree min2 = TYPE_MIN_VALUE (i2);
	      tree max1 = TYPE_MAX_VALUE (i1);
	      tree max2 = TYPE_MAX_VALUE (i2);

	      /* The minimum/maximum values have to be the same.  */
	      if ((min1 == min2
		   || (min1 && min2
		       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
			    && TREE_CODE (min2) == PLACEHOLDER_EXPR)
			   || operand_equal_p (min1, min2, 0))))
		  && (max1 == max2
		      || (max1 && max2
			  && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
			       && TREE_CODE (max2) == PLACEHOLDER_EXPR)
			      || operand_equal_p (max1, max2, 0)))))
		goto same_types;
	      else
		goto different_types;
	    }
	}

    case METHOD_TYPE:
      /* Method types should belong to the same class.  */
      if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;

      /* Fallthru  */

    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
	 are the same.  */
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;

      if (!comp_type_attributes (t1, t2))
	goto different_types;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
	goto same_types;
      else
	{
	  tree parms1, parms2;

	  /* Compare the argument lists pairwise.  */
	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
	       parms1 && parms2;
	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
	    {
	      if (!gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2),
			      state, sccstack, sccstate, sccstate_obstack))
		goto different_types;
	    }

	  /* Lists of different length are not compatible.  */
	  if (parms1 || parms2)
	    goto different_types;

	  goto same_types;
	}

    case OFFSET_TYPE:
      {
	/* Both the target type and the base type must match.  */
	if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
			state, sccstack, sccstate, sccstate_obstack)
	    || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
			   TYPE_OFFSET_BASETYPE (t2),
			   state, sccstack, sccstate, sccstate_obstack))
	  goto different_types;

	goto same_types;
      }

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
	/* If the two pointers have different ref-all attributes,
	   they can't be the same type.  */
	if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
	  goto different_types;

	/* Otherwise, pointer and reference types are the same if the
	   pointed-to types are the same.  */
	if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
		       state, sccstack, sccstate, sccstate_obstack))
	  goto same_types;

	goto different_types;
      }

    case INTEGER_TYPE:
    case BOOLEAN_TYPE:
      {
	tree min1 = TYPE_MIN_VALUE (t1);
	tree max1 = TYPE_MAX_VALUE (t1);
	tree min2 = TYPE_MIN_VALUE (t2);
	tree max2 = TYPE_MAX_VALUE (t2);
	bool min_equal_p = false;
	bool max_equal_p = false;

	/* If either type has a minimum value, the other type must
	   have the same.  */
	if (min1 == NULL_TREE && min2 == NULL_TREE)
	  min_equal_p = true;
	else if (min1 && min2 && operand_equal_p (min1, min2, 0))
	  min_equal_p = true;

	/* Likewise, if either type has a maximum value, the other
	   type must have the same.  */
	if (max1 == NULL_TREE && max2 == NULL_TREE)
	  max_equal_p = true;
	else if (max1 && max2 && operand_equal_p (max1, max2, 0))
	  max_equal_p = true;

	if (!min_equal_p || !max_equal_p)
	  goto different_types;

	goto same_types;
      }

    case ENUMERAL_TYPE:
      {
	/* FIXME lto, we cannot check bounds on enumeral types because
	   different front ends will produce different values.
	   In C, enumeral types are integers, while in C++ each element
	   will have its own symbolic value.  We should decide how enums
	   are to be represented in GIMPLE and have each front end lower
	   to that.  */
	tree v1, v2;

	/* For enumeral types, all the values must be the same.  */
	if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
	  goto same_types;

	for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
	     v1 && v2;
	     v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
	  {
	    tree c1 = TREE_VALUE (v1);
	    tree c2 = TREE_VALUE (v2);

	    if (TREE_CODE (c1) == CONST_DECL)
	      c1 = DECL_INITIAL (c1);

	    if (TREE_CODE (c2) == CONST_DECL)
	      c2 = DECL_INITIAL (c2);

	    if (tree_int_cst_equal (c1, c2) != 1)
	      goto different_types;

	    if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
	      goto different_types;
	  }

	/* If one enumeration has more values than the other, they
	   are not the same.  */
	if (v1 || v2)
	  goto different_types;

	goto same_types;
      }

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f1, f2;

	/* For aggregate types, all the fields must be the same.  */
	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
	     f1 && f2;
	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	  {
	    /* Different field kinds are not compatible.  */
	    if (TREE_CODE (f1) != TREE_CODE (f2))
	      goto different_types;
	    /* Field decls must have the same name and offset.  */
	    if (TREE_CODE (f1) == FIELD_DECL
		&& (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
		    || !gimple_compare_field_offset (f1, f2)))
	      goto different_types;
	    /* All entities should have the same name and type.  */
	    if (DECL_NAME (f1) != DECL_NAME (f2)
		|| !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2),
			       state, sccstack, sccstate, sccstate_obstack))
	      goto different_types;
	  }

	/* If one aggregate has more fields than the other, they
	   are not the same.  */
	if (f1 || f2)
	  goto different_types;

	goto same_types;
      }

    default:
      gcc_unreachable ();
    }

  /* Common exit path for types that are not compatible.  */
different_types:
  state->u.same_p = 0;
  goto pop;

  /* Common exit path for types that are compatible.  */
same_types:
  gcc_assert (state->u.same_p == 1);

pop:
  if (state->low == state->dfsnum)
    {
      type_pair_t x;

      /* This pair is an SCC root.  Pop off the SCC and set its cache
	 values to the final comparison result.  */
      do
	{
	  struct sccs *cstate;
	  x = sccstack->pop ();
	  cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	  cstate->on_sccstack = false;
	  x->same_p = state->u.same_p;
	}
      while (x != p);
    }

  return state->u.same_p;
}
869
/* Return true iff T1 and T2 are structurally identical.  Handles the
   cheap structural checks inline, consults the leader and pair caches,
   and only falls back to the full SCC-based comparison in
   gimple_types_compatible_p_1 when necessary.  (The FOR_MERGING_P
   mode mentioned in older comments no longer exists.)  */

static bool
gimple_types_compatible_p (tree t1, tree t2)
{
  vec<type_pair_t> sccstack = vNULL;
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
  type_pair_t p = NULL;
  bool res;
  tree leader1, leader2;

  /* Before starting to set up the SCC machinery handle simple cases.  */

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
	return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
	  || FIXED_POINT_TYPE_P (t1))
	return true;

      /* For other types fall through to more complex checks.  */
    }

  /* If the types have been previously registered and found equal
     they still are.  */
  leader1 = gimple_lookup_type_leader (t1);
  leader2 = gimple_lookup_type_leader (t2);
  if (leader1 == t2
      || t1 == leader2
      || (leader1 && leader1 == leader2))
    return true;

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keeping the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash (t1) != gimple_type_hash (t2))
    return false;

  /* If we've visited this type pair before (in the case of aggregates
     with self-referential types), and we made a decision, return it.  */
  p = lookup_type_pair (t1, t2);
  if (p->same_p == 0 || p->same_p == 1)
    {
      /* We have already decided whether T1 and T2 are the
	 same, return the cached result.  */
      return p->same_p == 1;
    }

  /* Now set up the SCC machinery for the comparison.  */
  gtc_next_dfs_num = 1;
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  res = gimple_types_compatible_p_1 (t1, t2, p,
				     &sccstack, sccstate, &sccstate_obstack);
  sccstack.release ();
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);

  return res;
}
978
979 static hashval_t
980 iterative_hash_gimple_type (tree, hashval_t, vec<tree> *,
981 struct pointer_map_t *, struct obstack *);
982
983 /* DFS visit the edge from the callers type with state *STATE to T.
984 Update the callers type hash V with the hash for T if it is not part
985 of the SCC containing the callers type and return it.
986 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
987
988 static hashval_t
989 visit (tree t, struct sccs *state, hashval_t v,
990 vec<tree> *sccstack,
991 struct pointer_map_t *sccstate,
992 struct obstack *sccstate_obstack)
993 {
994 struct sccs *cstate = NULL;
995 struct tree_int_map m;
996 void **slot;
997
998 /* If there is a hash value recorded for this type then it can't
999 possibly be part of our parent SCC. Simply mix in its hash. */
1000 m.base.from = t;
1001 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
1002 && *slot)
1003 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);
1004
1005 if ((slot = pointer_map_contains (sccstate, t)) != NULL)
1006 cstate = (struct sccs *)*slot;
1007 if (!cstate)
1008 {
1009 hashval_t tem;
1010 /* Not yet visited. DFS recurse. */
1011 tem = iterative_hash_gimple_type (t, v,
1012 sccstack, sccstate, sccstate_obstack);
1013 if (!cstate)
1014 cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
1015 state->low = MIN (state->low, cstate->low);
1016 /* If the type is no longer on the SCC stack and thus is not part
1017 of the parents SCC mix in its hash value. Otherwise we will
1018 ignore the type for hashing purposes and return the unaltered
1019 hash value. */
1020 if (!cstate->on_sccstack)
1021 return tem;
1022 }
1023 if (cstate->dfsnum < state->dfsnum
1024 && cstate->on_sccstack)
1025 state->low = MIN (cstate->dfsnum, state->low);
1026
1027 /* We are part of our parents SCC, skip this type during hashing
1028 and return the unaltered hash value. */
1029 return v;
1030 }
1031
1032 /* Hash NAME with the previous hash value V and return it. */
1033
1034 static hashval_t
1035 iterative_hash_name (tree name, hashval_t v)
1036 {
1037 if (!name)
1038 return v;
1039 v = iterative_hash_hashval_t (TREE_CODE (name), v);
1040 if (TREE_CODE (name) == TYPE_DECL)
1041 name = DECL_NAME (name);
1042 if (!name)
1043 return v;
1044 gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
1045 return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
1046 }
1047
/* A type, hashvalue pair for sorting SCC members.  Sorted by hash via
   type_hash_pair_compare so SCC hashing is order-independent.  */

struct type_hash_pair {
  tree type;
  hashval_t hash;
};
1054
1055 /* Compare two type, hashvalue pairs. */
1056
1057 static int
1058 type_hash_pair_compare (const void *p1_, const void *p2_)
1059 {
1060 const struct type_hash_pair *p1 = (const struct type_hash_pair *) p1_;
1061 const struct type_hash_pair *p2 = (const struct type_hash_pair *) p2_;
1062 if (p1->hash < p2->hash)
1063 return -1;
1064 else if (p1->hash > p2->hash)
1065 return 1;
1066 return 0;
1067 }
1068
/* Returning a hash value for gimple type TYPE combined with VAL.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.

   To hash a type we end up hashing in types that are reachable.
   Through pointers we can end up with cycles which messes up the
   required property that we need to compute the same hash value
   for structurally equivalent types.  To avoid this we have to
   hash all types in a cycle (the SCC) in a commutative way.  The
   easiest way is to not mix in the hashes of the SCC members at
   all.  To make this work we have to delay setting the hash
   values of the SCC until it is complete.  */

static hashval_t
iterative_hash_gimple_type (tree type, hashval_t val,
			    vec<tree> *sccstack,
			    struct pointer_map_t *sccstate,
			    struct obstack *sccstate_obstack)
{
  hashval_t v;
  void **slot;
  struct sccs *state;

  /* Not visited during this DFS walk.  The caller (visit or
     gimple_type_hash) guarantees we are entered at most once per
     type per walk.  */
  gcc_checking_assert (!pointer_map_contains (sccstate, type));
  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, type) = state;

  /* Standard Tarjan-SCC bookkeeping: push, number, and start with
     low == dfsnum.  */
  sccstack->safe_push (type);
  state->dfsnum = next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;

  /* Combine a few common features of types so that types are grouped into
     smaller sets; when searching for existing matching types to merge,
     only existing types having the same features as the new type will be
     checked.  */
  v = iterative_hash_name (TYPE_NAME (type), 0);
  if (TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_CONTEXT (TYPE_NAME (type))
      && TYPE_P (DECL_CONTEXT (TYPE_NAME (type))))
    v = visit (DECL_CONTEXT (TYPE_NAME (type)), state, v,
	       sccstack, sccstate, sccstate_obstack);

  /* Factor in the variant structure.  */
  if (TYPE_MAIN_VARIANT (type) != type)
    v = visit (TYPE_MAIN_VARIANT (type), state, v,
	       sccstack, sccstate, sccstate_obstack);

  v = iterative_hash_hashval_t (TREE_CODE (type), v);
  v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
  v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);

  /* Do not hash the types size as this will cause differences in
     hash values for the complete vs. the incomplete type variant.  */

  /* Incorporate common features of numerical types.  */
  if (INTEGRAL_TYPE_P (type)
      || SCALAR_FLOAT_TYPE_P (type)
      || FIXED_POINT_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
      v = iterative_hash_hashval_t (TYPE_MODE (type), v);
      v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
    }

  /* For pointer and reference types, fold in information about the type
     pointed to.  */
  if (POINTER_TYPE_P (type))
    v = visit (TREE_TYPE (type), state, v,
	       sccstack, sccstate, sccstate_obstack);

  /* For integer types hash the types min/max values and the string flag.  */
  if (TREE_CODE (type) == INTEGER_TYPE)
    {
      /* OMP lowering can introduce error_mark_node in place of
	 random local decls in types.  */
      if (TYPE_MIN_VALUE (type) != error_mark_node)
	v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
      if (TYPE_MAX_VALUE (type) != error_mark_node)
	v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
    }

  /* For array types hash the domain and the string flag.  */
  if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
    {
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
      v = visit (TYPE_DOMAIN (type), state, v,
		 sccstack, sccstate, sccstate_obstack);
    }

  /* Recurse for aggregates with a single element type.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    v = visit (TREE_TYPE (type), state, v,
	       sccstack, sccstate, sccstate_obstack);

  /* Incorporate function return and argument types.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      unsigned na;
      tree p;

      /* For method types also incorporate their parent class.  */
      if (TREE_CODE (type) == METHOD_TYPE)
	v = visit (TYPE_METHOD_BASETYPE (type), state, v,
		   sccstack, sccstate, sccstate_obstack);

      /* Check result and argument types.  */
      v = visit (TREE_TYPE (type), state, v,
		 sccstack, sccstate, sccstate_obstack);
      for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
	{
	  v = visit (TREE_VALUE (p), state, v,
		     sccstack, sccstate, sccstate_obstack);
	  na++;
	}

      /* Fold in the argument count so prefixes do not collide.  */
      v = iterative_hash_hashval_t (na, v);
    }

  if (RECORD_OR_UNION_TYPE_P (type))
    {
      unsigned nf;
      tree f;

      /* Hash field names and types, plus the field count.  */
      for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
	{
	  v = iterative_hash_name (DECL_NAME (f), v);
	  v = visit (TREE_TYPE (f), state, v,
		     sccstack, sccstate, sccstate_obstack);
	  nf++;
	}

      v = iterative_hash_hashval_t (nf, v);
    }

  /* Record hash for us.  Members of an SCC read it via pairs[] below
     once the SCC root is found.  */
  state->u.hash = v;

  /* See if we found an SCC.  TYPE is an SCC root iff its low link
     never dropped below its own DFS number.  */
  if (state->low == state->dfsnum)
    {
      tree x;
      struct tree_int_map *m;

      /* Pop off the SCC and set its hash values.  */
      x = sccstack->pop ();
      /* Optimize SCC size one.  */
      if (x == type)
	{
	  state->on_sccstack = false;
	  m = ggc_alloc_cleared_tree_int_map ();
	  m->base.from = x;
	  m->to = v;
	  slot = htab_find_slot (type_hash_cache, m, INSERT);
	  gcc_assert (!*slot);
	  *slot = (void *) m;
	}
      else
	{
	  struct sccs *cstate;
	  unsigned first, i, size, j;
	  struct type_hash_pair *pairs;
	  /* Pop off the SCC and build an array of type, hash pairs.  */
	  first = sccstack->length () - 1;
	  while ((*sccstack)[first] != type)
	    --first;
	  /* +1 accounts for X already popped above.  */
	  size = sccstack->length () - first + 1;
	  pairs = XALLOCAVEC (struct type_hash_pair, size);
	  i = 0;
	  cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	  cstate->on_sccstack = false;
	  pairs[i].type = x;
	  pairs[i].hash = cstate->u.hash;
	  do
	    {
	      x = sccstack->pop ();
	      cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	      cstate->on_sccstack = false;
	      ++i;
	      pairs[i].type = x;
	      pairs[i].hash = cstate->u.hash;
	    }
	  while (x != type);
	  gcc_assert (i + 1 == size);
	  /* Sort the arrays of type, hash pairs so that when we mix in
	     all members of the SCC the hash value becomes independent on
	     the order we visited the SCC.  Disregard hashes equal to
	     the hash of the type we mix into because we cannot guarantee
	     a stable sort for those across different TUs.  */
	  qsort (pairs, size, sizeof (struct type_hash_pair),
		 type_hash_pair_compare);
	  for (i = 0; i < size; ++i)
	    {
	      hashval_t hash;
	      m = ggc_alloc_cleared_tree_int_map ();
	      m->base.from = pairs[i].type;
	      hash = pairs[i].hash;
	      /* Skip same hashes.  */
	      for (j = i + 1; j < size && pairs[j].hash == pairs[i].hash; ++j)
		;
	      /* Mix in every other member's hash, first those sorted
		 after us, then those before, wrapping around so the
		 result is order-independent.  */
	      for (; j < size; ++j)
		hash = iterative_hash_hashval_t (pairs[j].hash, hash);
	      for (j = 0; pairs[j].hash != pairs[i].hash; ++j)
		hash = iterative_hash_hashval_t (pairs[j].hash, hash);
	      m->to = hash;
	      if (pairs[i].type == type)
		v = hash;
	      slot = htab_find_slot (type_hash_cache, m, INSERT);
	      gcc_assert (!*slot);
	      *slot = (void *) m;
	    }
	}
    }

  /* Combine the local hash with the caller's accumulated value.  */
  return iterative_hash_hashval_t (v, val);
}
1289
/* Returns a hash value for P (assumed to be a type).  The hash value
   is computed using some distinguishing features of the type.  Note
   that we cannot use pointer hashing here as we may be dealing with
   two distinct instances of the same type.

   This function should produce the same hash value for two compatible
   types according to gimple_types_compatible_p.  */

static hashval_t
gimple_type_hash (const void *p)
{
  const_tree t = (const_tree) p;
  vec<tree> sccstack = vNULL;
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
  hashval_t val;
  void **slot;
  struct tree_int_map m;

  /* Fast path: the DFS walk below caches results in type_hash_cache,
     so a type hashed before is returned immediately.  */
  m.base.from = CONST_CAST_TREE (t);
  if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
      && *slot)
    return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);

  /* Perform a DFS walk and pre-hash all reachable types.  */
  next_dfs_num = 1;
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
				    &sccstack, sccstate, &sccstate_obstack);
  /* Release the per-walk state; the computed hashes survive in
     type_hash_cache.  */
  sccstack.release ();
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);

  return val;
}
1326
1327 /* Returns nonzero if P1 and P2 are equal. */
1328
1329 static int
1330 gimple_type_eq (const void *p1, const void *p2)
1331 {
1332 const_tree t1 = (const_tree) p1;
1333 const_tree t2 = (const_tree) p2;
1334 return gimple_types_compatible_p (CONST_CAST_TREE (t1),
1335 CONST_CAST_TREE (t2));
1336 }
1337
1338
/* Worker for gimple_register_type.
   Register type T in the global type table gimple_types.
   When REGISTERING_MV is false first recurse for the main variant of T.
   Returns the prevailing (leader) type for T.  */

static tree
gimple_register_type_1 (tree t, bool registering_mv)
{
  void **slot;
  gimple_type_leader_entry *leader;

  /* If we registered this type before return the cached result.
     The leader cache is a direct-mapped table indexed by TYPE_UID,
     so a mismatching entry simply means a cache miss.  */
  leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
  if (leader->type == t)
    return leader->leader;

  /* Always register the main variant first.  This is important so we
     pick up the non-typedef variants as canonical, otherwise we'll end
     up taking typedef ids for structure tags during comparison.
     It also makes sure that main variants will be merged to main variants.
     As we are operating on a possibly partially fixed up type graph
     do not bother to recurse more than once, otherwise we may end up
     walking in circles.
     If we are registering a main variant it will either remain its
     own main variant or it will be merged to something else in which
     case we do not care for the main variant leader.  */
  if (!registering_mv
      && TYPE_MAIN_VARIANT (t) != t)
    gimple_register_type_1 (TYPE_MAIN_VARIANT (t), true);

  /* See if we already have an equivalent type registered.  Lookup
     uses gimple_type_hash / gimple_type_eq, i.e. structural
     compatibility, not pointer identity.  */
  slot = htab_find_slot (gimple_types, t, INSERT);
  if (*slot
      && *(tree *)slot != t)
    {
      tree new_type = (tree) *((tree *) slot);
      /* Cache the merge result before returning the prevailing type.  */
      leader->type = t;
      leader->leader = new_type;
      return new_type;
    }

  /* If not, insert it to the cache and the hash.  T becomes its own
     leader.  */
  leader->type = t;
  leader->leader = t;
  *slot = (void *) t;
  return t;
}
1385
1386 /* Register type T in the global type table gimple_types.
1387 If another type T', compatible with T, already existed in
1388 gimple_types then return T', otherwise return T. This is used by
1389 LTO to merge identical types read from different TUs. */
1390
1391 static tree
1392 gimple_register_type (tree t)
1393 {
1394 gcc_assert (TYPE_P (t));
1395 return gimple_register_type_1 (t, false);
1396 }
1397
/* Map TT to its prevailing merged type, but only when TT was marked
   (TREE_VISITED set in uniquify_nodes) as a non-prevailing type;
   otherwise TT is already canonical and is returned unchanged.  */
#define GIMPLE_REGISTER_TYPE(tt) \
   (TREE_VISITED (tt) ? gimple_register_type (tt) : tt)
1400
1401
1402
/* A hashtable of trees that potentially refer to variables or functions
   that must be replaced with their prevailing variant.  The if_marked
   GTY option lets the garbage collector drop entries whose trees are
   otherwise dead.  */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node))) htab_t
  tree_with_vars;
1407
1408 /* Remember that T is a tree that (potentially) refers to a variable
1409 or function decl that may be replaced with its prevailing variant. */
1410 static void
1411 remember_with_vars (tree t)
1412 {
1413 *(tree *) htab_find_slot (tree_with_vars, t, INSERT) = t;
1414 }
1415
/* Fix up one tree reference TT inside the tree currently being
   processed:
     - replace a non-prevailing type with its merged variant,
     - if TT is a public variable or function decl, record the
       *enclosing* tree T (captured from the caller's scope!) for
       later decl replacement,
     - rebuild an INTEGER_CST whose type was merged so shared
       constant nodes stay canonical.
   TT must be an lvalue since the macro may assign to it.  */
#define LTO_FIXUP_TREE(tt) \
  do \
    { \
      if (tt) \
	{ \
	  if (TYPE_P (tt)) \
	    (tt) = GIMPLE_REGISTER_TYPE (tt); \
	  if (VAR_OR_FUNCTION_DECL_P (tt) && TREE_PUBLIC (tt)) \
	    remember_with_vars (t); \
	  if (TREE_CODE (tt) == INTEGER_CST) \
	    (tt) = fixup_integer_cst (tt); \
	} \
    } while (0)

static void lto_fixup_types (tree);
1431
/* Return integer_cst T with updated type.  If T's type did not change
   during merging, T itself is returned.  */

static tree
fixup_integer_cst (tree t)
{
  tree type = GIMPLE_REGISTER_TYPE (TREE_TYPE (t));

  if (type == TREE_TYPE (t))
    return t;

  /* If overflow was set, streamer_read_integer_cst
     produced local copy of T.  It is safe to mutate in place.  */
  if (TREE_OVERFLOW (t))
    {
      TREE_TYPE (t) = type;
      return t;
    }
  else
    /* Otherwise produce new shared node for the new type.  */
    return build_int_cst_wide (type, TREE_INT_CST_LOW (t),
			       TREE_INT_CST_HIGH (t));
}
1454
/* Fix up fields of a tree_typed T: only TREE_TYPE needs fixing at
   this structure level.  */

static void
lto_ft_typed (tree t)
{
  LTO_FIXUP_TREE (TREE_TYPE (t));
}
1462
/* Fix up fields of a tree_common T: the typed part plus TREE_CHAIN.  */

static void
lto_ft_common (tree t)
{
  lto_ft_typed (t);
  LTO_FIXUP_TREE (TREE_CHAIN (t));
}
1471
/* Fix up fields of a decl_minimal T: name and context in addition to
   the common fields.  */

static void
lto_ft_decl_minimal (tree t)
{
  lto_ft_common (t);
  LTO_FIXUP_TREE (DECL_NAME (t));
  LTO_FIXUP_TREE (DECL_CONTEXT (t));
}
1481
/* Fix up fields of a decl_common T: sizes, initializer, attributes
   and abstract origin in addition to the decl_minimal fields.  */

static void
lto_ft_decl_common (tree t)
{
  lto_ft_decl_minimal (t);
  LTO_FIXUP_TREE (DECL_SIZE (t));
  LTO_FIXUP_TREE (DECL_SIZE_UNIT (t));
  LTO_FIXUP_TREE (DECL_INITIAL (t));
  LTO_FIXUP_TREE (DECL_ATTRIBUTES (t));
  LTO_FIXUP_TREE (DECL_ABSTRACT_ORIGIN (t));
}
1494
/* Fix up fields of a decl_with_vis T: assembler name and section name
   in addition to the decl_common fields.  */

static void
lto_ft_decl_with_vis (tree t)
{
  lto_ft_decl_common (t);

  /* Accessor macro has side-effects, use field-name here.
     (DECL_ASSEMBLER_NAME would compute a name on demand.)  */
  LTO_FIXUP_TREE (t->decl_with_vis.assembler_name);
  LTO_FIXUP_TREE (DECL_SECTION_NAME (t));
}
1506
/* Fix up fields of a decl_non_common T: argument/result fields and
   vindex in addition to the decl_with_vis fields.  */

static void
lto_ft_decl_non_common (tree t)
{
  lto_ft_decl_with_vis (t);
  LTO_FIXUP_TREE (DECL_ARGUMENT_FLD (t));
  LTO_FIXUP_TREE (DECL_RESULT_FLD (t));
  LTO_FIXUP_TREE (DECL_VINDEX (t));
  /* The C frontends may create exact duplicates for DECL_ORIGINAL_TYPE
     like for 'typedef enum foo foo'.  We have no way of avoiding to
     merge them and dwarf2out.c cannot deal with this,
     so fix this up by clearing DECL_ORIGINAL_TYPE in this case.  */
  if (TREE_CODE (t) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
    DECL_ORIGINAL_TYPE (t) = NULL_TREE;
}
1524
/* Fix up fields of a FUNCTION_DECL T: the personality routine in
   addition to the decl_non_common fields.  (The original comment said
   "decl_non_common" — a copy-paste slip.)  */

static void
lto_ft_function (tree t)
{
  lto_ft_decl_non_common (t);
  LTO_FIXUP_TREE (DECL_FUNCTION_PERSONALITY (t));
}
1533
/* Fix up fields of a field_decl T: offsets, bit-field type, qualifier
   and fcontext in addition to the decl_common fields.  */

static void
lto_ft_field_decl (tree t)
{
  lto_ft_decl_common (t);
  LTO_FIXUP_TREE (DECL_FIELD_OFFSET (t));
  LTO_FIXUP_TREE (DECL_BIT_FIELD_TYPE (t));
  LTO_FIXUP_TREE (DECL_QUALIFIER (t));
  LTO_FIXUP_TREE (DECL_FIELD_BIT_OFFSET (t));
  LTO_FIXUP_TREE (DECL_FCONTEXT (t));
}
1546
/* Fix up fields of a type T.  */

static void
lto_ft_type (tree t)
{
  lto_ft_common (t);
  LTO_FIXUP_TREE (TYPE_CACHED_VALUES (t));
  LTO_FIXUP_TREE (TYPE_SIZE (t));
  LTO_FIXUP_TREE (TYPE_SIZE_UNIT (t));
  LTO_FIXUP_TREE (TYPE_ATTRIBUTES (t));
  LTO_FIXUP_TREE (TYPE_NAME (t));

  /* Accessors are for derived node types only.
     For pointer types TYPE_MINVAL overlays the next-pointer-to chain,
     which is rebuilt later (see uniquify_nodes), so it must not be
     fixed up here.  Note only MINVAL is guarded; MAXVAL is fixed up
     unconditionally.  */
  if (!POINTER_TYPE_P (t))
    LTO_FIXUP_TREE (TYPE_MINVAL (t));
  LTO_FIXUP_TREE (TYPE_MAXVAL (t));

  /* Accessor is for derived node types only.  */
  LTO_FIXUP_TREE (t->type_non_common.binfo);

  LTO_FIXUP_TREE (TYPE_CONTEXT (t));
}
1569
/* Fix up fields of a BINFO T: vtable, offsets, base accesses and
   base binfos, in addition to the common fields.  */

static void
lto_ft_binfo (tree t)
{
  unsigned HOST_WIDE_INT i, n;
  tree base, saved_base;

  lto_ft_common (t);
  LTO_FIXUP_TREE (BINFO_VTABLE (t));
  LTO_FIXUP_TREE (BINFO_OFFSET (t));
  LTO_FIXUP_TREE (BINFO_VIRTUALS (t));
  LTO_FIXUP_TREE (BINFO_VPTR_FIELD (t));
  n = vec_safe_length (BINFO_BASE_ACCESSES (t));
  for (i = 0; i < n; i++)
    {
      /* LTO_FIXUP_TREE assigns through its argument, so fix up a
	 local copy and write it back only if it changed.  */
      saved_base = base = BINFO_BASE_ACCESS (t, i);
      LTO_FIXUP_TREE (base);
      if (base != saved_base)
	(*BINFO_BASE_ACCESSES (t))[i] = base;
    }
  LTO_FIXUP_TREE (BINFO_INHERITANCE_CHAIN (t));
  LTO_FIXUP_TREE (BINFO_SUBVTT_INDEX (t));
  LTO_FIXUP_TREE (BINFO_VPTR_INDEX (t));
  n = BINFO_N_BASE_BINFOS (t);
  for (i = 0; i < n; i++)
    {
      saved_base = base = BINFO_BASE_BINFO (t, i);
      LTO_FIXUP_TREE (base);
      if (base != saved_base)
	(*BINFO_BASE_BINFOS (t))[i] = base;
    }
}
1603
/* Fix up fields of a CONSTRUCTOR T: its type and every element's
   index and value.  */

static void
lto_ft_constructor (tree t)
{
  unsigned HOST_WIDE_INT idx;
  constructor_elt *ce;

  lto_ft_typed (t);

  for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
    {
      LTO_FIXUP_TREE (ce->index);
      LTO_FIXUP_TREE (ce->value);
    }
}
1620
/* Fix up fields of an expression tree T: its type and all operands.  */

static void
lto_ft_expr (tree t)
{
  int i;
  lto_ft_typed (t);
  /* Walk operands backwards; order is irrelevant for fixup.  */
  for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
    LTO_FIXUP_TREE (TREE_OPERAND (t, i));
}
1631
/* Given a tree T fixup fields of T by replacing types with their merged
   variant and other entities by an equal entity from an earlier compilation
   unit, or an entity being canonical in a different way.  This includes
   for instance integer or string constants.  Dispatches on TREE_CODE to
   the structure-specific lto_ft_* workers above.  */

static void
lto_fixup_types (tree t)
{
  switch (TREE_CODE (t))
    {
    case IDENTIFIER_NODE:
      /* Identifiers carry no references needing fixup.  */
      break;

    case TREE_LIST:
      LTO_FIXUP_TREE (TREE_VALUE (t));
      LTO_FIXUP_TREE (TREE_PURPOSE (t));
      LTO_FIXUP_TREE (TREE_CHAIN (t));
      break;

    case FIELD_DECL:
      lto_ft_field_decl (t);
      break;

    case LABEL_DECL:
    case CONST_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case IMPORTED_DECL:
      lto_ft_decl_common (t);
      break;

    case VAR_DECL:
      lto_ft_decl_with_vis (t);
      break;

    case TYPE_DECL:
      lto_ft_decl_non_common (t);
      break;

    case FUNCTION_DECL:
      lto_ft_function (t);
      break;

    case TREE_BINFO:
      lto_ft_binfo (t);
      break;

    case PLACEHOLDER_EXPR:
      lto_ft_common (t);
      break;

    case BLOCK:
    case TRANSLATION_UNIT_DECL:
    case OPTIMIZATION_NODE:
    case TARGET_OPTION_NODE:
      /* Deliberately left untouched here.  */
      break;

    default:
      if (TYPE_P (t))
	lto_ft_type (t);
      else if (TREE_CODE (t) == CONSTRUCTOR)
	lto_ft_constructor (t);
      else if (CONSTANT_CLASS_P (t))
	LTO_FIXUP_TREE (TREE_TYPE (t));
      else if (EXPR_P (t))
	{
	  lto_ft_expr (t);
	}
      else
	{
	  /* Anything else may still reference prevailing decls;
	     queue it for the decl-merging pass.  */
	  remember_with_vars (t);
	}
    }
}
1706
1707
1708 /* Return the resolution for the decl with index INDEX from DATA_IN. */
1709
1710 static enum ld_plugin_symbol_resolution
1711 get_resolution (struct data_in *data_in, unsigned index)
1712 {
1713 if (data_in->globals_resolution.exists ())
1714 {
1715 ld_plugin_symbol_resolution_t ret;
1716 /* We can have references to not emitted functions in
1717 DECL_FUNCTION_PERSONALITY at least. So we can and have
1718 to indeed return LDPR_UNKNOWN in some cases. */
1719 if (data_in->globals_resolution.length () <= index)
1720 return LDPR_UNKNOWN;
1721 ret = data_in->globals_resolution[index];
1722 return ret;
1723 }
1724 else
1725 /* Delay resolution finding until decl merging. */
1726 return LDPR_UNKNOWN;
1727 }
1728
/* Map assigning declarations their resolutions.  Created lazily by
   register_resolution; NULL until the first non-unknown resolution.  */
static pointer_map_t *resolution_map;
1731
1732 /* We need to record resolutions until symbol table is read. */
1733 static void
1734 register_resolution (tree decl, enum ld_plugin_symbol_resolution resolution)
1735 {
1736 if (resolution == LDPR_UNKNOWN)
1737 return;
1738 if (!resolution_map)
1739 resolution_map = pointer_map_create ();
1740 *pointer_map_insert (resolution_map, decl) = (void *)(size_t)resolution;
1741 }
1742
/* Register DECL with the global symbol table and change its
   name if necessary to avoid name clashes for static globals across
   different files.  DATA_IN provides the reader cache used to map
   DECL back to its stream index for resolution lookup.  */

static void
lto_register_var_decl_in_symtab (struct data_in *data_in, tree decl)
{
  tree context;

  /* Variable has file scope, not local.  Non-public non-automatic
     variables get compiled to completion here.  */
  if (!TREE_PUBLIC (decl)
      && !((context = decl_function_context (decl))
	   && auto_var_in_fn_p (decl, context)))
    {
      rest_of_decl_compilation (decl, 1, 0);
    }

  /* If this variable has already been declared, queue the
     declaration for merging.  */
  if (TREE_PUBLIC (decl))
    {
      unsigned ix;
      /* The decl must have been streamed, hence be in the cache.  */
      if (!streamer_tree_cache_lookup (data_in->reader_cache, decl, &ix))
	gcc_unreachable ();
      register_resolution (decl, get_resolution (data_in, ix));
    }
}
1770
1771
1772 /* Register DECL with the global symbol table and change its
1773 name if necessary to avoid name clashes for static globals across
1774 different files. DATA_IN contains descriptors and tables for the
1775 file being read. */
1776
1777 static void
1778 lto_register_function_decl_in_symtab (struct data_in *data_in, tree decl)
1779 {
1780 /* If this variable has already been declared, queue the
1781 declaration for merging. */
1782 if (TREE_PUBLIC (decl) && !DECL_ABSTRACT (decl))
1783 {
1784 unsigned ix;
1785 if (!streamer_tree_cache_lookup (data_in->reader_cache, decl, &ix))
1786 gcc_unreachable ();
1787 register_resolution (decl, get_resolution (data_in, ix));
1788 }
1789 }
1790
1791
1792 /* Given a streamer cache structure DATA_IN (holding a sequence of trees
1793 for one compilation unit) go over all trees starting at index FROM until the
1794 end of the sequence and replace fields of those trees, and the trees
1795 themself with their canonical variants as per gimple_register_type. */
1796
1797 static void
1798 uniquify_nodes (struct data_in *data_in, unsigned from)
1799 {
1800 struct streamer_tree_cache_d *cache = data_in->reader_cache;
1801 unsigned len = cache->nodes.length ();
1802 unsigned i;
1803
1804 /* Go backwards because children streamed for the first time come
1805 as part of their parents, and hence are created after them. */
1806
1807 /* First register all the types in the cache. This makes sure to
1808 have the original structure in the type cycles when registering
1809 them and computing hashes. */
1810 for (i = len; i-- > from;)
1811 {
1812 tree t = cache->nodes[i];
1813 if (t && TYPE_P (t))
1814 {
1815 tree newt = gimple_register_type (t);
1816 /* Mark non-prevailing types so we fix them up. No need
1817 to reset that flag afterwards - nothing that refers
1818 to those types is left and they are collected. */
1819 if (newt != t)
1820 TREE_VISITED (t) = 1;
1821 }
1822 }
1823
1824 /* Second fixup all trees in the new cache entries. */
1825 for (i = len; i-- > from;)
1826 {
1827 tree t = cache->nodes[i];
1828 tree oldt = t;
1829 if (!t)
1830 continue;
1831
1832 /* First fixup the fields of T. */
1833 lto_fixup_types (t);
1834
1835 if (!TYPE_P (t))
1836 continue;
1837
1838 /* Now try to find a canonical variant of T itself. */
1839 t = GIMPLE_REGISTER_TYPE (t);
1840
1841 if (t == oldt)
1842 {
1843 /* The following re-creates proper variant lists while fixing up
1844 the variant leaders. We do not stream TYPE_NEXT_VARIANT so the
1845 variant list state before fixup is broken. */
1846 tree tem, mv;
1847
1848 #ifdef ENABLE_CHECKING
1849 /* Remove us from our main variant list if we are not the
1850 variant leader. */
1851 if (TYPE_MAIN_VARIANT (t) != t)
1852 {
1853 tem = TYPE_MAIN_VARIANT (t);
1854 while (tem && TYPE_NEXT_VARIANT (tem) != t)
1855 tem = TYPE_NEXT_VARIANT (tem);
1856 gcc_assert (!tem && !TYPE_NEXT_VARIANT (t));
1857 }
1858 #endif
1859
1860 /* Query our new main variant. */
1861 mv = GIMPLE_REGISTER_TYPE (TYPE_MAIN_VARIANT (t));
1862
1863 /* If we were the variant leader and we get replaced ourselves drop
1864 all variants from our list. */
1865 if (TYPE_MAIN_VARIANT (t) == t
1866 && mv != t)
1867 {
1868 tem = t;
1869 while (tem)
1870 {
1871 tree tem2 = TYPE_NEXT_VARIANT (tem);
1872 TYPE_NEXT_VARIANT (tem) = NULL_TREE;
1873 tem = tem2;
1874 }
1875 }
1876
1877 /* If we are not our own variant leader link us into our new leaders
1878 variant list. */
1879 if (mv != t)
1880 {
1881 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
1882 TYPE_NEXT_VARIANT (mv) = t;
1883 if (RECORD_OR_UNION_TYPE_P (t))
1884 TYPE_BINFO (t) = TYPE_BINFO (mv);
1885 /* Preserve the invariant that type variants share their
1886 TYPE_FIELDS. */
1887 if (RECORD_OR_UNION_TYPE_P (t)
1888 && TYPE_FIELDS (mv) != TYPE_FIELDS (t))
1889 {
1890 tree f1, f2;
1891 for (f1 = TYPE_FIELDS (mv), f2 = TYPE_FIELDS (t);
1892 f1 && f2; f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1893 {
1894 unsigned ix;
1895 gcc_assert (f1 != f2
1896 && DECL_NAME (f1) == DECL_NAME (f2));
1897 if (!streamer_tree_cache_lookup (cache, f2, &ix))
1898 gcc_unreachable ();
1899 /* If we're going to replace an element which we'd
1900 still visit in the next iterations, we wouldn't
1901 handle it, so do it here. We do have to handle it
1902 even though the field_decl itself will be removed,
1903 as it could refer to e.g. integer_cst which we
1904 wouldn't reach via any other way, hence they
1905 (and their type) would stay uncollected. */
1906 /* ??? We should rather make sure to replace all
1907 references to f2 with f1. That means handling
1908 COMPONENT_REFs and CONSTRUCTOR elements in
1909 lto_fixup_types and special-case the field-decl
1910 operand handling. */
1911 /* ??? Not sure the above is all relevant in this
1912 path canonicalizing TYPE_FIELDS to that of the
1913 main variant. */
1914 if (ix < i)
1915 lto_fixup_types (f2);
1916 streamer_tree_cache_insert_at (cache, f1, ix);
1917 }
1918 TYPE_FIELDS (t) = TYPE_FIELDS (mv);
1919 }
1920 }
1921
1922 /* Finally adjust our main variant and fix it up. */
1923 TYPE_MAIN_VARIANT (t) = mv;
1924
1925 /* The following reconstructs the pointer chains
1926 of the new pointed-to type if we are a main variant. We do
1927 not stream those so they are broken before fixup. */
1928 if (TREE_CODE (t) == POINTER_TYPE
1929 && TYPE_MAIN_VARIANT (t) == t)
1930 {
1931 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (TREE_TYPE (t));
1932 TYPE_POINTER_TO (TREE_TYPE (t)) = t;
1933 }
1934 else if (TREE_CODE (t) == REFERENCE_TYPE
1935 && TYPE_MAIN_VARIANT (t) == t)
1936 {
1937 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (TREE_TYPE (t));
1938 TYPE_REFERENCE_TO (TREE_TYPE (t)) = t;
1939 }
1940 }
1941
1942 else
1943 {
1944 if (RECORD_OR_UNION_TYPE_P (t))
1945 {
1946 tree f1, f2;
1947 if (TYPE_FIELDS (t) != TYPE_FIELDS (oldt))
1948 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (oldt);
1949 f1 && f2; f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1950 {
1951 unsigned ix;
1952 gcc_assert (f1 != f2 && DECL_NAME (f1) == DECL_NAME (f2));
1953 if (!streamer_tree_cache_lookup (cache, f2, &ix))
1954 gcc_unreachable ();
1955 /* If we're going to replace an element which we'd
1956 still visit in the next iterations, we wouldn't
1957 handle it, so do it here. We do have to handle it
1958 even though the field_decl itself will be removed,
1959 as it could refer to e.g. integer_cst which we
1960 wouldn't reach via any other way, hence they
1961 (and their type) would stay uncollected. */
1962 /* ??? We should rather make sure to replace all
1963 references to f2 with f1. That means handling
1964 COMPONENT_REFs and CONSTRUCTOR elements in
1965 lto_fixup_types and special-case the field-decl
1966 operand handling. */
1967 if (ix < i)
1968 lto_fixup_types (f2);
1969 streamer_tree_cache_insert_at (cache, f1, ix);
1970 }
1971 }
1972
1973 /* If we found a tree that is equal to oldt replace it in the
1974 cache, so that further users (in the various LTO sections)
1975 make use of it. */
1976 streamer_tree_cache_insert_at (cache, t, i);
1977 }
1978 }
1979
1980 /* Finally compute the canonical type of all TREE_TYPEs and register
1981 VAR_DECL and FUNCTION_DECL nodes in the symbol table.
1982 From this point there are no longer any types with
1983 TYPE_STRUCTURAL_EQUALITY_P and its type-based alias problems.
1984 This step requires the TYPE_POINTER_TO lists being present, so
1985 make sure it is done last. */
1986 for (i = len; i-- > from;)
1987 {
1988 tree t = cache->nodes[i];
1989 if (t == NULL_TREE)
1990 continue;
1991
1992 if (TREE_CODE (t) == VAR_DECL)
1993 lto_register_var_decl_in_symtab (data_in, t);
1994 else if (TREE_CODE (t) == FUNCTION_DECL && !DECL_BUILT_IN (t))
1995 lto_register_function_decl_in_symtab (data_in, t);
1996 else if (!flag_wpa
1997 && TREE_CODE (t) == TYPE_DECL)
1998 debug_hooks->type_decl (t, !DECL_FILE_SCOPE_P (t));
1999 else if (TYPE_P (t) && !TYPE_CANONICAL (t))
2000 TYPE_CANONICAL (t) = gimple_register_canonical_type (t);
2001 }
2002 }
2003
2004
/* Read all the symbols from buffer DATA, using descriptors in DECL_DATA.
   RESOLUTIONS is the set of symbols picked by the linker (read from the
   resolution file when the linker plugin is being used).

   The buffer layout is: lto_decl_header, then decl-state records
   (decl_state_size bytes), then the main tree stream (main_size),
   then the string table (string_size).  */

static void
lto_read_decls (struct lto_file_decl_data *decl_data, const void *data,
		vec<ld_plugin_symbol_resolution_t> resolutions)
{
  const struct lto_decl_header *header = (const struct lto_decl_header *) data;
  const int decl_offset = sizeof (struct lto_decl_header);
  const int main_offset = decl_offset + header->decl_state_size;
  const int string_offset = main_offset + header->main_size;
  struct lto_input_block ib_main;
  struct data_in *data_in;
  unsigned int i;
  const uint32_t *data_ptr, *data_end;
  uint32_t num_decl_states;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
			header->main_size);

  data_in = lto_data_in_create (decl_data, (const char *) data + string_offset,
				header->string_size, resolutions);

  /* We do not uniquify the pre-loaded cache entries, those are middle-end
     internal types that should not be merged.  */

  /* Read the global declarations and types.  Each top-level tree is
     uniquified together with the children streamed as part of it.  */
  while (ib_main.p < ib_main.len)
    {
      tree t;
      unsigned from = data_in->reader_cache->nodes.length ();
      t = stream_read_tree (&ib_main, data_in);
      gcc_assert (t && ib_main.p <= ib_main.len);
      uniquify_nodes (data_in, from);
    }

  /* Read in lto_in_decl_state objects.  The first uint32 is the
     number of states; state 0 is the global one.  */
  data_ptr = (const uint32_t *) ((const char*) data + decl_offset);
  data_end =
     (const uint32_t *) ((const char*) data_ptr + header->decl_state_size);
  num_decl_states = *data_ptr++;

  gcc_assert (num_decl_states > 0);
  decl_data->global_decl_state = lto_new_in_decl_state ();
  data_ptr = lto_read_in_decl_state (data_in, data_ptr,
				     decl_data->global_decl_state);

  /* Read in per-function decl states and enter them in hash table.  */
  decl_data->function_decl_states =
    htab_create_ggc (37, lto_hash_in_decl_state, lto_eq_in_decl_state, NULL);

  for (i = 1; i < num_decl_states; i++)
    {
      struct lto_in_decl_state *state = lto_new_in_decl_state ();
      void **slot;

      data_ptr = lto_read_in_decl_state (data_in, data_ptr, state);
      slot = htab_find_slot (decl_data->function_decl_states, state, INSERT);
      gcc_assert (*slot == NULL);
      *slot = state;
    }

  if (data_ptr != data_end)
    internal_error ("bytecode stream: garbage at the end of symbols section");

  /* Set the current decl state to be the global state. */
  decl_data->current_decl_state = decl_data->global_decl_state;

  lto_data_in_delete (data_in);
}
2076
2077 /* Custom version of strtoll, which is not portable. */
2078
2079 static HOST_WIDEST_INT
2080 lto_parse_hex (const char *p)
2081 {
2082 HOST_WIDEST_INT ret = 0;
2083
2084 for (; *p != '\0'; ++p)
2085 {
2086 char c = *p;
2087 unsigned char part;
2088 ret <<= 4;
2089 if (c >= '0' && c <= '9')
2090 part = c - '0';
2091 else if (c >= 'a' && c <= 'f')
2092 part = c - 'a' + 10;
2093 else if (c >= 'A' && c <= 'F')
2094 part = c - 'A' + 10;
2095 else
2096 internal_error ("could not parse hex number");
2097 ret |= part;
2098 }
2099
2100 return ret;
2101 }
2102
2103 /* Read resolution for file named FILE_NAME. The resolution is read from
2104 RESOLUTION. */
2105
2106 static void
2107 lto_resolution_read (splay_tree file_ids, FILE *resolution, lto_file *file)
2108 {
2109 /* We require that objects in the resolution file are in the same
2110 order as the lto1 command line. */
2111 unsigned int name_len;
2112 char *obj_name;
2113 unsigned int num_symbols;
2114 unsigned int i;
2115 struct lto_file_decl_data *file_data;
2116 splay_tree_node nd = NULL;
2117
2118 if (!resolution)
2119 return;
2120
2121 name_len = strlen (file->filename);
2122 obj_name = XNEWVEC (char, name_len + 1);
2123 fscanf (resolution, " "); /* Read white space. */
2124
2125 fread (obj_name, sizeof (char), name_len, resolution);
2126 obj_name[name_len] = '\0';
2127 if (filename_cmp (obj_name, file->filename) != 0)
2128 internal_error ("unexpected file name %s in linker resolution file. "
2129 "Expected %s", obj_name, file->filename);
2130 if (file->offset != 0)
2131 {
2132 int t;
2133 char offset_p[17];
2134 HOST_WIDEST_INT offset;
2135 t = fscanf (resolution, "@0x%16s", offset_p);
2136 if (t != 1)
2137 internal_error ("could not parse file offset");
2138 offset = lto_parse_hex (offset_p);
2139 if (offset != file->offset)
2140 internal_error ("unexpected offset");
2141 }
2142
2143 free (obj_name);
2144
2145 fscanf (resolution, "%u", &num_symbols);
2146
2147 for (i = 0; i < num_symbols; i++)
2148 {
2149 int t;
2150 unsigned index;
2151 unsigned HOST_WIDE_INT id;
2152 char r_str[27];
2153 enum ld_plugin_symbol_resolution r = (enum ld_plugin_symbol_resolution) 0;
2154 unsigned int j;
2155 unsigned int lto_resolution_str_len =
2156 sizeof (lto_resolution_str) / sizeof (char *);
2157 res_pair rp;
2158
2159 t = fscanf (resolution, "%u " HOST_WIDE_INT_PRINT_HEX_PURE " %26s %*[^\n]\n",
2160 &index, &id, r_str);
2161 if (t != 3)
2162 internal_error ("invalid line in the resolution file");
2163
2164 for (j = 0; j < lto_resolution_str_len; j++)
2165 {
2166 if (strcmp (lto_resolution_str[j], r_str) == 0)
2167 {
2168 r = (enum ld_plugin_symbol_resolution) j;
2169 break;
2170 }
2171 }
2172 if (j == lto_resolution_str_len)
2173 internal_error ("invalid resolution in the resolution file");
2174
2175 if (!(nd && lto_splay_tree_id_equal_p (nd->key, id)))
2176 {
2177 nd = lto_splay_tree_lookup (file_ids, id);
2178 if (nd == NULL)
2179 internal_error ("resolution sub id %wx not in object file", id);
2180 }
2181
2182 file_data = (struct lto_file_decl_data *)nd->value;
2183 /* The indexes are very sparse. To save memory save them in a compact
2184 format that is only unpacked later when the subfile is processed. */
2185 rp.res = r;
2186 rp.index = index;
2187 file_data->respairs.safe_push (rp);
2188 if (file_data->max_index < index)
2189 file_data->max_index = index;
2190 }
2191 }
2192
/* List of file_decl_datas.  Keeps the sub files of one object in
   linker order, chained through lto_file_decl_data::next.  */
struct file_data_list
{
  /* Head and tail of the chain; both NULL when the list is empty.  */
  struct lto_file_decl_data *first, *last;
};
2198
2199 /* Is the name for a id'ed LTO section? */
2200
2201 static int
2202 lto_section_with_id (const char *name, unsigned HOST_WIDE_INT *id)
2203 {
2204 const char *s;
2205
2206 if (strncmp (name, LTO_SECTION_NAME_PREFIX, strlen (LTO_SECTION_NAME_PREFIX)))
2207 return 0;
2208 s = strrchr (name, '.');
2209 return s && sscanf (s, "." HOST_WIDE_INT_PRINT_HEX_PURE, id) == 1;
2210 }
2211
2212 /* Create file_data of each sub file id */
2213
2214 static int
2215 create_subid_section_table (struct lto_section_slot *ls, splay_tree file_ids,
2216 struct file_data_list *list)
2217 {
2218 struct lto_section_slot s_slot, *new_slot;
2219 unsigned HOST_WIDE_INT id;
2220 splay_tree_node nd;
2221 void **hash_slot;
2222 char *new_name;
2223 struct lto_file_decl_data *file_data;
2224
2225 if (!lto_section_with_id (ls->name, &id))
2226 return 1;
2227
2228 /* Find hash table of sub module id */
2229 nd = lto_splay_tree_lookup (file_ids, id);
2230 if (nd != NULL)
2231 {
2232 file_data = (struct lto_file_decl_data *)nd->value;
2233 }
2234 else
2235 {
2236 file_data = ggc_alloc_lto_file_decl_data ();
2237 memset(file_data, 0, sizeof (struct lto_file_decl_data));
2238 file_data->id = id;
2239 file_data->section_hash_table = lto_obj_create_section_hash_table ();;
2240 lto_splay_tree_insert (file_ids, id, file_data);
2241
2242 /* Maintain list in linker order */
2243 if (!list->first)
2244 list->first = file_data;
2245 if (list->last)
2246 list->last->next = file_data;
2247 list->last = file_data;
2248 }
2249
2250 /* Copy section into sub module hash table */
2251 new_name = XDUPVEC (char, ls->name, strlen (ls->name) + 1);
2252 s_slot.name = new_name;
2253 hash_slot = htab_find_slot (file_data->section_hash_table, &s_slot, INSERT);
2254 gcc_assert (*hash_slot == NULL);
2255
2256 new_slot = XDUP (struct lto_section_slot, ls);
2257 new_slot->name = new_name;
2258 *hash_slot = new_slot;
2259 return 1;
2260 }
2261
2262 /* Read declarations and other initializations for a FILE_DATA. */
2263
2264 static void
2265 lto_file_finalize (struct lto_file_decl_data *file_data, lto_file *file)
2266 {
2267 const char *data;
2268 size_t len;
2269 vec<ld_plugin_symbol_resolution_t>
2270 resolutions = vNULL;
2271 int i;
2272 res_pair *rp;
2273
2274 /* Create vector for fast access of resolution. We do this lazily
2275 to save memory. */
2276 resolutions.safe_grow_cleared (file_data->max_index + 1);
2277 for (i = 0; file_data->respairs.iterate (i, &rp); i++)
2278 resolutions[rp->index] = rp->res;
2279 file_data->respairs.release ();
2280
2281 file_data->renaming_hash_table = lto_create_renaming_table ();
2282 file_data->file_name = file->filename;
2283 data = lto_get_section_data (file_data, LTO_section_decls, NULL, &len);
2284 if (data == NULL)
2285 {
2286 internal_error ("cannot read LTO decls from %s", file_data->file_name);
2287 return;
2288 }
2289 /* Frees resolutions */
2290 lto_read_decls (file_data, data, resolutions);
2291 lto_free_section_data (file_data, LTO_section_decls, NULL, data, len);
2292 }
2293
2294 /* Finalize FILE_DATA in FILE and increase COUNT. */
2295
2296 static int
2297 lto_create_files_from_ids (lto_file *file, struct lto_file_decl_data *file_data,
2298 int *count)
2299 {
2300 lto_file_finalize (file_data, file);
2301 if (cgraph_dump_file)
2302 fprintf (cgraph_dump_file, "Creating file %s with sub id " HOST_WIDE_INT_PRINT_HEX "\n",
2303 file_data->file_name, file_data->id);
2304 (*count)++;
2305 return 0;
2306 }
2307
2308 /* Generate a TREE representation for all types and external decls
2309 entities in FILE.
2310
2311 Read all of the globals out of the file. Then read the cgraph
2312 and process the .o index into the cgraph nodes so that it can open
2313 the .o file to load the functions and ipa information. */
2314
2315 static struct lto_file_decl_data *
2316 lto_file_read (lto_file *file, FILE *resolution_file, int *count)
2317 {
2318 struct lto_file_decl_data *file_data = NULL;
2319 splay_tree file_ids;
2320 htab_t section_hash_table;
2321 struct lto_section_slot *section;
2322 struct file_data_list file_list;
2323 struct lto_section_list section_list;
2324
2325 memset (&section_list, 0, sizeof (struct lto_section_list));
2326 section_hash_table = lto_obj_build_section_table (file, &section_list);
2327
2328 /* Find all sub modules in the object and put their sections into new hash
2329 tables in a splay tree. */
2330 file_ids = lto_splay_tree_new ();
2331 memset (&file_list, 0, sizeof (struct file_data_list));
2332 for (section = section_list.first; section != NULL; section = section->next)
2333 create_subid_section_table (section, file_ids, &file_list);
2334
2335 /* Add resolutions to file ids */
2336 lto_resolution_read (file_ids, resolution_file, file);
2337
2338 /* Finalize each lto file for each submodule in the merged object */
2339 for (file_data = file_list.first; file_data != NULL; file_data = file_data->next)
2340 lto_create_files_from_ids (file, file_data, count);
2341
2342 splay_tree_delete (file_ids);
2343 htab_delete (section_hash_table);
2344
2345 return file_list.first;
2346 }
2347
2348 #if HAVE_MMAP_FILE && HAVE_SYSCONF && defined _SC_PAGE_SIZE
2349 #define LTO_MMAP_IO 1
2350 #endif
2351
2352 #if LTO_MMAP_IO
2353 /* Page size of machine is used for mmap and munmap calls. */
2354 static size_t page_mask;
2355 #endif
2356
/* Get the section data of length LEN from FILENAME starting at
   OFFSET.  The data segment must be freed by the caller when the
   caller is finished.  Returns NULL if all was not well.  */

static char *
lto_read_section_data (struct lto_file_decl_data *file_data,
		       intptr_t offset, size_t len)
{
  char *result;
  /* Single-entry cache of the last opened input file; see comment
     below.  */
  static int fd = -1;
  static char *fd_name;
#if LTO_MMAP_IO
  intptr_t computed_len;
  intptr_t computed_offset;
  intptr_t diff;
#endif

  /* Keep a single-entry file-descriptor cache.  The last file we
     touched will get closed at exit.
     ??? Eventually we want to add a more sophisticated larger cache
     or rather fix function body streaming to not stream them in
     practically random order.  */
  if (fd != -1
      && filename_cmp (fd_name, file_data->file_name) != 0)
    {
      free (fd_name);
      close (fd);
      fd = -1;
    }
  if (fd == -1)
    {
      fd = open (file_data->file_name, O_RDONLY|O_BINARY);
      if (fd == -1)
	{
	  /* fatal_error does not return; the return below only keeps
	     the control flow obviously well-formed.  */
	  fatal_error ("Cannot open %s", file_data->file_name);
	  return NULL;
	}
      fd_name = xstrdup (file_data->file_name);
    }

#if LTO_MMAP_IO
  /* Lazily compute the mask used to page-align mmap offsets.  */
  if (!page_mask)
    {
      size_t page_size = sysconf (_SC_PAGE_SIZE);
      page_mask = ~(page_size - 1);
    }

  /* mmap requires a page-aligned offset, so map from the enclosing
     page boundary and return a pointer adjusted by the difference.
     free_section_data recomputes the same alignment when unmapping.  */
  computed_offset = offset & page_mask;
  diff = offset - computed_offset;
  computed_len = len + diff;

  result = (char *) mmap (NULL, computed_len, PROT_READ, MAP_PRIVATE,
			  fd, computed_offset);
  if (result == MAP_FAILED)
    {
      fatal_error ("Cannot map %s", file_data->file_name);
      return NULL;
    }

  return result + diff;
#else
  result = (char *) xmalloc (len);
  if (lseek (fd, offset, SEEK_SET) != offset
      || read (fd, result, len) != (ssize_t) len)
    {
      free (result);
      fatal_error ("Cannot read %s", file_data->file_name);
      result = NULL;
    }
#ifdef __MINGW32__
  /* Native windows doesn't supports delayed unlink on opened file. So
     we close file here again. This produces higher I/O load, but at least
     it prevents to have dangling file handles preventing unlink. */
  free (fd_name);
  fd_name = NULL;
  close (fd);
  fd = -1;
#endif
  return result;
#endif
}
2438
2439
2440 /* Get the section data from FILE_DATA of SECTION_TYPE with NAME.
2441 NAME will be NULL unless the section type is for a function
2442 body. */
2443
2444 static const char *
2445 get_section_data (struct lto_file_decl_data *file_data,
2446 enum lto_section_type section_type,
2447 const char *name,
2448 size_t *len)
2449 {
2450 htab_t section_hash_table = file_data->section_hash_table;
2451 struct lto_section_slot *f_slot;
2452 struct lto_section_slot s_slot;
2453 const char *section_name = lto_get_section_name (section_type, name, file_data);
2454 char *data = NULL;
2455
2456 *len = 0;
2457 s_slot.name = section_name;
2458 f_slot = (struct lto_section_slot *) htab_find (section_hash_table, &s_slot);
2459 if (f_slot)
2460 {
2461 data = lto_read_section_data (file_data, f_slot->start, f_slot->len);
2462 *len = f_slot->len;
2463 }
2464
2465 free (CONST_CAST (char *, section_name));
2466 return data;
2467 }
2468
2469
2470 /* Free the section data from FILE_DATA of SECTION_TYPE with NAME that
2471 starts at OFFSET and has LEN bytes. */
2472
2473 static void
2474 free_section_data (struct lto_file_decl_data *file_data ATTRIBUTE_UNUSED,
2475 enum lto_section_type section_type ATTRIBUTE_UNUSED,
2476 const char *name ATTRIBUTE_UNUSED,
2477 const char *offset, size_t len ATTRIBUTE_UNUSED)
2478 {
2479 #if LTO_MMAP_IO
2480 intptr_t computed_len;
2481 intptr_t computed_offset;
2482 intptr_t diff;
2483 #endif
2484
2485 #if LTO_MMAP_IO
2486 computed_offset = ((intptr_t) offset) & page_mask;
2487 diff = (intptr_t) offset - computed_offset;
2488 computed_len = len + diff;
2489
2490 munmap ((caddr_t) computed_offset, computed_len);
2491 #else
2492 free (CONST_CAST(char *, offset));
2493 #endif
2494 }
2495
2496 static lto_file *current_lto_file;
2497
2498 /* Helper for qsort; compare partitions and return one with smaller size.
2499 We sort from greatest to smallest so parallel build doesn't stale on the
2500 longest compilation being executed too late. */
2501
2502 static int
2503 cmp_partitions_size (const void *a, const void *b)
2504 {
2505 const struct ltrans_partition_def *pa
2506 = *(struct ltrans_partition_def *const *)a;
2507 const struct ltrans_partition_def *pb
2508 = *(struct ltrans_partition_def *const *)b;
2509 return pb->insns - pa->insns;
2510 }
2511
2512 /* Helper for qsort; compare partitions and return one with smaller order. */
2513
2514 static int
2515 cmp_partitions_order (const void *a, const void *b)
2516 {
2517 const struct ltrans_partition_def *pa
2518 = *(struct ltrans_partition_def *const *)a;
2519 const struct ltrans_partition_def *pb
2520 = *(struct ltrans_partition_def *const *)b;
2521 int ordera = -1, orderb = -1;
2522
2523 if (lto_symtab_encoder_size (pa->encoder))
2524 ordera = lto_symtab_encoder_deref (pa->encoder, 0)->symbol.order;
2525 if (lto_symtab_encoder_size (pb->encoder))
2526 orderb = lto_symtab_encoder_deref (pb->encoder, 0)->symbol.order;
2527 return orderb - ordera;
2528 }
2529
/* Write all output files in WPA mode and the file with the list of
   LTRANS units.  */

static void
lto_wpa_write_files (void)
{
  unsigned i, n_sets;
  lto_file *file;
  ltrans_partition part;
  FILE *ltrans_output_list_stream;
  char *temp_filename;
  size_t blen;

  /* Open the LTRANS output list.  */
  if (!ltrans_output_list)
    fatal_error ("no LTRANS output list filename provided");
  ltrans_output_list_stream = fopen (ltrans_output_list, "w");
  if (ltrans_output_list_stream == NULL)
    fatal_error ("opening LTRANS output list %s: %m", ltrans_output_list);

  timevar_push (TV_WHOPR_WPA);

  FOR_EACH_VEC_ELT (ltrans_partitions, i, part)
    lto_stats.num_output_symtab_nodes += lto_symtab_encoder_size (part->encoder);

  /* Find out statics that need to be promoted
     to globals with hidden visibility because they are accessed from multiple
     partitions.  */
  lto_promote_cross_file_statics ();

  timevar_pop (TV_WHOPR_WPA);

  timevar_push (TV_WHOPR_WPA_IO);

  /* Generate a prefix for the LTRANS unit files: reuse the output list
     name, with any trailing ".out" stripped; "%u.o" is appended per
     partition below.  */
  blen = strlen (ltrans_output_list);
  temp_filename = (char *) xmalloc (blen + sizeof ("2147483648.o"));
  strcpy (temp_filename, ltrans_output_list);
  if (blen > sizeof (".out")
      && strcmp (temp_filename + blen - sizeof (".out") + 1,
		 ".out") == 0)
    temp_filename[blen - sizeof (".out") + 1] = '\0';
  blen = strlen (temp_filename);

  n_sets = ltrans_partitions.length ();

  /* Sort partitions by size so small ones are compiled last.
     FIXME: Even when not reordering we may want to output one list for parallel make
     and other for final link command.  */
  ltrans_partitions.qsort (flag_toplevel_reorder
			   ? cmp_partitions_size
			   : cmp_partitions_order);
  for (i = 0; i < n_sets; i++)
    {
      size_t len;
      ltrans_partition part = ltrans_partitions[i];

      /* Write all the nodes in SET.  */
      sprintf (temp_filename + blen, "%u.o", i);
      file = lto_obj_file_open (temp_filename, true);
      if (!file)
	fatal_error ("lto_obj_file_open() failed");

      if (!quiet_flag)
	fprintf (stderr, " %s (%s %i insns)", temp_filename, part->name, part->insns);
      if (cgraph_dump_file)
	{
	  lto_symtab_encoder_iterator lsei;

	  fprintf (cgraph_dump_file, "Writing partition %s to file %s, %i insns\n",
		   part->name, temp_filename, part->insns);
	  fprintf (cgraph_dump_file, "  Symbols in partition: ");
	  for (lsei = lsei_start_in_partition (part->encoder); !lsei_end_p (lsei);
	       lsei_next_in_partition (&lsei))
	    {
	      symtab_node node = lsei_node (lsei);
	      fprintf (cgraph_dump_file, "%s ", symtab_node_asm_name (node));
	    }
	  /* Boundary symbols are encoded in the partition but not part
	     of it; note which of them carry a body or initializer.  */
	  fprintf (cgraph_dump_file, "\n  Symbols in boundary: ");
	  for (lsei = lsei_start (part->encoder); !lsei_end_p (lsei);
	       lsei_next (&lsei))
	    {
	      symtab_node node = lsei_node (lsei);
	      if (!lto_symtab_encoder_in_partition_p (part->encoder, node))
		{
		  fprintf (cgraph_dump_file, "%s ", symtab_node_asm_name (node));
		  cgraph_node *cnode = dyn_cast <cgraph_node> (node);
		  if (cnode
		      && lto_symtab_encoder_encode_body_p (part->encoder, cnode))
		    fprintf (cgraph_dump_file, "(body included)");
		  else
		    {
		      varpool_node *vnode = dyn_cast <varpool_node> (node);
		      if (vnode
			  && lto_symtab_encoder_encode_initializer_p (part->encoder, vnode))
			fprintf (cgraph_dump_file, "(initializer included)");
		    }
		}
	    }
	  fprintf (cgraph_dump_file, "\n");
	}
      gcc_checking_assert (lto_symtab_encoder_size (part->encoder) || !i);

      lto_set_current_out_file (file);

      ipa_write_optimization_summaries (part->encoder);

      lto_set_current_out_file (NULL);
      lto_obj_file_close (file);
      free (file);
      part->encoder = NULL;

      /* Append this unit's file name to the LTRANS output list.  */
      len = strlen (temp_filename);
      if (fwrite (temp_filename, 1, len, ltrans_output_list_stream) < len
	  || fwrite ("\n", 1, 1, ltrans_output_list_stream) < 1)
	fatal_error ("writing to LTRANS output list %s: %m",
		     ltrans_output_list);
    }

  lto_stats.num_output_files += n_sets;

  /* Close the LTRANS output list.  */
  if (fclose (ltrans_output_list_stream))
    fatal_error ("closing LTRANS output list %s: %m", ltrans_output_list);

  free_ltrans_partitions();
  free (temp_filename);

  timevar_pop (TV_WHOPR_WPA_IO);
}
2660
2661
/* If TT is a variable or function decl replace it with its
   prevailing variant.  Note: TT is evaluated more than once, so only
   pass simple lvalue expressions.  */
#define LTO_SET_PREVAIL(tt) \
   do {\
	if ((tt) && VAR_OR_FUNCTION_DECL_P (tt)) \
	  tt = lto_symtab_prevailing_decl (tt); \
   } while (0)

/* Ensure that TT isn't a replaceable var or function decl.  */
#define LTO_NO_PREVAIL(tt) \
  gcc_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
2673
/* Given a tree T replace all fields referring to variables or functions
   with their prevailing variant.  Fields that must never hold a
   replaceable decl are checked with LTO_NO_PREVAIL instead.  */
static void
lto_fixup_prevailing_decls (tree t)
{
  enum tree_code code = TREE_CODE (t);
  LTO_NO_PREVAIL (TREE_TYPE (t));
  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    LTO_NO_PREVAIL (TREE_CHAIN (t));
  if (DECL_P (t))
    {
      /* Declarations: fix up the fields that may reference other
	 variable or function decls.  */
      LTO_NO_PREVAIL (DECL_NAME (t));
      LTO_SET_PREVAIL (DECL_CONTEXT (t));
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
	{
	  LTO_SET_PREVAIL (DECL_SIZE (t));
	  LTO_SET_PREVAIL (DECL_SIZE_UNIT (t));
	  LTO_SET_PREVAIL (DECL_INITIAL (t));
	  LTO_NO_PREVAIL (DECL_ATTRIBUTES (t));
	  LTO_SET_PREVAIL (DECL_ABSTRACT_ORIGIN (t));
	}
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
	{
	  LTO_NO_PREVAIL (t->decl_with_vis.assembler_name);
	  LTO_NO_PREVAIL (DECL_SECTION_NAME (t));
	}
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
	{
	  LTO_NO_PREVAIL (DECL_ARGUMENT_FLD (t));
	  LTO_NO_PREVAIL (DECL_RESULT_FLD (t));
	  LTO_NO_PREVAIL (DECL_VINDEX (t));
	}
      if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
	LTO_SET_PREVAIL (DECL_FUNCTION_PERSONALITY (t));
      if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
	{
	  LTO_NO_PREVAIL (DECL_FIELD_OFFSET (t));
	  LTO_NO_PREVAIL (DECL_BIT_FIELD_TYPE (t));
	  LTO_NO_PREVAIL (DECL_QUALIFIER (t));
	  LTO_NO_PREVAIL (DECL_FIELD_BIT_OFFSET (t));
	  LTO_NO_PREVAIL (DECL_FCONTEXT (t));
	}
    }
  else if (TYPE_P (t))
    {
      /* Types: sizes and bounds may reference decls.  */
      LTO_NO_PREVAIL (TYPE_CACHED_VALUES (t));
      LTO_SET_PREVAIL (TYPE_SIZE (t));
      LTO_SET_PREVAIL (TYPE_SIZE_UNIT (t));
      LTO_NO_PREVAIL (TYPE_ATTRIBUTES (t));
      LTO_NO_PREVAIL (TYPE_NAME (t));

      LTO_SET_PREVAIL (TYPE_MINVAL (t));
      LTO_SET_PREVAIL (TYPE_MAXVAL (t));
      LTO_SET_PREVAIL (t->type_non_common.binfo);

      LTO_SET_PREVAIL (TYPE_CONTEXT (t));

      LTO_NO_PREVAIL (TYPE_CANONICAL (t));
      LTO_NO_PREVAIL (TYPE_MAIN_VARIANT (t));
      LTO_NO_PREVAIL (TYPE_NEXT_VARIANT (t));
    }
  else if (EXPR_P (t))
    {
      /* Expressions: any operand may be a decl.  */
      int i;
      for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
	LTO_SET_PREVAIL (TREE_OPERAND (t, i));
    }
  else
    {
      switch (code)
	{
	case TREE_LIST:
	  LTO_SET_PREVAIL (TREE_VALUE (t));
	  LTO_SET_PREVAIL (TREE_PURPOSE (t));
	  break;
	default:
	  gcc_unreachable ();
	}
    }
}
#undef LTO_SET_PREVAIL
#undef LTO_NO_PREVAIL
2756
2757 /* Helper function of lto_fixup_decls. Walks the var and fn streams in STATE,
2758 replaces var and function decls with the corresponding prevailing def. */
2759
2760 static void
2761 lto_fixup_state (struct lto_in_decl_state *state)
2762 {
2763 unsigned i, si;
2764 struct lto_tree_ref_table *table;
2765
2766 /* Although we only want to replace FUNCTION_DECLs and VAR_DECLs,
2767 we still need to walk from all DECLs to find the reachable
2768 FUNCTION_DECLs and VAR_DECLs. */
2769 for (si = 0; si < LTO_N_DECL_STREAMS; si++)
2770 {
2771 table = &state->streams[si];
2772 for (i = 0; i < table->size; i++)
2773 {
2774 tree *tp = table->trees + i;
2775 if (VAR_OR_FUNCTION_DECL_P (*tp))
2776 *tp = lto_symtab_prevailing_decl (*tp);
2777 }
2778 }
2779 }
2780
2781 /* A callback of htab_traverse. Just extracts a state from SLOT
2782 and calls lto_fixup_state. */
2783
2784 static int
2785 lto_fixup_state_aux (void **slot, void *aux ATTRIBUTE_UNUSED)
2786 {
2787 struct lto_in_decl_state *state = (struct lto_in_decl_state *) *slot;
2788 lto_fixup_state (state);
2789 return 1;
2790 }
2791
2792 /* Fix the decls from all FILES. Replaces each decl with the corresponding
2793 prevailing one. */
2794
2795 static void
2796 lto_fixup_decls (struct lto_file_decl_data **files)
2797 {
2798 unsigned int i;
2799 htab_iterator hi;
2800 tree t;
2801
2802 FOR_EACH_HTAB_ELEMENT (tree_with_vars, t, tree, hi)
2803 lto_fixup_prevailing_decls (t);
2804
2805 for (i = 0; files[i]; i++)
2806 {
2807 struct lto_file_decl_data *file = files[i];
2808 struct lto_in_decl_state *state = file->global_decl_state;
2809 lto_fixup_state (state);
2810
2811 htab_traverse (file->function_decl_states, lto_fixup_state_aux, NULL);
2812 }
2813 }
2814
2815 static GTY((length ("lto_stats.num_input_files + 1"))) struct lto_file_decl_data **all_file_decl_data;
2816
2817 /* Turn file datas for sub files into a single array, so that they look
2818 like separate files for further passes. */
2819
2820 static void
2821 lto_flatten_files (struct lto_file_decl_data **orig, int count, int last_file_ix)
2822 {
2823 struct lto_file_decl_data *n, *next;
2824 int i, k;
2825
2826 lto_stats.num_input_files = count;
2827 all_file_decl_data
2828 = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (count + 1);
2829 /* Set the hooks so that all of the ipa passes can read in their data. */
2830 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
2831 for (i = 0, k = 0; i < last_file_ix; i++)
2832 {
2833 for (n = orig[i]; n != NULL; n = next)
2834 {
2835 all_file_decl_data[k++] = n;
2836 next = n->next;
2837 n->next = NULL;
2838 }
2839 }
2840 all_file_decl_data[k] = NULL;
2841 gcc_assert (k == count);
2842 }
2843
/* Input file data before flattening (i.e. splitting them to subfiles to
   support incremental linking).  */
2846 static int real_file_count;
2847 static GTY((length ("real_file_count + 1"))) struct lto_file_decl_data **real_file_decl_data;
2848
2849 static void print_lto_report_1 (void);
2850
/* Read all the symbols from the input files FNAMES.  NFILES is the
   number of files requested in the command line.  Instantiate a
   global call graph by aggregating all the sub-graphs found in each
   file.  */

static void
read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
{
  unsigned int i, last_file_ix;
  FILE *resolution;
  struct cgraph_node *node;
  int count = 0;
  struct lto_file_decl_data **decl_data;

  init_cgraph ();

  timevar_push (TV_IPA_LTO_DECL_IN);

  real_file_decl_data
    = decl_data = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (nfiles + 1);
  real_file_count = nfiles;

  /* Read the resolution file.  */
  resolution = NULL;
  if (resolution_file_name)
    {
      int t;
      unsigned num_objects;

      resolution = fopen (resolution_file_name, "r");
      if (resolution == NULL)
	fatal_error ("could not open symbol resolution file: %m");

      t = fscanf (resolution, "%u", &num_objects);
      gcc_assert (t == 1);

      /* True, since the plugin splits the archives.  */
      gcc_assert (num_objects == nfiles);
    }

  /* Set up the tables used while reading decls: trees containing var
     references, and the GIMPLE type merging machinery.  All are freed
     again once reading is done.  */
  tree_with_vars = htab_create_ggc (101, htab_hash_pointer, htab_eq_pointer,
				    NULL);
  type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
				     tree_int_map_eq, NULL);
  type_pair_cache = XCNEWVEC (struct type_pair_d, GIMPLE_TYPE_PAIR_SIZE);
  gimple_type_leader = ggc_alloc_cleared_vec_gimple_type_leader_entry_s
				    (GIMPLE_TYPE_LEADER_SIZE);
  gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);

  if (!quiet_flag)
    fprintf (stderr, "Reading object files:");

  /* Read all of the object files specified on the command line.  */
  for (i = 0, last_file_ix = 0; i < nfiles; ++i)
    {
      struct lto_file_decl_data *file_data = NULL;
      if (!quiet_flag)
	{
	  fprintf (stderr, " %s", fnames[i]);
	  fflush (stderr);
	}

      current_lto_file = lto_obj_file_open (fnames[i], false);
      if (!current_lto_file)
	break;

      file_data = lto_file_read (current_lto_file, resolution, &count);
      if (!file_data)
	{
	  lto_obj_file_close (current_lto_file);
	  free (current_lto_file);
	  current_lto_file = NULL;
	  break;
	}

      decl_data[last_file_ix++] = file_data;

      lto_obj_file_close (current_lto_file);
      free (current_lto_file);
      current_lto_file = NULL;
      /* Collect after each file to keep memory usage down while
	 reading.  */
      ggc_collect ();
    }

  lto_flatten_files (decl_data, count, last_file_ix);
  lto_stats.num_input_files = count;
  ggc_free(decl_data);
  real_file_decl_data = NULL;

  if (resolution_file_name)
    fclose (resolution);

  /* Show the LTO report before launching LTRANS.  */
  if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
    print_lto_report_1 ();

  /* Free gimple type merging datastructures.  */
  htab_delete (gimple_types);
  gimple_types = NULL;
  htab_delete (type_hash_cache);
  type_hash_cache = NULL;
  free (type_pair_cache);
  type_pair_cache = NULL;
  gimple_type_leader = NULL;
  free_gimple_type_tables ();
  ggc_collect ();

  /* Set the hooks so that all of the ipa passes can read in their data.  */
  lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);

  timevar_pop (TV_IPA_LTO_DECL_IN);

  if (!quiet_flag)
    fprintf (stderr, "\nReading the callgraph\n");

  timevar_push (TV_IPA_LTO_CGRAPH_IO);
  /* Read the symtab.  */
  input_symtab ();

  /* Store resolutions into the symbol table.  */
  if (resolution_map)
    {
      void **res;
      symtab_node snode;

      FOR_EACH_SYMBOL (snode)
	if (symtab_real_symbol_p (snode)
	    && (res = pointer_map_contains (resolution_map,
					    snode->symbol.decl)))
	  snode->symbol.resolution
	    = (enum ld_plugin_symbol_resolution)(size_t)*res;

      pointer_map_destroy (resolution_map);
      resolution_map = NULL;
    }

  timevar_pop (TV_IPA_LTO_CGRAPH_IO);

  if (!quiet_flag)
    fprintf (stderr, "Merging declarations\n");

  timevar_push (TV_IPA_LTO_DECL_MERGE);
  /* Merge global decls.  In ltrans mode we read merged cgraph, we do not
     need to care about resolving symbols again, we only need to replace
     duplicated declarations read from the callgraph and from function
     sections.  */
  if (!flag_ltrans)
    {
      lto_symtab_merge_decls ();

      /* If there were errors during symbol merging bail out, we have no
	 good way to recover here.  */
      if (seen_error ())
	fatal_error ("errors during merging of translation units");

      /* Fixup all decls.  */
      lto_fixup_decls (all_file_decl_data);
    }
  htab_delete (tree_with_vars);
  tree_with_vars = NULL;
  ggc_collect ();

  timevar_pop (TV_IPA_LTO_DECL_MERGE);
  /* Each pass will set the appropriate timer.  */

  if (!quiet_flag)
    fprintf (stderr, "Reading summaries\n");

  /* Read the IPA summary data.  */
  if (flag_ltrans)
    ipa_read_optimization_summaries ();
  else
    ipa_read_summaries ();

  /* The encoders are no longer needed once the summaries are in.  */
  for (i = 0; all_file_decl_data[i]; i++)
    {
      gcc_assert (all_file_decl_data[i]->symtab_node_encoder);
      lto_symtab_encoder_delete (all_file_decl_data[i]->symtab_node_encoder);
      all_file_decl_data[i]->symtab_node_encoder = NULL;
    }

  /* Finally merge the cgraph according to the decl merging decisions.  */
  timevar_push (TV_IPA_LTO_CGRAPH_MERGE);
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Before merging:\n");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  lto_symtab_merge_cgraph_nodes ();
  ggc_collect ();

  /* FIXME: ipa_transforms_to_apply holds list of passes that have optimization
     summaries computed and needs to apply changes.  At the moment WHOPR only
     supports inlining, so we can push it here by hand.  In future we need to stream
     this field into ltrans compilation.  */
  if (flag_ltrans)
    FOR_EACH_DEFINED_FUNCTION (node)
      node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass)&pass_ipa_inline);

  timevar_pop (TV_IPA_LTO_CGRAPH_MERGE);

  timevar_push (TV_IPA_LTO_DECL_INIT_IO);

  /* Indicate that the cgraph is built and ready.  */
  cgraph_function_flags_ready = true;

  timevar_pop (TV_IPA_LTO_DECL_INIT_IO);
  ggc_free (all_file_decl_data);
  all_file_decl_data = NULL;
}
3061
3062
3063 /* Materialize all the bodies for all the nodes in the callgraph. */
3064
3065 static void
3066 materialize_cgraph (void)
3067 {
3068 tree decl;
3069 struct cgraph_node *node;
3070 unsigned i;
3071 timevar_id_t lto_timer;
3072
3073 if (!quiet_flag)
3074 fprintf (stderr,
3075 flag_wpa ? "Materializing decls:" : "Reading function bodies:");
3076
3077 /* Now that we have input the cgraph, we need to clear all of the aux
3078 nodes and read the functions if we are not running in WPA mode. */
3079 timevar_push (TV_IPA_LTO_GIMPLE_IN);
3080
3081 FOR_EACH_FUNCTION (node)
3082 {
3083 if (node->symbol.lto_file_data)
3084 {
3085 lto_materialize_function (node);
3086 lto_stats.num_input_cgraph_nodes++;
3087 }
3088 }
3089
3090 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
3091
3092 /* Start the appropriate timer depending on the mode that we are
3093 operating in. */
3094 lto_timer = (flag_wpa) ? TV_WHOPR_WPA
3095 : (flag_ltrans) ? TV_WHOPR_LTRANS
3096 : TV_LTO;
3097 timevar_push (lto_timer);
3098
3099 current_function_decl = NULL;
3100 set_cfun (NULL);
3101
3102 /* Inform the middle end about the global variables we have seen. */
3103 FOR_EACH_VEC_ELT (*lto_global_var_decls, i, decl)
3104 rest_of_decl_compilation (decl, 1, 0);
3105
3106 if (!quiet_flag)
3107 fprintf (stderr, "\n");
3108
3109 timevar_pop (lto_timer);
3110 }
3111
3112
3113 /* Show various memory usage statistics related to LTO. */
3114 static void
3115 print_lto_report_1 (void)
3116 {
3117 const char *pfx = (flag_lto) ? "LTO" : (flag_wpa) ? "WPA" : "LTRANS";
3118 fprintf (stderr, "%s statistics\n", pfx);
3119
3120 if (gimple_types)
3121 fprintf (stderr, "[%s] GIMPLE type table: size %ld, %ld elements, "
3122 "%ld searches, %ld collisions (ratio: %f)\n", pfx,
3123 (long) htab_size (gimple_types),
3124 (long) htab_elements (gimple_types),
3125 (long) gimple_types->searches,
3126 (long) gimple_types->collisions,
3127 htab_collisions (gimple_types));
3128 else
3129 fprintf (stderr, "[%s] GIMPLE type table is empty\n", pfx);
3130 if (type_hash_cache)
3131 fprintf (stderr, "[%s] GIMPLE type hash cache table: size %ld, %ld elements, "
3132 "%ld searches, %ld collisions (ratio: %f)\n", pfx,
3133 (long) htab_size (type_hash_cache),
3134 (long) htab_elements (type_hash_cache),
3135 (long) type_hash_cache->searches,
3136 (long) type_hash_cache->collisions,
3137 htab_collisions (type_hash_cache));
3138 else
3139 fprintf (stderr, "[%s] GIMPLE type hash cache table is empty\n", pfx);
3140
3141 print_gimple_types_stats (pfx);
3142 print_lto_report (pfx);
3143 }
3144
/* Perform whole program analysis (WPA) on the callgraph and write out the
   optimization plan.  */

static void
do_whole_program_analysis (void)
{
  symtab_node node;

  timevar_start (TV_PHASE_OPT_GEN);

  /* Note that since we are in WPA mode, materialize_cgraph will not
     actually read in all the function bodies.  It only materializes
     the decls and cgraph nodes so that analysis can be performed.  */
  materialize_cgraph ();

  /* Reading in the cgraph uses different timers, start timing WPA now.  */
  timevar_push (TV_WHOPR_WPA);

  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }

  cgraph_function_flags_ready = true;

  if (cgraph_dump_file)
    {
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  /* Prepare the obstack and cgraph state the regular IPA passes
     expect, then run them in analysis/decision mode and discard any
     symbols they made unreachable.  */
  bitmap_obstack_initialize (NULL);
  cgraph_state = CGRAPH_STATE_IPA_SSA;

  execute_ipa_pass_list (all_regular_ipa_passes);
  symtab_remove_unreachable_nodes (false, dump_file);

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  bitmap_obstack_release (NULL);

  /* We are about to launch the final LTRANS phase, stop the WPA timer.  */
  timevar_pop (TV_WHOPR_WPA);

  /* Split the callgraph into LTRANS partitions according to the
     -flto-partition= scheme (balanced is the default).  */
  timevar_push (TV_WHOPR_PARTITIONING);
  if (flag_lto_partition_1to1)
    lto_1_to_1_map ();
  else if (flag_lto_partition_max)
    lto_max_map ();
  else
    lto_balanced_map ();

  /* AUX pointers are used by partitioning code to bookkeep number of
     partitions symbol is in.  This is no longer needed.  */
  FOR_EACH_SYMBOL (node)
    node->symbol.aux = NULL;

  lto_stats.num_cgraph_partitions += ltrans_partitions.length ();
  timevar_pop (TV_WHOPR_PARTITIONING);

  timevar_stop (TV_PHASE_OPT_GEN);
  timevar_start (TV_PHASE_STREAM_OUT);

  if (!quiet_flag)
    {
      fprintf (stderr, "\nStreaming out");
      fflush (stderr);
    }
  /* Write each partition to its own object file together with the
     transformation decisions made above.  */
  lto_wpa_write_files ();
  if (!quiet_flag)
    fprintf (stderr, "\n");

  timevar_stop (TV_PHASE_STREAM_OUT);

  ggc_collect ();
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }

  /* Show the LTO report before launching LTRANS.  */
  if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
    print_lto_report_1 ();
  if (mem_report_wpa)
    dump_memory_report (true);
}
3239
3240
3241 static GTY(()) tree lto_eh_personality_decl;
3242
3243 /* Return the LTO personality function decl. */
3244
3245 tree
3246 lto_eh_personality (void)
3247 {
3248 if (!lto_eh_personality_decl)
3249 {
3250 /* Use the first personality DECL for our personality if we don't
3251 support multiple ones. This ensures that we don't artificially
3252 create the need for them in a single-language program. */
3253 if (first_personality_decl && !dwarf2out_do_cfi_asm ())
3254 lto_eh_personality_decl = first_personality_decl;
3255 else
3256 lto_eh_personality_decl = lhd_gcc_personality ();
3257 }
3258
3259 return lto_eh_personality_decl;
3260 }
3261
/* Set the process name based on the LTO mode, so that tools such as
   top and ps show which LTO personality this lto1 is running as.  */

static void
lto_process_name (void)
{
  /* NOTE(review): the checks are deliberately independent ifs, not an
     else-if chain; the mode flags look mutually exclusive, but if more
     than one were set the last matching call would win -- confirm
     against the option machinery before changing this.  */
  if (flag_lto)
    setproctitle ("lto1-lto");
  if (flag_wpa)
    setproctitle ("lto1-wpa");
  if (flag_ltrans)
    setproctitle ("lto1-ltrans");
}
3274
3275
/* Initialize the LTO front end.  Called once from lto_main before any
   input file is read; the initialization order below matters.  */

static void
lto_init (void)
{
  /* Name the process lto1-lto/-wpa/-ltrans for ps/top.  */
  lto_process_name ();
  /* Install the streamer hooks and readers before any section is
     consumed.  */
  lto_streamer_hooks_init ();
  lto_reader_init ();
  /* Section data is fetched and released through these callbacks.  */
  lto_set_in_hooks (NULL, get_section_data, free_section_data);
  /* Start this invocation with clean statistics.  */
  memset (&lto_stats, 0, sizeof (lto_stats));
  bitmap_obstack_initialize (NULL);
  gimple_register_cfg_hooks ();
}
3289
3290
/* Main entry point for the GIMPLE front end.  This front end has
   three main personalities:

   - LTO (-flto).  All the object files on the command line are
     loaded in memory and processed as a single translation unit.
     This is the traditional link-time optimization behavior.

   - WPA (-fwpa).  Only the callgraph and summary information for
     files in the command file are loaded.  A single callgraph
     (without function bodies) is instantiated for the whole set of
     files.  IPA passes are only allowed to analyze the call graph
     and make transformation decisions.  The callgraph is
     partitioned, each partition is written to a new object file
     together with the transformation decisions.

   - LTRANS (-fltrans).  Similar to -flto but it prevents the IPA
     summary files from running again.  Since WPA computed summary
     information and decided what transformations to apply, LTRANS
     simply applies them.  */

void
lto_main (void)
{
  /* LTO is called as a front end, even though it is not a front end.
     Because it is called as a front end, TV_PHASE_PARSING and
     TV_PARSE_GLOBAL are active, and we need to turn them off while
     doing LTO.  Later we turn them back on so they are active up in
     toplev.c.  */
  timevar_pop (TV_PARSE_GLOBAL);
  timevar_stop (TV_PHASE_PARSING);

  timevar_start (TV_PHASE_SETUP);

  /* Initialize the LTO front end.  */
  lto_init ();

  timevar_stop (TV_PHASE_SETUP);
  timevar_start (TV_PHASE_STREAM_IN);

  /* Read all the symbols and call graph from all the files in the
     command line.  */
  read_cgraph_and_symbols (num_in_fnames, in_fnames);

  timevar_stop (TV_PHASE_STREAM_IN);

  /* Only proceed to analysis/codegen if streaming in succeeded.  */
  if (!seen_error ())
    {
      /* If WPA is enabled analyze the whole call graph and create an
	 optimization plan.  Otherwise, read in all the function
	 bodies and continue with optimization.  */
      if (flag_wpa)
	do_whole_program_analysis ();
      else
	{
	  struct varpool_node *vnode;

	  timevar_start (TV_PHASE_OPT_GEN);

	  materialize_cgraph ();
	  /* In plain LTO mode (not LTRANS) promote statics here;
	     in LTRANS mode WPA already made the promotion decisions.  */
	  if (!flag_ltrans)
	    lto_promote_statics_nonwpa ();

	  /* Let the middle end know that we have read and merged all of
	     the input files.  */
	  compile ();

	  timevar_stop (TV_PHASE_OPT_GEN);

	  /* FIXME lto, if the processes spawned by WPA fail, we miss
	     the chance to print WPA's report, so WPA will call
	     print_lto_report before launching LTRANS.  If LTRANS was
	     launched directly by the driver we would not need to do
	     this.  */
	  if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
	    print_lto_report_1 ();

	  /* Record the global variables.  */
	  FOR_EACH_DEFINED_VARIABLE (vnode)
	    vec_safe_push (lto_global_var_decls, vnode->symbol.decl);
	}
    }

  /* Here we make LTO pretend to be a parser.  Restore the timers that
     were suspended at function entry so toplev.c can pop them.  */
  timevar_start (TV_PHASE_PARSING);
  timevar_push (TV_PARSE_GLOBAL);
}
3377
3378 #include "gt-lto-lto.h"