/* Extracted from gcc.git: gcc/lto/lto.c.
   Associated ChangeLog fragment:
   lto-cgraph.c (lto_symtab_encoder_new): New parameter FOR_INPUT.  */
1 /* Top-level LTO routines.
2 Copyright 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
3 Contributed by CodeSourcery, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "opts.h"
25 #include "toplev.h"
26 #include "tree.h"
27 #include "tree-flow.h"
28 #include "diagnostic-core.h"
29 #include "tm.h"
30 #include "cgraph.h"
31 #include "ggc.h"
32 #include "tree-ssa-operands.h"
33 #include "tree-pass.h"
34 #include "langhooks.h"
35 #include "vec.h"
36 #include "bitmap.h"
37 #include "pointer-set.h"
38 #include "ipa-prop.h"
39 #include "common.h"
40 #include "debug.h"
41 #include "gimple.h"
42 #include "lto.h"
43 #include "lto-tree.h"
44 #include "lto-streamer.h"
45 #include "tree-streamer.h"
46 #include "splay-tree.h"
47 #include "lto-partition.h"
48
49 static GTY(()) tree first_personality_decl;
50
51 /* Returns a hash code for P. */
52
53 static hashval_t
54 hash_name (const void *p)
55 {
56 const struct lto_section_slot *ds = (const struct lto_section_slot *) p;
57 return (hashval_t) htab_hash_string (ds->name);
58 }
59
60
61 /* Returns nonzero if P1 and P2 are equal. */
62
63 static int
64 eq_name (const void *p1, const void *p2)
65 {
66 const struct lto_section_slot *s1 =
67 (const struct lto_section_slot *) p1;
68 const struct lto_section_slot *s2 =
69 (const struct lto_section_slot *) p2;
70
71 return strcmp (s1->name, s2->name) == 0;
72 }
73
74 /* Free lto_section_slot */
75
76 static void
77 free_with_string (void *arg)
78 {
79 struct lto_section_slot *s = (struct lto_section_slot *)arg;
80
81 free (CONST_CAST (char *, s->name));
82 free (arg);
83 }
84
85 /* Create section hash table */
86
87 htab_t
88 lto_obj_create_section_hash_table (void)
89 {
90 return htab_create (37, hash_name, eq_name, free_with_string);
91 }
92
/* Delete an allocated integer KEY in the splay tree.  Keys are
   heap-allocated HOST_WIDE_INTs (see lto_splay_tree_insert), so the
   tree owns them and must free them on removal.  */

static void
lto_splay_tree_delete_id (splay_tree_key key)
{
  free ((void *) key);
}
100
101 /* Compare splay tree node ids A and B. */
102
103 static int
104 lto_splay_tree_compare_ids (splay_tree_key a, splay_tree_key b)
105 {
106 unsigned HOST_WIDE_INT ai;
107 unsigned HOST_WIDE_INT bi;
108
109 ai = *(unsigned HOST_WIDE_INT *) a;
110 bi = *(unsigned HOST_WIDE_INT *) b;
111
112 if (ai < bi)
113 return -1;
114 else if (ai > bi)
115 return 1;
116 return 0;
117 }
118
/* Look up splay tree node by ID in splay tree T.  Passing the address
   of the local ID is safe because the comparison function dereferences
   keys rather than comparing pointers, and the lookup does not retain
   the key.  */

static splay_tree_node
lto_splay_tree_lookup (splay_tree t, unsigned HOST_WIDE_INT id)
{
  return splay_tree_lookup (t, (splay_tree_key) &id);
}
126
127 /* Check if KEY has ID. */
128
129 static bool
130 lto_splay_tree_id_equal_p (splay_tree_key key, unsigned HOST_WIDE_INT id)
131 {
132 return *(unsigned HOST_WIDE_INT *) key == id;
133 }
134
135 /* Insert a splay tree node into tree T with ID as key and FILE_DATA as value.
136 The ID is allocated separately because we need HOST_WIDE_INTs which may
137 be wider than a splay_tree_key. */
138
139 static void
140 lto_splay_tree_insert (splay_tree t, unsigned HOST_WIDE_INT id,
141 struct lto_file_decl_data *file_data)
142 {
143 unsigned HOST_WIDE_INT *idp = XCNEW (unsigned HOST_WIDE_INT);
144 *idp = id;
145 splay_tree_insert (t, (splay_tree_key) idp, (splay_tree_value) file_data);
146 }
147
148 /* Create a splay tree. */
149
150 static splay_tree
151 lto_splay_tree_new (void)
152 {
153 return splay_tree_new (lto_splay_tree_compare_ids,
154 lto_splay_tree_delete_id,
155 NULL);
156 }
157
/* Return true when NODE has a clone that is analyzed (i.e. we need
   to load its body even if the node itself is not needed).

   Walks the clone tree rooted at NODE in preorder: descend into
   clones first, then siblings, and climb back through clone_of when a
   subtree is exhausted.  Terminates when the walk returns to NODE.  */

static bool
has_analyzed_clone_p (struct cgraph_node *node)
{
  struct cgraph_node *orig = node;
  node = node->clones;
  if (node)
    while (node != orig)
      {
	if (node->analyzed)
	  return true;
	/* Preorder: first child if any, else next sibling.  */
	if (node->clones)
	  node = node->clones;
	else if (node->next_sibling_clone)
	  node = node->next_sibling_clone;
	else
	  {
	    /* No child or sibling: climb toward the root until a node
	       with an unvisited sibling is found (or we reach ORIG).  */
	    while (node != orig && !node->next_sibling_clone)
	      node = node->clone_of;
	    if (node != orig)
	      node = node->next_sibling_clone;
	  }
      }
  return false;
}
185
/* Read the function body for the function associated with NODE.

   Bodies are read for analyzed nodes and for nodes needed to produce
   virtual clones, except in WPA mode where only summaries are used.
   As a side effect records the first personality routine seen in
   first_personality_decl and updates lto_stats.  */

static void
lto_materialize_function (struct cgraph_node *node)
{
  tree decl;
  struct lto_file_decl_data *file_data;
  const char *data, *name;
  size_t len;

  decl = node->symbol.decl;
  /* Read in functions with body (analyzed nodes)
     and also functions that are needed to produce virtual clones.  */
  if (cgraph_function_with_gimple_body_p (node) || has_analyzed_clone_p (node))
    {
      /* Clones don't need to be read.  */
      if (node->clone_of)
	return;

      /* Load the function body only if not operating in WPA mode.  In
	 WPA mode, the body of the function is not needed.  */
      if (!flag_wpa)
	{
	  file_data = node->symbol.lto_file_data;
	  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));

	  /* We may have renamed the declaration, e.g., a static function.  */
	  name = lto_get_decl_name_mapping (file_data, name);

	  data = lto_get_section_data (file_data, LTO_section_function_body,
				       name, &len);
	  if (!data)
	    fatal_error ("%s: section %s is missing",
			 file_data->file_name,
			 name);

	  /* The body must not have been read already.  */
	  gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);

	  /* Set up cfun for DECL before streaming its body in, and tear
	     it down (pop_cfun) when done.  */
	  push_struct_function (decl);
	  announce_function (decl);
	  lto_input_function_body (file_data, decl, data);
	  if (DECL_FUNCTION_PERSONALITY (decl) && !first_personality_decl)
	    first_personality_decl = DECL_FUNCTION_PERSONALITY (decl);
	  lto_stats.num_function_bodies++;
	  lto_free_section_data (file_data, LTO_section_function_body, name,
				 data, len);
	  pop_cfun ();
	  /* Bodies can be large; give the GC a chance after each one.  */
	  ggc_collect ();
	}
    }

  /* Let the middle end know about the function.  */
  rest_of_decl_compilation (decl, 1, 0);
}
240
241
/* Decode the content of memory pointed to by DATA in the in decl
   state object STATE.  DATA_IN points to a data_in structure for
   decoding.  Return the address after the decoded object in the
   input.

   Wire format: one cache index for the function decl (void_type_node
   encodes "no function", i.e. the global decl state), followed by
   LTO_N_DECL_STREAMS streams, each a size word followed by that many
   cache indices.  */

static const uint32_t *
lto_read_in_decl_state (struct data_in *data_in, const uint32_t *data,
			struct lto_in_decl_state *state)
{
  uint32_t ix;
  tree decl;
  uint32_t i, j;

  /* First word: reader-cache index of the owning FUNCTION_DECL, or of
     void_type_node for the per-file global state.  */
  ix = *data++;
  decl = streamer_tree_cache_get (data_in->reader_cache, ix);
  if (TREE_CODE (decl) != FUNCTION_DECL)
    {
      gcc_assert (decl == void_type_node);
      decl = NULL_TREE;
    }
  state->fn_decl = decl;

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      uint32_t size = *data++;
      tree *decls = ggc_alloc_vec_tree (size);

      /* Resolve each cache index to its tree node.  */
      for (j = 0; j < size; j++)
	decls[j] = streamer_tree_cache_get (data_in->reader_cache, data[j]);

      state->streams[i].size = size;
      state->streams[i].trees = decls;
      data += size;
    }

  return data;
}
279
280
281
282 /* Global type table. FIXME, it should be possible to re-use some
283 of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
284 etc), but those assume that types were built with the various
285 build_*_type routines which is not the case with the streamer. */
286 static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
287 htab_t gimple_types;
288 static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
289 htab_t type_hash_cache;
290
291 static hashval_t gimple_type_hash (const void *);
292
/* Structure used to maintain a cache of some type pairs compared by
   gimple_types_compatible_p when comparing aggregate types.  There are
   three possible values for SAME_P:

	-2: The pair (T1, T2) has just been inserted in the table.
	 0: T1 and T2 are different types.
	 1: T1 and T2 are the same type.  */

struct type_pair_d
{
  /* The two TYPE_UIDs, stored with uid1 < uid2 so that a (T1, T2)
     query and its (T2, T1) mirror hit the same entry.  */
  unsigned int uid1;
  unsigned int uid2;
  /* Comparison state for the pair; see the values above.  */
  signed char same_p;
};
typedef struct type_pair_d *type_pair_t;
DEF_VEC_P(type_pair_t);
DEF_VEC_ALLOC_P(type_pair_t,heap);
310
311 #define GIMPLE_TYPE_PAIR_SIZE 16381
312 struct type_pair_d *type_pair_cache;
313
314
/* Lookup the pair of types T1 and T2 in the direct-mapped pair cache.
   Insert a fresh entry (SAME_P == -2) if none existed, possibly
   evicting an unrelated pair that hashed to the same slot.  */

static inline type_pair_t
lookup_type_pair (tree t1, tree t2)
{
  unsigned int index;
  unsigned int uid1, uid2;

  /* Canonicalize the pair so lookups are order-independent.  */
  if (TYPE_UID (t1) < TYPE_UID (t2))
    {
      uid1 = TYPE_UID (t1);
      uid2 = TYPE_UID (t2);
    }
  else
    {
      uid1 = TYPE_UID (t2);
      uid2 = TYPE_UID (t1);
    }
  gcc_checking_assert (uid1 != uid2);

  /* Using iterative_hash_hashval_t here would imply function calls;
     since UIDs are in a limited range, a simple shift-and-add mix
     suffices for the direct-mapped cache index.  */
  index = ((((unsigned HOST_WIDE_INT)uid1 << HOST_BITS_PER_WIDE_INT / 2) + uid2)
	   % GIMPLE_TYPE_PAIR_SIZE);
  if (type_pair_cache [index].uid1 == uid1
      && type_pair_cache [index].uid2 == uid2)
    return &type_pair_cache[index];

  /* Slot miss: overwrite with this pair in the "just inserted" state.  */
  type_pair_cache [index].uid1 = uid1;
  type_pair_cache [index].uid2 = uid2;
  type_pair_cache [index].same_p = -2;

  return &type_pair_cache[index];
}
350
/* Per pointer state for the SCC finding.  The on_sccstack flag
   is not strictly required, it is true when there is no hash value
   recorded for the type and false otherwise.  But querying that
   is slower.  */

struct sccs
{
  /* DFS discovery number of this node.  */
  unsigned int dfsnum;
  /* Lowest dfsnum reachable from this node (Tarjan's lowlink).  */
  unsigned int low;
  bool on_sccstack;
  union {
    /* Hash value, used by the type-hashing walk.  */
    hashval_t hash;
    /* Comparison state, used by the type-comparison walk.  */
    signed char same_p;
  } u;
};
366
367 static unsigned int next_dfs_num;
368 static unsigned int gtc_next_dfs_num;
369
/* GIMPLE type merging cache.  A direct-mapped cache based on TYPE_UID.  */

typedef struct GTY(()) gimple_type_leader_entry_s {
  /* The type this slot currently caches (slot owner).  */
  tree type;
  /* The merged leader recorded for TYPE.  */
  tree leader;
} gimple_type_leader_entry;

#define GIMPLE_TYPE_LEADER_SIZE 16381
static GTY((length("GIMPLE_TYPE_LEADER_SIZE")))
  gimple_type_leader_entry *gimple_type_leader;
380
381 /* Lookup an existing leader for T and return it or NULL_TREE, if
382 there is none in the cache. */
383
384 static inline tree
385 gimple_lookup_type_leader (tree t)
386 {
387 gimple_type_leader_entry *leader;
388
389 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
390 if (leader->type != t)
391 return NULL_TREE;
392
393 return leader->leader;
394 }
395
396
/* Return true if T1 and T2 have the same name.  Two unnamed types
   compare equal; a named and an unnamed type do not.  Names are
   reduced to IDENTIFIER_NODEs (through TYPE_DECL's DECL_NAME when
   needed) and compared by pointer identity.  */

static bool
compare_type_names_p (tree t1, tree t2)
{
  tree name1 = TYPE_NAME (t1);
  tree name2 = TYPE_NAME (t2);

  /* Exactly one name missing: different.  */
  if ((name1 != NULL_TREE) != (name2 != NULL_TREE))
    return false;

  /* Both anonymous: same.  */
  if (name1 == NULL_TREE)
    return true;

  /* Either both should be a TYPE_DECL or both an IDENTIFIER_NODE.  */
  if (TREE_CODE (name1) != TREE_CODE (name2))
    return false;

  if (TREE_CODE (name1) == TYPE_DECL)
    name1 = DECL_NAME (name1);
  gcc_checking_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);

  if (TREE_CODE (name2) == TYPE_DECL)
    name2 = DECL_NAME (name2);
  gcc_checking_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);

  /* Identifiers can be compared with pointer equality rather
     than a string comparison.  */
  if (name1 == name2)
    return true;

  return false;
}
432
433 static bool
434 gimple_types_compatible_p_1 (tree, tree, type_pair_t,
435 VEC(type_pair_t, heap) **,
436 struct pointer_map_t *, struct obstack *);
437
/* DFS visit the edge from the callers type pair with state *STATE to
   the pair T1, T2.
   Update the merging status if it is not part of the SCC containing the
   callers pair and return it.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.

   The leading checks mirror gimple_types_compatible_p so that cheap
   structural mismatches never touch the pair cache or the SCC
   machinery.  */

static bool
gtc_visit (tree t1, tree t2,
	   struct sccs *state,
	   VEC(type_pair_t, heap) **sccstack,
	   struct pointer_map_t *sccstate,
	   struct obstack *sccstate_obstack)
{
  struct sccs *cstate = NULL;
  type_pair_t p;
  void **slot;
  tree leader1, leader2;

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
	return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
	  || FIXED_POINT_TYPE_P (t1))
	return true;

      /* For other types fall through to more complex checks.  */
    }

  /* If the types have been previously registered and found equal
     they still are.  */
  leader1 = gimple_lookup_type_leader (t1);
  leader2 = gimple_lookup_type_leader (t2);
  if (leader1 == t2
      || t1 == leader2
      || (leader1 && leader1 == leader2))
    return true;

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keeping the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash (t1) != gimple_type_hash (t2))
    return false;

  /* Allocate a new cache entry for this comparison.  */
  p = lookup_type_pair (t1, t2);
  if (p->same_p == 0 || p->same_p == 1)
    {
      /* We have already decided whether T1 and T2 are the
	 same, return the cached result.  */
      return p->same_p == 1;
    }

  if ((slot = pointer_map_contains (sccstate, p)) != NULL)
    cstate = (struct sccs *)*slot;
  /* Not yet visited.  DFS recurse.  */
  if (!cstate)
    {
      gimple_types_compatible_p_1 (t1, t2, p,
				   sccstack, sccstate, sccstate_obstack);
      /* The recursion registered the pair in SCCSTATE; fetch its state.  */
      cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
      state->low = MIN (state->low, cstate->low);
    }
  /* If the type is still on the SCC stack adjust the parents low.  */
  if (cstate->dfsnum < state->dfsnum
      && cstate->on_sccstack)
    state->low = MIN (cstate->dfsnum, state->low);

  /* Return the current lattice value.  We start with an equality
     assumption so types part of a SCC will be optimistically
     treated equal unless proven otherwise.  */
  return cstate->u.same_p;
}
555
/* Worker for gimple_types_compatible_p.  Compare the pair T1, T2
   (whose cache entry is P) structurally, DFS-recursing through
   gtc_visit for referenced types so that mutually-recursive types are
   resolved as one SCC with a single optimistic answer.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static bool
gimple_types_compatible_p_1 (tree t1, tree t2, type_pair_t p,
			     VEC(type_pair_t, heap) **sccstack,
			     struct pointer_map_t *sccstate,
			     struct obstack *sccstate_obstack)
{
  struct sccs *state;

  /* Only freshly inserted pairs may enter the worker.  */
  gcc_assert (p->same_p == -2);

  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, p) = state;

  VEC_safe_push (type_pair_t, heap, *sccstack, p);
  state->dfsnum = gtc_next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;
  /* Start with an equality assumption.  As we DFS recurse into child
     SCCs this assumption may get revisited.  */
  state->u.same_p = 1;

  /* The struct tags shall compare equal.  */
  if (!compare_type_names_p (t1, t2))
    goto different_types;

  /* We may not merge typedef types to the same type in different
     contexts.  */
  if (TYPE_NAME (t1)
      && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
      && DECL_CONTEXT (TYPE_NAME (t1))
      && TYPE_P (DECL_CONTEXT (TYPE_NAME (t1))))
    {
      if (!gtc_visit (DECL_CONTEXT (TYPE_NAME (t1)),
		      DECL_CONTEXT (TYPE_NAME (t2)),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;
    }

  /* If their attributes are not the same they can't be the same type.  */
  if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
    goto different_types;

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case VECTOR_TYPE:
    case COMPLEX_TYPE:
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;
      goto same_types;

    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
	 the number of elements are the same.  */
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
		      state, sccstack, sccstate, sccstate_obstack)
	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
	goto different_types;
      else
	{
	  tree i1 = TYPE_DOMAIN (t1);
	  tree i2 = TYPE_DOMAIN (t2);

	  /* For an incomplete external array, the type domain can be
	     NULL_TREE.  Check this condition also.  */
	  if (i1 == NULL_TREE && i2 == NULL_TREE)
	    goto same_types;
	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
	    goto different_types;
	  else
	    {
	      tree min1 = TYPE_MIN_VALUE (i1);
	      tree min2 = TYPE_MIN_VALUE (i2);
	      tree max1 = TYPE_MAX_VALUE (i1);
	      tree max2 = TYPE_MAX_VALUE (i2);

	      /* The minimum/maximum values have to be the same.
		 PLACEHOLDER_EXPR bounds (variable-size Ada/VLA arrays)
		 compare equal to each other.  */
	      if ((min1 == min2
		   || (min1 && min2
		       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
			    && TREE_CODE (min2) == PLACEHOLDER_EXPR)
			   || operand_equal_p (min1, min2, 0))))
		  && (max1 == max2
		      || (max1 && max2
			  && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
			       && TREE_CODE (max2) == PLACEHOLDER_EXPR)
			      || operand_equal_p (max1, max2, 0)))))
		goto same_types;
	      else
		goto different_types;
	    }
	}

    case METHOD_TYPE:
      /* Method types should belong to the same class.  */
      if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;

      /* Fallthru  */

    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
	 are the same.  */
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;

      if (!comp_type_attributes (t1, t2))
	goto different_types;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
	goto same_types;
      else
	{
	  tree parms1, parms2;

	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
	       parms1 && parms2;
	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
	    {
	      if (!gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2),
			      state, sccstack, sccstate, sccstate_obstack))
		goto different_types;
	    }

	  /* Argument lists of different length are different types.  */
	  if (parms1 || parms2)
	    goto different_types;

	  goto same_types;
	}

    case OFFSET_TYPE:
      {
	if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
			state, sccstack, sccstate, sccstate_obstack)
	    || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
			   TYPE_OFFSET_BASETYPE (t2),
			   state, sccstack, sccstate, sccstate_obstack))
	  goto different_types;

	goto same_types;
      }

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
	/* If the two pointers have different ref-all attributes,
	   they can't be the same type.  */
	if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
	  goto different_types;

	/* Otherwise, pointer and reference types are the same if the
	   pointed-to types are the same.  */
	if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
		       state, sccstack, sccstate, sccstate_obstack))
	  goto same_types;

	goto different_types;
      }

    case INTEGER_TYPE:
    case BOOLEAN_TYPE:
      {
	tree min1 = TYPE_MIN_VALUE (t1);
	tree max1 = TYPE_MAX_VALUE (t1);
	tree min2 = TYPE_MIN_VALUE (t2);
	tree max2 = TYPE_MAX_VALUE (t2);
	bool min_equal_p = false;
	bool max_equal_p = false;

	/* If either type has a minimum value, the other type must
	   have the same.  */
	if (min1 == NULL_TREE && min2 == NULL_TREE)
	  min_equal_p = true;
	else if (min1 && min2 && operand_equal_p (min1, min2, 0))
	  min_equal_p = true;

	/* Likewise, if either type has a maximum value, the other
	   type must have the same.  */
	if (max1 == NULL_TREE && max2 == NULL_TREE)
	  max_equal_p = true;
	else if (max1 && max2 && operand_equal_p (max1, max2, 0))
	  max_equal_p = true;

	if (!min_equal_p || !max_equal_p)
	  goto different_types;

	goto same_types;
      }

    case ENUMERAL_TYPE:
      {
	/* FIXME lto, we cannot check bounds on enumeral types because
	   different front ends will produce different values.
	   In C, enumeral types are integers, while in C++ each element
	   will have its own symbolic value.  We should decide how enums
	   are to be represented in GIMPLE and have each front end lower
	   to that.  */
	tree v1, v2;

	/* For enumeral types, all the values must be the same.  */
	if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
	  goto same_types;

	for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
	     v1 && v2;
	     v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
	  {
	    tree c1 = TREE_VALUE (v1);
	    tree c2 = TREE_VALUE (v2);

	    /* Enumerators may be represented as CONST_DECLs; compare
	       their underlying integer values.  */
	    if (TREE_CODE (c1) == CONST_DECL)
	      c1 = DECL_INITIAL (c1);

	    if (TREE_CODE (c2) == CONST_DECL)
	      c2 = DECL_INITIAL (c2);

	    if (tree_int_cst_equal (c1, c2) != 1)
	      goto different_types;

	    if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
	      goto different_types;
	  }

	/* If one enumeration has more values than the other, they
	   are not the same.  */
	if (v1 || v2)
	  goto different_types;

	goto same_types;
      }

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f1, f2;

	/* For aggregate types, all the fields must be the same.  */
	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
	     f1 && f2;
	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	  {
	    /* Different field kinds are not compatible.  */
	    if (TREE_CODE (f1) != TREE_CODE (f2))
	      goto different_types;
	    /* Field decls must have the same name and offset.  */
	    if (TREE_CODE (f1) == FIELD_DECL
		&& (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
		    || !gimple_compare_field_offset (f1, f2)))
	      goto different_types;
	    /* All entities should have the same name and type.  */
	    if (DECL_NAME (f1) != DECL_NAME (f2)
		|| !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2),
			       state, sccstack, sccstate, sccstate_obstack))
	      goto different_types;
	  }

	/* If one aggregate has more fields than the other, they
	   are not the same.  */
	if (f1 || f2)
	  goto different_types;

	goto same_types;
      }

    default:
      gcc_unreachable ();
    }

  /* Common exit path for types that are not compatible.  */
different_types:
  state->u.same_p = 0;
  goto pop;

  /* Common exit path for types that are compatible.  */
same_types:
  gcc_assert (state->u.same_p == 1);

pop:
  if (state->low == state->dfsnum)
    {
      type_pair_t x;

      /* This pair is an SCC root.  Pop off the SCC and set its cache
	 values to the final comparison result.  */
      do
	{
	  struct sccs *cstate;
	  x = VEC_pop (type_pair_t, *sccstack);
	  cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	  cstate->on_sccstack = false;
	  x->same_p = state->u.same_p;
	}
      while (x != p);
    }

  return state->u.same_p;
}
861
/* Return true iff T1 and T2 are structurally identical.

   The prologue duplicates the cheap checks done by gtc_visit so the
   common non-matching cases never set up the SCC machinery; only real
   candidates reach gimple_types_compatible_p_1.  */

static bool
gimple_types_compatible_p (tree t1, tree t2)
{
  VEC(type_pair_t, heap) *sccstack = NULL;
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
  type_pair_t p = NULL;
  bool res;
  tree leader1, leader2;

  /* Before starting to set up the SCC machinery handle simple cases.  */

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
	return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
	  || FIXED_POINT_TYPE_P (t1))
	return true;

      /* For other types fall through to more complex checks.  */
    }

  /* If the types have been previously registered and found equal
     they still are.  */
  leader1 = gimple_lookup_type_leader (t1);
  leader2 = gimple_lookup_type_leader (t2);
  if (leader1 == t2
      || t1 == leader2
      || (leader1 && leader1 == leader2))
    return true;

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keeping the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash (t1) != gimple_type_hash (t2))
    return false;

  /* If we've visited this type pair before (in the case of aggregates
     with self-referential types), and we made a decision, return it.  */
  p = lookup_type_pair (t1, t2);
  if (p->same_p == 0 || p->same_p == 1)
    {
      /* We have already decided whether T1 and T2 are the
	 same, return the cached result.  */
      return p->same_p == 1;
    }

  /* Now set up the SCC machinery for the comparison.  */
  gtc_next_dfs_num = 1;
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  res = gimple_types_compatible_p_1 (t1, t2, p,
				     &sccstack, sccstate, &sccstate_obstack);
  VEC_free (type_pair_t, heap, sccstack);
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);

  return res;
}
970
971 static hashval_t
972 iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
973 struct pointer_map_t *, struct obstack *);
974
975 /* DFS visit the edge from the callers type with state *STATE to T.
976 Update the callers type hash V with the hash for T if it is not part
977 of the SCC containing the callers type and return it.
978 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
979
980 static hashval_t
981 visit (tree t, struct sccs *state, hashval_t v,
982 VEC (tree, heap) **sccstack,
983 struct pointer_map_t *sccstate,
984 struct obstack *sccstate_obstack)
985 {
986 struct sccs *cstate = NULL;
987 struct tree_int_map m;
988 void **slot;
989
990 /* If there is a hash value recorded for this type then it can't
991 possibly be part of our parent SCC. Simply mix in its hash. */
992 m.base.from = t;
993 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
994 && *slot)
995 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);
996
997 if ((slot = pointer_map_contains (sccstate, t)) != NULL)
998 cstate = (struct sccs *)*slot;
999 if (!cstate)
1000 {
1001 hashval_t tem;
1002 /* Not yet visited. DFS recurse. */
1003 tem = iterative_hash_gimple_type (t, v,
1004 sccstack, sccstate, sccstate_obstack);
1005 if (!cstate)
1006 cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
1007 state->low = MIN (state->low, cstate->low);
1008 /* If the type is no longer on the SCC stack and thus is not part
1009 of the parents SCC mix in its hash value. Otherwise we will
1010 ignore the type for hashing purposes and return the unaltered
1011 hash value. */
1012 if (!cstate->on_sccstack)
1013 return tem;
1014 }
1015 if (cstate->dfsnum < state->dfsnum
1016 && cstate->on_sccstack)
1017 state->low = MIN (cstate->dfsnum, state->low);
1018
1019 /* We are part of our parents SCC, skip this type during hashing
1020 and return the unaltered hash value. */
1021 return v;
1022 }
1023
1024 /* Hash NAME with the previous hash value V and return it. */
1025
1026 static hashval_t
1027 iterative_hash_name (tree name, hashval_t v)
1028 {
1029 if (!name)
1030 return v;
1031 v = iterative_hash_hashval_t (TREE_CODE (name), v);
1032 if (TREE_CODE (name) == TYPE_DECL)
1033 name = DECL_NAME (name);
1034 if (!name)
1035 return v;
1036 gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
1037 return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
1038 }
1039
/* A type, hashvalue pair for sorting SCC members.  */

struct type_hash_pair {
  /* The SCC member type.  */
  tree type;
  /* Its computed hash value, used as the sort key.  */
  hashval_t hash;
};
1046
1047 /* Compare two type, hashvalue pairs. */
1048
1049 static int
1050 type_hash_pair_compare (const void *p1_, const void *p2_)
1051 {
1052 const struct type_hash_pair *p1 = (const struct type_hash_pair *) p1_;
1053 const struct type_hash_pair *p2 = (const struct type_hash_pair *) p2_;
1054 if (p1->hash < p2->hash)
1055 return -1;
1056 else if (p1->hash > p2->hash)
1057 return 1;
1058 return 0;
1059 }
1060
/* Returning a hash value for gimple type TYPE combined with VAL.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.

   To hash a type we end up hashing in types that are reachable.
   Through pointers we can end up with cycles which messes up the
   required property that we need to compute the same hash value
   for structurally equivalent types.  To avoid this we have to
   hash all types in a cycle (the SCC) in a commutative way.  The
   easiest way is to not mix in the hashes of the SCC members at
   all.  To make this work we have to delay setting the hash
   values of the SCC until it is complete.

   This is Tarjan's SCC algorithm: each node gets a DFS number and a
   "low" link; when low == dfsnum after visiting all successors, the
   node is the root of an SCC and everything above it on SCCSTACK
   belongs to that SCC.  Completed SCC members have their final hash
   recorded in the global TYPE_HASH_CACHE.  */

static hashval_t
iterative_hash_gimple_type (tree type, hashval_t val,
			    VEC(tree, heap) **sccstack,
			    struct pointer_map_t *sccstate,
			    struct obstack *sccstate_obstack)
{
  hashval_t v;
  void **slot;
  struct sccs *state;

  /* Not visited during this DFS walk.  */
  gcc_checking_assert (!pointer_map_contains (sccstate, type));
  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, type) = state;

  VEC_safe_push (tree, heap, *sccstack, type);
  state->dfsnum = next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;

  /* Combine a few common features of types so that types are grouped into
     smaller sets; when searching for existing matching types to merge,
     only existing types having the same features as the new type will be
     checked.  */
  v = iterative_hash_name (TYPE_NAME (type), 0);
  if (TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_CONTEXT (TYPE_NAME (type))
      && TYPE_P (DECL_CONTEXT (TYPE_NAME (type))))
    v = visit (DECL_CONTEXT (TYPE_NAME (type)), state, v,
	       sccstack, sccstate, sccstate_obstack);
  v = iterative_hash_hashval_t (TREE_CODE (type), v);
  v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
  v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);

  /* Do not hash the types size as this will cause differences in
     hash values for the complete vs. the incomplete type variant.  */

  /* Incorporate common features of numerical types.  */
  if (INTEGRAL_TYPE_P (type)
      || SCALAR_FLOAT_TYPE_P (type)
      || FIXED_POINT_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
      v = iterative_hash_hashval_t (TYPE_MODE (type), v);
      v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
    }

  /* For pointer and reference types, fold in information about the type
     pointed to.  */
  if (POINTER_TYPE_P (type))
    v = visit (TREE_TYPE (type), state, v,
	       sccstack, sccstate, sccstate_obstack);

  /* For integer types hash the types min/max values and the string flag.  */
  if (TREE_CODE (type) == INTEGER_TYPE)
    {
      /* OMP lowering can introduce error_mark_node in place of
	 random local decls in types.  */
      if (TYPE_MIN_VALUE (type) != error_mark_node)
	v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
      if (TYPE_MAX_VALUE (type) != error_mark_node)
	v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
    }

  /* For array types hash the domain and the string flag.  */
  if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
    {
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
      v = visit (TYPE_DOMAIN (type), state, v,
		 sccstack, sccstate, sccstate_obstack);
    }

  /* Recurse for aggregates with a single element type.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    v = visit (TREE_TYPE (type), state, v,
	       sccstack, sccstate, sccstate_obstack);

  /* Incorporate function return and argument types.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      unsigned na;
      tree p;

      /* For method types also incorporate their parent class.  */
      if (TREE_CODE (type) == METHOD_TYPE)
	v = visit (TYPE_METHOD_BASETYPE (type), state, v,
		   sccstack, sccstate, sccstate_obstack);

      /* Check result and argument types.  */
      v = visit (TREE_TYPE (type), state, v,
		 sccstack, sccstate, sccstate_obstack);
      for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
	{
	  v = visit (TREE_VALUE (p), state, v,
		     sccstack, sccstate, sccstate_obstack);
	  na++;
	}

      v = iterative_hash_hashval_t (na, v);
    }

  if (RECORD_OR_UNION_TYPE_P (type))
    {
      unsigned nf;
      tree f;

      /* Hash in field names and types; the field count disambiguates
	 records whose common prefix hashes equally.  */
      for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
	{
	  v = iterative_hash_name (DECL_NAME (f), v);
	  v = visit (TREE_TYPE (f), state, v,
		     sccstack, sccstate, sccstate_obstack);
	  nf++;
	}

      v = iterative_hash_hashval_t (nf, v);
    }

  /* Record hash for us.  */
  state->u.hash = v;

  /* See if we found an SCC.  */
  if (state->low == state->dfsnum)
    {
      tree x;
      struct tree_int_map *m;

      /* Pop off the SCC and set its hash values.  */
      x = VEC_pop (tree, *sccstack);
      /* Optimize SCC size one.  */
      if (x == type)
	{
	  state->on_sccstack = false;
	  m = ggc_alloc_cleared_tree_int_map ();
	  m->base.from = x;
	  m->to = v;
	  slot = htab_find_slot (type_hash_cache, m, INSERT);
	  gcc_assert (!*slot);
	  *slot = (void *) m;
	}
      else
	{
	  struct sccs *cstate;
	  unsigned first, i, size, j;
	  struct type_hash_pair *pairs;
	  /* Pop off the SCC and build an array of type, hash pairs.  */
	  first = VEC_length (tree, *sccstack) - 1;
	  while (VEC_index (tree, *sccstack, first) != type)
	    --first;
	  size = VEC_length (tree, *sccstack) - first + 1;
	  pairs = XALLOCAVEC (struct type_hash_pair, size);
	  i = 0;
	  cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	  cstate->on_sccstack = false;
	  pairs[i].type = x;
	  pairs[i].hash = cstate->u.hash;
	  do
	    {
	      x = VEC_pop (tree, *sccstack);
	      cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	      cstate->on_sccstack = false;
	      ++i;
	      pairs[i].type = x;
	      pairs[i].hash = cstate->u.hash;
	    }
	  while (x != type);
	  gcc_assert (i + 1 == size);
	  /* Sort the arrays of type, hash pairs so that when we mix in
	     all members of the SCC the hash value becomes independent on
	     the order we visited the SCC.  Disregard hashes equal to
	     the hash of the type we mix into because we cannot guarantee
	     a stable sort for those across different TUs.  */
	  qsort (pairs, size, sizeof (struct type_hash_pair),
		 type_hash_pair_compare);
	  for (i = 0; i < size; ++i)
	    {
	      hashval_t hash;
	      m = ggc_alloc_cleared_tree_int_map ();
	      m->base.from = pairs[i].type;
	      hash = pairs[i].hash;
	      /* Skip same hashes.  */
	      for (j = i + 1; j < size && pairs[j].hash == pairs[i].hash; ++j)
		;
	      /* Mix in the hashes of all other SCC members, wrapping
		 around the sorted array, so every member sees the same
		 commutative combination regardless of visit order.  */
	      for (; j < size; ++j)
		hash = iterative_hash_hashval_t (pairs[j].hash, hash);
	      for (j = 0; pairs[j].hash != pairs[i].hash; ++j)
		hash = iterative_hash_hashval_t (pairs[j].hash, hash);
	      m->to = hash;
	      if (pairs[i].type == type)
		v = hash;
	      slot = htab_find_slot (type_hash_cache, m, INSERT);
	      gcc_assert (!*slot);
	      *slot = (void *) m;
	    }
	}
    }

  /* VAL is mixed in only here at the very end so all SCC members were
     hashed independently of the caller's running hash.  */
  return iterative_hash_hashval_t (v, val);
}
1275
1276 /* Returns a hash value for P (assumed to be a type). The hash value
1277 is computed using some distinguishing features of the type. Note
1278 that we cannot use pointer hashing here as we may be dealing with
1279 two distinct instances of the same type.
1280
1281 This function should produce the same hash value for two compatible
1282 types according to gimple_types_compatible_p. */
1283
1284 static hashval_t
1285 gimple_type_hash (const void *p)
1286 {
1287 const_tree t = (const_tree) p;
1288 VEC(tree, heap) *sccstack = NULL;
1289 struct pointer_map_t *sccstate;
1290 struct obstack sccstate_obstack;
1291 hashval_t val;
1292 void **slot;
1293 struct tree_int_map m;
1294
1295 m.base.from = CONST_CAST_TREE (t);
1296 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
1297 && *slot)
1298 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);
1299
1300 /* Perform a DFS walk and pre-hash all reachable types. */
1301 next_dfs_num = 1;
1302 sccstate = pointer_map_create ();
1303 gcc_obstack_init (&sccstate_obstack);
1304 val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
1305 &sccstack, sccstate, &sccstate_obstack);
1306 VEC_free (tree, heap, sccstack);
1307 pointer_map_destroy (sccstate);
1308 obstack_free (&sccstate_obstack, NULL);
1309
1310 return val;
1311 }
1312
1313 /* Returns nonzero if P1 and P2 are equal. */
1314
1315 static int
1316 gimple_type_eq (const void *p1, const void *p2)
1317 {
1318 const_tree t1 = (const_tree) p1;
1319 const_tree t2 = (const_tree) p2;
1320 return gimple_types_compatible_p (CONST_CAST_TREE (t1),
1321 CONST_CAST_TREE (t2));
1322 }
1323
1324
1325 /* Worker for gimple_register_type.
1326 Register type T in the global type table gimple_types.
1327 When REGISTERING_MV is false first recurse for the main variant of T. */
1328
1329 static tree
1330 gimple_register_type_1 (tree t, bool registering_mv)
1331 {
1332 void **slot;
1333 gimple_type_leader_entry *leader;
1334
1335 /* If we registered this type before return the cached result. */
1336 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
1337 if (leader->type == t)
1338 return leader->leader;
1339
1340 /* Always register the main variant first. This is important so we
1341 pick up the non-typedef variants as canonical, otherwise we'll end
1342 up taking typedef ids for structure tags during comparison.
1343 It also makes sure that main variants will be merged to main variants.
1344 As we are operating on a possibly partially fixed up type graph
1345 do not bother to recurse more than once, otherwise we may end up
1346 walking in circles.
1347 If we are registering a main variant it will either remain its
1348 own main variant or it will be merged to something else in which
1349 case we do not care for the main variant leader. */
1350 if (!registering_mv
1351 && TYPE_MAIN_VARIANT (t) != t)
1352 gimple_register_type_1 (TYPE_MAIN_VARIANT (t), true);
1353
1354 /* See if we already have an equivalent type registered. */
1355 slot = htab_find_slot (gimple_types, t, INSERT);
1356 if (*slot
1357 && *(tree *)slot != t)
1358 {
1359 tree new_type = (tree) *((tree *) slot);
1360 leader->type = t;
1361 leader->leader = new_type;
1362 return new_type;
1363 }
1364
1365 /* If not, insert it to the cache and the hash. */
1366 leader->type = t;
1367 leader->leader = t;
1368 *slot = (void *) t;
1369 return t;
1370 }
1371
1372 /* Register type T in the global type table gimple_types.
1373 If another type T', compatible with T, already existed in
1374 gimple_types then return T', otherwise return T. This is used by
1375 LTO to merge identical types read from different TUs. */
1376
1377 static tree
1378 gimple_register_type (tree t)
1379 {
1380 gcc_assert (TYPE_P (t));
1381 return gimple_register_type_1 (t, false);
1382 }
1383
/* Map TT to its prevailing variant, but only if TT was marked as
   non-prevailing (uniquify_nodes sets TREE_VISITED on types that were
   merged away); an unmarked type is already canonical and is returned
   unchanged, avoiding a hash lookup.  */
#define GIMPLE_REGISTER_TYPE(tt) \
  (TREE_VISITED (tt) ? gimple_register_type (tt) : tt)
1386
1387
1388
/* A hashtable of trees that potentially refer to variables or functions
   that must be replaced with their prevailing variant.
   GC-managed; entries are dropped when the referring tree dies.  */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node))) htab_t
  tree_with_vars;
1393
1394 /* Remember that T is a tree that (potentially) refers to a variable
1395 or function decl that may be replaced with its prevailing variant. */
1396 static void
1397 remember_with_vars (tree t)
1398 {
1399 *(tree *) htab_find_slot (tree_with_vars, t, INSERT) = t;
1400 }
1401
/* Fix up the reference TT in place: replace a type by its prevailing
   variant, and if TT is a public variable or function decl, record the
   enclosing tree for later prevailing-decl substitution.
   NOTE: the macro expands remember_with_vars (t) — it implicitly
   captures the enclosing function's variable literally named 't', so
   every caller must name its tree argument 't'.  */
#define LTO_FIXUP_TREE(tt) \
  do \
    { \
      if (tt) \
	{ \
	  if (TYPE_P (tt)) \
	    (tt) = GIMPLE_REGISTER_TYPE (tt); \
	  if (VAR_OR_FUNCTION_DECL_P (tt) && TREE_PUBLIC (tt)) \
	    remember_with_vars (t); \
	} \
    } while (0)
1413
1414 static void lto_fixup_types (tree);
1415
/* Fix up fields of a tree_typed T.
   The parameter must be named 't': LTO_FIXUP_TREE implicitly passes
   't' to remember_with_vars.  */

static void
lto_ft_typed (tree t)
{
  LTO_FIXUP_TREE (TREE_TYPE (t));
}
1423
/* Fix up fields of a tree_common T: its type and its chain.  */

static void
lto_ft_common (tree t)
{
  lto_ft_typed (t);
  LTO_FIXUP_TREE (TREE_CHAIN (t));
}
1432
/* Fix up fields of a decl_minimal T: common fields plus name and
   context.  */

static void
lto_ft_decl_minimal (tree t)
{
  lto_ft_common (t);
  LTO_FIXUP_TREE (DECL_NAME (t));
  LTO_FIXUP_TREE (DECL_CONTEXT (t));
}
1442
/* Fix up fields of a decl_common T: size, initializer, attributes and
   abstract origin, on top of the decl_minimal fields.  */

static void
lto_ft_decl_common (tree t)
{
  lto_ft_decl_minimal (t);
  LTO_FIXUP_TREE (DECL_SIZE (t));
  LTO_FIXUP_TREE (DECL_SIZE_UNIT (t));
  LTO_FIXUP_TREE (DECL_INITIAL (t));
  LTO_FIXUP_TREE (DECL_ATTRIBUTES (t));
  LTO_FIXUP_TREE (DECL_ABSTRACT_ORIGIN (t));
}
1455
/* Fix up fields of a decl_with_vis T: assembler name and section name,
   on top of the decl_common fields.  */

static void
lto_ft_decl_with_vis (tree t)
{
  lto_ft_decl_common (t);

  /* Accessor macro has side-effects, use field-name here.  */
  LTO_FIXUP_TREE (t->decl_with_vis.assembler_name);
  LTO_FIXUP_TREE (DECL_SECTION_NAME (t));
}
1467
/* Fix up fields of a decl_non_common T: argument/result/vindex fields,
   on top of the decl_with_vis fields.  */

static void
lto_ft_decl_non_common (tree t)
{
  lto_ft_decl_with_vis (t);
  LTO_FIXUP_TREE (DECL_ARGUMENT_FLD (t));
  LTO_FIXUP_TREE (DECL_RESULT_FLD (t));
  LTO_FIXUP_TREE (DECL_VINDEX (t));
  /* The C frontends may create exact duplicates for DECL_ORIGINAL_TYPE
     like for 'typedef enum foo foo'.  We have no way of avoiding to
     merge them and dwarf2out.c cannot deal with this,
     so fix this up by clearing DECL_ORIGINAL_TYPE in this case.  */
  if (TREE_CODE (t) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
    DECL_ORIGINAL_TYPE (t) = NULL_TREE;
}
1485
/* Fix up fields of a FUNCTION_DECL T: the personality routine, on top
   of the decl_non_common fields.  */

static void
lto_ft_function (tree t)
{
  lto_ft_decl_non_common (t);
  LTO_FIXUP_TREE (DECL_FUNCTION_PERSONALITY (t));
}
1494
/* Fix up fields of a FIELD_DECL T: offsets, bit-field type/offset and
   containing record, on top of the decl_common fields.  */

static void
lto_ft_field_decl (tree t)
{
  lto_ft_decl_common (t);
  LTO_FIXUP_TREE (DECL_FIELD_OFFSET (t));
  LTO_FIXUP_TREE (DECL_BIT_FIELD_TYPE (t));
  LTO_FIXUP_TREE (DECL_QUALIFIER (t));
  LTO_FIXUP_TREE (DECL_FIELD_BIT_OFFSET (t));
  LTO_FIXUP_TREE (DECL_FCONTEXT (t));
}
1507
/* Fix up fields of a type T.  */

static void
lto_ft_type (tree t)
{
  lto_ft_common (t);
  LTO_FIXUP_TREE (TYPE_CACHED_VALUES (t));
  LTO_FIXUP_TREE (TYPE_SIZE (t));
  LTO_FIXUP_TREE (TYPE_SIZE_UNIT (t));
  LTO_FIXUP_TREE (TYPE_ATTRIBUTES (t));
  LTO_FIXUP_TREE (TYPE_NAME (t));

  /* Accessors are for derived node types only.  */
  /* NOTE(review): only TYPE_MINVAL is guarded by !POINTER_TYPE_P while
     TYPE_MAXVAL is fixed up unconditionally — presumably intentional
     (the two slots are overlaid differently for pointer types), but
     worth confirming against tree.h.  */
  if (!POINTER_TYPE_P (t))
    LTO_FIXUP_TREE (TYPE_MINVAL (t));
  LTO_FIXUP_TREE (TYPE_MAXVAL (t));

  /* Accessor is for derived node types only.  */
  LTO_FIXUP_TREE (t->type_non_common.binfo);

  LTO_FIXUP_TREE (TYPE_CONTEXT (t));
}
1530
/* Fix up fields of a BINFO T: vtable, offsets, base accesses and base
   binfos.  Vector elements are fixed up through a temporary because
   LTO_FIXUP_TREE assigns through its argument.  */

static void
lto_ft_binfo (tree t)
{
  unsigned HOST_WIDE_INT i, n;
  tree base, saved_base;

  lto_ft_common (t);
  LTO_FIXUP_TREE (BINFO_VTABLE (t));
  LTO_FIXUP_TREE (BINFO_OFFSET (t));
  LTO_FIXUP_TREE (BINFO_VIRTUALS (t));
  LTO_FIXUP_TREE (BINFO_VPTR_FIELD (t));
  n = VEC_length (tree, BINFO_BASE_ACCESSES (t));
  for (i = 0; i < n; i++)
    {
      saved_base = base = BINFO_BASE_ACCESS (t, i);
      LTO_FIXUP_TREE (base);
      /* Write back only if the fixup actually replaced the element.  */
      if (base != saved_base)
	VEC_replace (tree, BINFO_BASE_ACCESSES (t), i, base);
    }
  LTO_FIXUP_TREE (BINFO_INHERITANCE_CHAIN (t));
  LTO_FIXUP_TREE (BINFO_SUBVTT_INDEX (t));
  LTO_FIXUP_TREE (BINFO_VPTR_INDEX (t));
  n = BINFO_N_BASE_BINFOS (t);
  for (i = 0; i < n; i++)
    {
      saved_base = base = BINFO_BASE_BINFO (t, i);
      LTO_FIXUP_TREE (base);
      if (base != saved_base)
	VEC_replace (tree, BINFO_BASE_BINFOS (t), i, base);
    }
}
1564
/* Fix up fields of a CONSTRUCTOR T: its type and every element's index
   and value.  */

static void
lto_ft_constructor (tree t)
{
  unsigned HOST_WIDE_INT idx;
  constructor_elt *ce;

  lto_ft_typed (t);

  for (idx = 0;
       VEC_iterate(constructor_elt, CONSTRUCTOR_ELTS (t), idx, ce);
       idx++)
    {
      LTO_FIXUP_TREE (ce->index);
      LTO_FIXUP_TREE (ce->value);
    }
}
1583
/* Fix up fields of an expression tree T: its type and all operands.  */

static void
lto_ft_expr (tree t)
{
  int i;
  lto_ft_typed (t);
  for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
    LTO_FIXUP_TREE (TREE_OPERAND (t, i));
}
1594
/* Given a tree T fixup fields of T by replacing types with their merged
   variant and other entities by an equal entity from an earlier compilation
   unit, or an entity being canonical in a different way.  This includes
   for instance integer or string constants.

   Dispatches on TREE_CODE to the appropriate lto_ft_* worker.  The
   local must be named 't' for LTO_FIXUP_TREE (see that macro).  */

static void
lto_fixup_types (tree t)
{
  switch (TREE_CODE (t))
    {
    case IDENTIFIER_NODE:
      /* Identifiers carry no fixable references.  */
      break;

    case TREE_LIST:
      LTO_FIXUP_TREE (TREE_VALUE (t));
      LTO_FIXUP_TREE (TREE_PURPOSE (t));
      LTO_FIXUP_TREE (TREE_CHAIN (t));
      break;

    case FIELD_DECL:
      lto_ft_field_decl (t);
      break;

    case LABEL_DECL:
    case CONST_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case IMPORTED_DECL:
      lto_ft_decl_common (t);
      break;

    case VAR_DECL:
      lto_ft_decl_with_vis (t);
      break;

    case TYPE_DECL:
      lto_ft_decl_non_common (t);
      break;

    case FUNCTION_DECL:
      lto_ft_function (t);
      break;

    case TREE_BINFO:
      lto_ft_binfo (t);
      break;

    case PLACEHOLDER_EXPR:
      lto_ft_common (t);
      break;

    case BLOCK:
    case TRANSLATION_UNIT_DECL:
    case OPTIMIZATION_NODE:
    case TARGET_OPTION_NODE:
      /* Nothing to fix up in these.  */
      break;

    default:
      if (TYPE_P (t))
	lto_ft_type (t);
      else if (TREE_CODE (t) == CONSTRUCTOR)
	lto_ft_constructor (t);
      else if (CONSTANT_CLASS_P (t))
	LTO_FIXUP_TREE (TREE_TYPE (t));
      else if (EXPR_P (t))
	{
	  lto_ft_expr (t);
	}
      else
	{
	  /* Anything else may itself need prevailing-decl replacement;
	     remember it for the later pass.  */
	  remember_with_vars (t);
	}
    }
}
1669
1670
1671 /* Return the resolution for the decl with index INDEX from DATA_IN. */
1672
1673 static enum ld_plugin_symbol_resolution
1674 get_resolution (struct data_in *data_in, unsigned index)
1675 {
1676 if (data_in->globals_resolution)
1677 {
1678 ld_plugin_symbol_resolution_t ret;
1679 /* We can have references to not emitted functions in
1680 DECL_FUNCTION_PERSONALITY at least. So we can and have
1681 to indeed return LDPR_UNKNOWN in some cases. */
1682 if (VEC_length (ld_plugin_symbol_resolution_t,
1683 data_in->globals_resolution) <= index)
1684 return LDPR_UNKNOWN;
1685 ret = VEC_index (ld_plugin_symbol_resolution_t,
1686 data_in->globals_resolution,
1687 index);
1688 return ret;
1689 }
1690 else
1691 /* Delay resolution finding until decl merging. */
1692 return LDPR_UNKNOWN;
1693 }
1694
1695 /* Map assigning declarations their resolutions. */
1696 static pointer_map_t *resolution_map;
1697
1698 /* We need to record resolutions until symbol table is read. */
1699 static void
1700 register_resolution (tree decl, enum ld_plugin_symbol_resolution resolution)
1701 {
1702 if (resolution == LDPR_UNKNOWN)
1703 return;
1704 if (!resolution_map)
1705 resolution_map = pointer_map_create ();
1706 *pointer_map_insert (resolution_map, decl) = (void *)(size_t)resolution;
1707 }
1708
/* Register DECL with the global symbol table and change its
   name if necessary to avoid name clashes for static globals across
   different files.  DATA_IN provides the streamer cache used to look
   up DECL's index for resolution queries.  */

static void
lto_register_var_decl_in_symtab (struct data_in *data_in, tree decl)
{
  tree context;

  /* Variable has file scope, not local.  Need to ensure static variables
     between different files don't clash unexpectedly.  */
  if (!TREE_PUBLIC (decl)
      && !((context = decl_function_context (decl))
	   && auto_var_in_fn_p (decl, context)))
    {
      /* ??? We normally pre-mangle names before we serialize them
	 out.  Here, in lto1, we do not know the language, and
	 thus cannot do the mangling again.  Instead, we just
	 append a suffix to the mangled name.  The resulting name,
	 however, is not a properly-formed mangled name, and will
	 confuse any attempt to unmangle it.  */
      const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
      char *label;

      ASM_FORMAT_PRIVATE_NAME (label, name, DECL_UID (decl));
      SET_DECL_ASSEMBLER_NAME (decl, get_identifier (label));
      rest_of_decl_compilation (decl, 1, 0);
      /* Keep the decl alive across GC and visible to later passes.  */
      VEC_safe_push (tree, gc, lto_global_var_decls, decl);
    }

  /* If this variable has already been declared, queue the
     declaration for merging.  */
  if (TREE_PUBLIC (decl))
    {
      unsigned ix;
      /* The decl must have been streamed, hence be in the cache.  */
      if (!streamer_tree_cache_lookup (data_in->reader_cache, decl, &ix))
	gcc_unreachable ();
      register_resolution (decl, get_resolution (data_in, ix));
    }
}
1749
1750
1751 /* Register DECL with the global symbol table and change its
1752 name if necessary to avoid name clashes for static globals across
1753 different files. DATA_IN contains descriptors and tables for the
1754 file being read. */
1755
1756 static void
1757 lto_register_function_decl_in_symtab (struct data_in *data_in, tree decl)
1758 {
1759 /* Need to ensure static entities between different files
1760 don't clash unexpectedly. */
1761 if (!TREE_PUBLIC (decl))
1762 {
1763 /* We must not use the DECL_ASSEMBLER_NAME macro here, as it
1764 may set the assembler name where it was previously empty. */
1765 tree old_assembler_name = decl->decl_with_vis.assembler_name;
1766
1767 /* FIXME lto: We normally pre-mangle names before we serialize
1768 them out. Here, in lto1, we do not know the language, and
1769 thus cannot do the mangling again. Instead, we just append a
1770 suffix to the mangled name. The resulting name, however, is
1771 not a properly-formed mangled name, and will confuse any
1772 attempt to unmangle it. */
1773 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
1774 char *label;
1775
1776 ASM_FORMAT_PRIVATE_NAME (label, name, DECL_UID (decl));
1777 SET_DECL_ASSEMBLER_NAME (decl, get_identifier (label));
1778
1779 /* We may arrive here with the old assembler name not set
1780 if the function body is not needed, e.g., it has been
1781 inlined away and does not appear in the cgraph. */
1782 if (old_assembler_name)
1783 {
1784 tree new_assembler_name = DECL_ASSEMBLER_NAME (decl);
1785
1786 /* Make the original assembler name available for later use.
1787 We may have used it to indicate the section within its
1788 object file where the function body may be found.
1789 FIXME lto: Find a better way to maintain the function decl
1790 to body section mapping so we don't need this hack. */
1791 lto_record_renamed_decl (data_in->file_data,
1792 IDENTIFIER_POINTER (old_assembler_name),
1793 IDENTIFIER_POINTER (new_assembler_name));
1794 }
1795 }
1796
1797 /* If this variable has already been declared, queue the
1798 declaration for merging. */
1799 if (TREE_PUBLIC (decl) && !DECL_ABSTRACT (decl))
1800 {
1801 unsigned ix;
1802 if (!streamer_tree_cache_lookup (data_in->reader_cache, decl, &ix))
1803 gcc_unreachable ();
1804 register_resolution (decl, get_resolution (data_in, ix));
1805 }
1806 }
1807
1808
1809 /* Given a streamer cache structure DATA_IN (holding a sequence of trees
1810 for one compilation unit) go over all trees starting at index FROM until the
1811 end of the sequence and replace fields of those trees, and the trees
1812 themself with their canonical variants as per gimple_register_type. */
1813
1814 static void
1815 uniquify_nodes (struct data_in *data_in, unsigned from)
1816 {
1817 struct streamer_tree_cache_d *cache = data_in->reader_cache;
1818 unsigned len = VEC_length (tree, cache->nodes);
1819 unsigned i;
1820
1821 /* Go backwards because children streamed for the first time come
1822 as part of their parents, and hence are created after them. */
1823
1824 /* First register all the types in the cache. This makes sure to
1825 have the original structure in the type cycles when registering
1826 them and computing hashes. */
1827 for (i = len; i-- > from;)
1828 {
1829 tree t = VEC_index (tree, cache->nodes, i);
1830 if (t && TYPE_P (t))
1831 {
1832 tree newt = gimple_register_type (t);
1833 /* Mark non-prevailing types so we fix them up. No need
1834 to reset that flag afterwards - nothing that refers
1835 to those types is left and they are collected. */
1836 if (newt != t)
1837 TREE_VISITED (t) = 1;
1838 }
1839 }
1840
1841 /* Second fixup all trees in the new cache entries. */
1842 for (i = len; i-- > from;)
1843 {
1844 tree t = VEC_index (tree, cache->nodes, i);
1845 tree oldt = t;
1846 if (!t)
1847 continue;
1848
1849 /* First fixup the fields of T. */
1850 lto_fixup_types (t);
1851
1852 if (!TYPE_P (t))
1853 continue;
1854
1855 /* Now try to find a canonical variant of T itself. */
1856 t = GIMPLE_REGISTER_TYPE (t);
1857
1858 if (t == oldt)
1859 {
1860 /* The following re-creates proper variant lists while fixing up
1861 the variant leaders. We do not stream TYPE_NEXT_VARIANT so the
1862 variant list state before fixup is broken. */
1863 tree tem, mv;
1864
1865 #ifdef ENABLE_CHECKING
1866 /* Remove us from our main variant list if we are not the
1867 variant leader. */
1868 if (TYPE_MAIN_VARIANT (t) != t)
1869 {
1870 tem = TYPE_MAIN_VARIANT (t);
1871 while (tem && TYPE_NEXT_VARIANT (tem) != t)
1872 tem = TYPE_NEXT_VARIANT (tem);
1873 gcc_assert (!tem && !TYPE_NEXT_VARIANT (t));
1874 }
1875 #endif
1876
1877 /* Query our new main variant. */
1878 mv = GIMPLE_REGISTER_TYPE (TYPE_MAIN_VARIANT (t));
1879
1880 /* If we were the variant leader and we get replaced ourselves drop
1881 all variants from our list. */
1882 if (TYPE_MAIN_VARIANT (t) == t
1883 && mv != t)
1884 {
1885 tem = t;
1886 while (tem)
1887 {
1888 tree tem2 = TYPE_NEXT_VARIANT (tem);
1889 TYPE_NEXT_VARIANT (tem) = NULL_TREE;
1890 tem = tem2;
1891 }
1892 }
1893
1894 /* If we are not our own variant leader link us into our new leaders
1895 variant list. */
1896 if (mv != t)
1897 {
1898 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
1899 TYPE_NEXT_VARIANT (mv) = t;
1900 if (RECORD_OR_UNION_TYPE_P (t))
1901 TYPE_BINFO (t) = TYPE_BINFO (mv);
1902 /* Preserve the invariant that type variants share their
1903 TYPE_FIELDS. */
1904 if (RECORD_OR_UNION_TYPE_P (t)
1905 && TYPE_FIELDS (mv) != TYPE_FIELDS (t))
1906 {
1907 tree f1, f2;
1908 for (f1 = TYPE_FIELDS (mv), f2 = TYPE_FIELDS (t);
1909 f1 && f2; f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1910 {
1911 unsigned ix;
1912 gcc_assert (f1 != f2
1913 && DECL_NAME (f1) == DECL_NAME (f2));
1914 if (!streamer_tree_cache_lookup (cache, f2, &ix))
1915 gcc_unreachable ();
1916 /* If we're going to replace an element which we'd
1917 still visit in the next iterations, we wouldn't
1918 handle it, so do it here. We do have to handle it
1919 even though the field_decl itself will be removed,
1920 as it could refer to e.g. integer_cst which we
1921 wouldn't reach via any other way, hence they
1922 (and their type) would stay uncollected. */
1923 /* ??? We should rather make sure to replace all
1924 references to f2 with f1. That means handling
1925 COMPONENT_REFs and CONSTRUCTOR elements in
1926 lto_fixup_types and special-case the field-decl
1927 operand handling. */
1928 /* ??? Not sure the above is all relevant in this
1929 path canonicalizing TYPE_FIELDS to that of the
1930 main variant. */
1931 if (ix < i)
1932 lto_fixup_types (f2);
1933 streamer_tree_cache_insert_at (cache, f1, ix);
1934 }
1935 TYPE_FIELDS (t) = TYPE_FIELDS (mv);
1936 }
1937 }
1938
1939 /* Finally adjust our main variant and fix it up. */
1940 TYPE_MAIN_VARIANT (t) = mv;
1941
1942 /* The following reconstructs the pointer chains
1943 of the new pointed-to type if we are a main variant. We do
1944 not stream those so they are broken before fixup. */
1945 if (TREE_CODE (t) == POINTER_TYPE
1946 && TYPE_MAIN_VARIANT (t) == t)
1947 {
1948 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (TREE_TYPE (t));
1949 TYPE_POINTER_TO (TREE_TYPE (t)) = t;
1950 }
1951 else if (TREE_CODE (t) == REFERENCE_TYPE
1952 && TYPE_MAIN_VARIANT (t) == t)
1953 {
1954 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (TREE_TYPE (t));
1955 TYPE_REFERENCE_TO (TREE_TYPE (t)) = t;
1956 }
1957 }
1958
1959 else
1960 {
1961 if (RECORD_OR_UNION_TYPE_P (t))
1962 {
1963 tree f1, f2;
1964 if (TYPE_FIELDS (t) != TYPE_FIELDS (oldt))
1965 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (oldt);
1966 f1 && f2; f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1967 {
1968 unsigned ix;
1969 gcc_assert (f1 != f2 && DECL_NAME (f1) == DECL_NAME (f2));
1970 if (!streamer_tree_cache_lookup (cache, f2, &ix))
1971 gcc_unreachable ();
1972 /* If we're going to replace an element which we'd
1973 still visit in the next iterations, we wouldn't
1974 handle it, so do it here. We do have to handle it
1975 even though the field_decl itself will be removed,
1976 as it could refer to e.g. integer_cst which we
1977 wouldn't reach via any other way, hence they
1978 (and their type) would stay uncollected. */
1979 /* ??? We should rather make sure to replace all
1980 references to f2 with f1. That means handling
1981 COMPONENT_REFs and CONSTRUCTOR elements in
1982 lto_fixup_types and special-case the field-decl
1983 operand handling. */
1984 if (ix < i)
1985 lto_fixup_types (f2);
1986 streamer_tree_cache_insert_at (cache, f1, ix);
1987 }
1988 }
1989
1990 /* If we found a tree that is equal to oldt replace it in the
1991 cache, so that further users (in the various LTO sections)
1992 make use of it. */
1993 streamer_tree_cache_insert_at (cache, t, i);
1994 }
1995 }
1996
1997 /* Finally compute the canonical type of all TREE_TYPEs and register
1998 VAR_DECL and FUNCTION_DECL nodes in the symbol table.
1999 From this point there are no longer any types with
2000 TYPE_STRUCTURAL_EQUALITY_P and its type-based alias problems.
2001 This step requires the TYPE_POINTER_TO lists being present, so
2002 make sure it is done last. */
2003 for (i = len; i-- > from;)
2004 {
2005 tree t = VEC_index (tree, cache->nodes, i);
2006 if (t == NULL_TREE)
2007 continue;
2008
2009 if (TREE_CODE (t) == VAR_DECL)
2010 lto_register_var_decl_in_symtab (data_in, t);
2011 else if (TREE_CODE (t) == FUNCTION_DECL && !DECL_BUILT_IN (t))
2012 lto_register_function_decl_in_symtab (data_in, t);
2013 else if (!flag_wpa
2014 && TREE_CODE (t) == TYPE_DECL)
2015 debug_hooks->type_decl (t, !DECL_FILE_SCOPE_P (t));
2016 else if (TYPE_P (t) && !TYPE_CANONICAL (t))
2017 TYPE_CANONICAL (t) = gimple_register_canonical_type (t);
2018 }
2019 }
2020
2021
2022 /* Read all the symbols from buffer DATA, using descriptors in DECL_DATA.
2023 RESOLUTIONS is the set of symbols picked by the linker (read from the
2024 resolution file when the linker plugin is being used). */
2025
2026 static void
2027 lto_read_decls (struct lto_file_decl_data *decl_data, const void *data,
2028 VEC(ld_plugin_symbol_resolution_t,heap) *resolutions)
2029 {
2030 const struct lto_decl_header *header = (const struct lto_decl_header *) data;
2031 const int decl_offset = sizeof (struct lto_decl_header);
2032 const int main_offset = decl_offset + header->decl_state_size;
2033 const int string_offset = main_offset + header->main_size;
2034 struct lto_input_block ib_main;
2035 struct data_in *data_in;
2036 unsigned int i;
2037 const uint32_t *data_ptr, *data_end;
2038 uint32_t num_decl_states;
2039
2040 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
2041 header->main_size);
2042
2043 data_in = lto_data_in_create (decl_data, (const char *) data + string_offset,
2044 header->string_size, resolutions);
2045
2046 /* We do not uniquify the pre-loaded cache entries, those are middle-end
2047 internal types that should not be merged. */
2048
2049 /* Read the global declarations and types. */
2050 while (ib_main.p < ib_main.len)
2051 {
2052 tree t;
2053 unsigned from = VEC_length (tree, data_in->reader_cache->nodes);
2054 t = stream_read_tree (&ib_main, data_in);
2055 gcc_assert (t && ib_main.p <= ib_main.len);
2056 uniquify_nodes (data_in, from);
2057 }
2058
2059 /* Read in lto_in_decl_state objects. */
2060 data_ptr = (const uint32_t *) ((const char*) data + decl_offset);
2061 data_end =
2062 (const uint32_t *) ((const char*) data_ptr + header->decl_state_size);
2063 num_decl_states = *data_ptr++;
2064
2065 gcc_assert (num_decl_states > 0);
2066 decl_data->global_decl_state = lto_new_in_decl_state ();
2067 data_ptr = lto_read_in_decl_state (data_in, data_ptr,
2068 decl_data->global_decl_state);
2069
2070 /* Read in per-function decl states and enter them in hash table. */
2071 decl_data->function_decl_states =
2072 htab_create_ggc (37, lto_hash_in_decl_state, lto_eq_in_decl_state, NULL);
2073
2074 for (i = 1; i < num_decl_states; i++)
2075 {
2076 struct lto_in_decl_state *state = lto_new_in_decl_state ();
2077 void **slot;
2078
2079 data_ptr = lto_read_in_decl_state (data_in, data_ptr, state);
2080 slot = htab_find_slot (decl_data->function_decl_states, state, INSERT);
2081 gcc_assert (*slot == NULL);
2082 *slot = state;
2083 }
2084
2085 if (data_ptr != data_end)
2086 internal_error ("bytecode stream: garbage at the end of symbols section");
2087
2088 /* Set the current decl state to be the global state. */
2089 decl_data->current_decl_state = decl_data->global_decl_state;
2090
2091 lto_data_in_delete (data_in);
2092 }
2093
2094 /* Custom version of strtoll, which is not portable. */
2095
2096 static HOST_WIDEST_INT
2097 lto_parse_hex (const char *p)
2098 {
2099 HOST_WIDEST_INT ret = 0;
2100
2101 for (; *p != '\0'; ++p)
2102 {
2103 char c = *p;
2104 unsigned char part;
2105 ret <<= 4;
2106 if (c >= '0' && c <= '9')
2107 part = c - '0';
2108 else if (c >= 'a' && c <= 'f')
2109 part = c - 'a' + 10;
2110 else if (c >= 'A' && c <= 'F')
2111 part = c - 'A' + 10;
2112 else
2113 internal_error ("could not parse hex number");
2114 ret |= part;
2115 }
2116
2117 return ret;
2118 }
2119
2120 /* Read resolution for file named FILE_NAME. The resolution is read from
2121 RESOLUTION. */
2122
2123 static void
2124 lto_resolution_read (splay_tree file_ids, FILE *resolution, lto_file *file)
2125 {
2126 /* We require that objects in the resolution file are in the same
2127 order as the lto1 command line. */
2128 unsigned int name_len;
2129 char *obj_name;
2130 unsigned int num_symbols;
2131 unsigned int i;
2132 struct lto_file_decl_data *file_data;
2133 splay_tree_node nd = NULL;
2134
2135 if (!resolution)
2136 return;
2137
2138 name_len = strlen (file->filename);
2139 obj_name = XNEWVEC (char, name_len + 1);
2140 fscanf (resolution, " "); /* Read white space. */
2141
2142 fread (obj_name, sizeof (char), name_len, resolution);
2143 obj_name[name_len] = '\0';
2144 if (filename_cmp (obj_name, file->filename) != 0)
2145 internal_error ("unexpected file name %s in linker resolution file. "
2146 "Expected %s", obj_name, file->filename);
2147 if (file->offset != 0)
2148 {
2149 int t;
2150 char offset_p[17];
2151 HOST_WIDEST_INT offset;
2152 t = fscanf (resolution, "@0x%16s", offset_p);
2153 if (t != 1)
2154 internal_error ("could not parse file offset");
2155 offset = lto_parse_hex (offset_p);
2156 if (offset != file->offset)
2157 internal_error ("unexpected offset");
2158 }
2159
2160 free (obj_name);
2161
2162 fscanf (resolution, "%u", &num_symbols);
2163
2164 for (i = 0; i < num_symbols; i++)
2165 {
2166 int t;
2167 unsigned index;
2168 unsigned HOST_WIDE_INT id;
2169 char r_str[27];
2170 enum ld_plugin_symbol_resolution r = (enum ld_plugin_symbol_resolution) 0;
2171 unsigned int j;
2172 unsigned int lto_resolution_str_len =
2173 sizeof (lto_resolution_str) / sizeof (char *);
2174 res_pair rp;
2175
2176 t = fscanf (resolution, "%u " HOST_WIDE_INT_PRINT_HEX_PURE " %26s %*[^\n]\n",
2177 &index, &id, r_str);
2178 if (t != 3)
2179 internal_error ("invalid line in the resolution file");
2180
2181 for (j = 0; j < lto_resolution_str_len; j++)
2182 {
2183 if (strcmp (lto_resolution_str[j], r_str) == 0)
2184 {
2185 r = (enum ld_plugin_symbol_resolution) j;
2186 break;
2187 }
2188 }
2189 if (j == lto_resolution_str_len)
2190 internal_error ("invalid resolution in the resolution file");
2191
2192 if (!(nd && lto_splay_tree_id_equal_p (nd->key, id)))
2193 {
2194 nd = lto_splay_tree_lookup (file_ids, id);
2195 if (nd == NULL)
2196 internal_error ("resolution sub id " HOST_WIDE_INT_PRINT_HEX_PURE
2197 " not in object file", id);
2198 }
2199
2200 file_data = (struct lto_file_decl_data *)nd->value;
2201 /* The indexes are very sparse. To save memory save them in a compact
2202 format that is only unpacked later when the subfile is processed. */
2203 rp.res = r;
2204 rp.index = index;
2205 VEC_safe_push (res_pair, heap, file_data->respairs, rp);
2206 if (file_data->max_index < index)
2207 file_data->max_index = index;
2208 }
2209 }
2210
/* Singly-linked list of lto_file_decl_data objects, one per sub
   module, maintained in linker command-line order; FIRST/LAST allow
   O(1) append.  */
struct file_data_list
  {
    struct lto_file_decl_data *first, *last;
  };
2216
2217 /* Is the name for a id'ed LTO section? */
2218
2219 static int
2220 lto_section_with_id (const char *name, unsigned HOST_WIDE_INT *id)
2221 {
2222 const char *s;
2223
2224 if (strncmp (name, LTO_SECTION_NAME_PREFIX, strlen (LTO_SECTION_NAME_PREFIX)))
2225 return 0;
2226 s = strrchr (name, '.');
2227 return s && sscanf (s, "." HOST_WIDE_INT_PRINT_HEX_PURE, id) == 1;
2228 }
2229
2230 /* Create file_data of each sub file id */
2231
2232 static int
2233 create_subid_section_table (struct lto_section_slot *ls, splay_tree file_ids,
2234 struct file_data_list *list)
2235 {
2236 struct lto_section_slot s_slot, *new_slot;
2237 unsigned HOST_WIDE_INT id;
2238 splay_tree_node nd;
2239 void **hash_slot;
2240 char *new_name;
2241 struct lto_file_decl_data *file_data;
2242
2243 if (!lto_section_with_id (ls->name, &id))
2244 return 1;
2245
2246 /* Find hash table of sub module id */
2247 nd = lto_splay_tree_lookup (file_ids, id);
2248 if (nd != NULL)
2249 {
2250 file_data = (struct lto_file_decl_data *)nd->value;
2251 }
2252 else
2253 {
2254 file_data = ggc_alloc_lto_file_decl_data ();
2255 memset(file_data, 0, sizeof (struct lto_file_decl_data));
2256 file_data->id = id;
2257 file_data->section_hash_table = lto_obj_create_section_hash_table ();;
2258 lto_splay_tree_insert (file_ids, id, file_data);
2259
2260 /* Maintain list in linker order */
2261 if (!list->first)
2262 list->first = file_data;
2263 if (list->last)
2264 list->last->next = file_data;
2265 list->last = file_data;
2266 }
2267
2268 /* Copy section into sub module hash table */
2269 new_name = XDUPVEC (char, ls->name, strlen (ls->name) + 1);
2270 s_slot.name = new_name;
2271 hash_slot = htab_find_slot (file_data->section_hash_table, &s_slot, INSERT);
2272 gcc_assert (*hash_slot == NULL);
2273
2274 new_slot = XDUP (struct lto_section_slot, ls);
2275 new_slot->name = new_name;
2276 *hash_slot = new_slot;
2277 return 1;
2278 }
2279
2280 /* Read declarations and other initializations for a FILE_DATA. */
2281
2282 static void
2283 lto_file_finalize (struct lto_file_decl_data *file_data, lto_file *file)
2284 {
2285 const char *data;
2286 size_t len;
2287 VEC(ld_plugin_symbol_resolution_t,heap) *resolutions = NULL;
2288 int i;
2289 res_pair *rp;
2290
2291 /* Create vector for fast access of resolution. We do this lazily
2292 to save memory. */
2293 VEC_safe_grow_cleared (ld_plugin_symbol_resolution_t, heap,
2294 resolutions,
2295 file_data->max_index + 1);
2296 for (i = 0; VEC_iterate (res_pair, file_data->respairs, i, rp); i++)
2297 VEC_replace (ld_plugin_symbol_resolution_t, resolutions, rp->index, rp->res);
2298 VEC_free (res_pair, heap, file_data->respairs);
2299
2300 file_data->renaming_hash_table = lto_create_renaming_table ();
2301 file_data->file_name = file->filename;
2302 data = lto_get_section_data (file_data, LTO_section_decls, NULL, &len);
2303 if (data == NULL)
2304 {
2305 internal_error ("cannot read LTO decls from %s", file_data->file_name);
2306 return;
2307 }
2308 /* Frees resolutions */
2309 lto_read_decls (file_data, data, resolutions);
2310 lto_free_section_data (file_data, LTO_section_decls, NULL, data, len);
2311 }
2312
2313 /* Finalize FILE_DATA in FILE and increase COUNT. */
2314
2315 static int
2316 lto_create_files_from_ids (lto_file *file, struct lto_file_decl_data *file_data,
2317 int *count)
2318 {
2319 lto_file_finalize (file_data, file);
2320 if (cgraph_dump_file)
2321 fprintf (cgraph_dump_file, "Creating file %s with sub id " HOST_WIDE_INT_PRINT_HEX "\n",
2322 file_data->file_name, file_data->id);
2323 (*count)++;
2324 return 0;
2325 }
2326
2327 /* Generate a TREE representation for all types and external decls
2328 entities in FILE.
2329
2330 Read all of the globals out of the file. Then read the cgraph
2331 and process the .o index into the cgraph nodes so that it can open
2332 the .o file to load the functions and ipa information. */
2333
2334 static struct lto_file_decl_data *
2335 lto_file_read (lto_file *file, FILE *resolution_file, int *count)
2336 {
2337 struct lto_file_decl_data *file_data = NULL;
2338 splay_tree file_ids;
2339 htab_t section_hash_table;
2340 struct lto_section_slot *section;
2341 struct file_data_list file_list;
2342 struct lto_section_list section_list;
2343
2344 memset (&section_list, 0, sizeof (struct lto_section_list));
2345 section_hash_table = lto_obj_build_section_table (file, &section_list);
2346
2347 /* Find all sub modules in the object and put their sections into new hash
2348 tables in a splay tree. */
2349 file_ids = lto_splay_tree_new ();
2350 memset (&file_list, 0, sizeof (struct file_data_list));
2351 for (section = section_list.first; section != NULL; section = section->next)
2352 create_subid_section_table (section, file_ids, &file_list);
2353
2354 /* Add resolutions to file ids */
2355 lto_resolution_read (file_ids, resolution_file, file);
2356
2357 /* Finalize each lto file for each submodule in the merged object */
2358 for (file_data = file_list.first; file_data != NULL; file_data = file_data->next)
2359 lto_create_files_from_ids (file, file_data, count);
2360
2361 splay_tree_delete (file_ids);
2362 htab_delete (section_hash_table);
2363
2364 return file_list.first;
2365 }
2366
/* Use mmap-based section I/O only when the host provides mmap and a
   way to query the page size.  */
#if HAVE_MMAP_FILE && HAVE_SYSCONF && defined _SC_PAGE_SIZE
#define LTO_MMAP_IO 1
#endif

#if LTO_MMAP_IO
/* Page size of machine is used for mmap and munmap calls.
   Initialized lazily in lto_read_section_data.  */
static size_t page_mask;
#endif
2375
2376 /* Get the section data of length LEN from FILENAME starting at
2377 OFFSET. The data segment must be freed by the caller when the
2378 caller is finished. Returns NULL if all was not well. */
2379
static char *
lto_read_section_data (struct lto_file_decl_data *file_data,
		       intptr_t offset, size_t len)
{
  char *result;
  static int fd = -1;
  static char *fd_name;
#if LTO_MMAP_IO
  intptr_t computed_len;
  intptr_t computed_offset;
  intptr_t diff;
#endif

  /* Keep a single-entry file-descriptor cache.  The last file we
     touched will get closed at exit.
     ??? Eventually we want to add a more sophisticated larger cache
     or rather fix function body streaming to not stream them in
     practically random order.  */
  if (fd != -1
      && filename_cmp (fd_name, file_data->file_name) != 0)
    {
      free (fd_name);
      close (fd);
      fd = -1;
    }
  if (fd == -1)
    {
      fd = open (file_data->file_name, O_RDONLY|O_BINARY);
      if (fd == -1)
	{
	  fatal_error ("Cannot open %s", file_data->file_name);
	  return NULL;
	}
      fd_name = xstrdup (file_data->file_name);
    }

#if LTO_MMAP_IO
  if (!page_mask)
    {
      size_t page_size = sysconf (_SC_PAGE_SIZE);
      page_mask = ~(page_size - 1);
    }

  /* mmap requires a page-aligned offset, so round OFFSET down to a
     page boundary and remember the slack (DIFF) to re-add below.  */
  computed_offset = offset & page_mask;
  diff = offset - computed_offset;
  computed_len = len + diff;

  result = (char *) mmap (NULL, computed_len, PROT_READ, MAP_PRIVATE,
			  fd, computed_offset);
  if (result == MAP_FAILED)
    {
      fatal_error ("Cannot map %s", file_data->file_name);
      return NULL;
    }

  /* free_section_data recovers the mapping base by masking this
     pointer with page_mask again.  */
  return result + diff;
#else
  result = (char *) xmalloc (len);
  if (lseek (fd, offset, SEEK_SET) != offset
      || read (fd, result, len) != (ssize_t) len)
    {
      free (result);
      fatal_error ("Cannot read %s", file_data->file_name);
      result = NULL;
    }
#ifdef __MINGW32__
  /* Native windows doesn't supports delayed unlink on opened file. So
     we close file here again. This produces higher I/O load, but at least
     it prevents to have dangling file handles preventing unlink. */
  free (fd_name);
  fd_name = NULL;
  close (fd);
  fd = -1;
#endif
  return result;
#endif
}
2457
2458
2459 /* Get the section data from FILE_DATA of SECTION_TYPE with NAME.
2460 NAME will be NULL unless the section type is for a function
2461 body. */
2462
2463 static const char *
2464 get_section_data (struct lto_file_decl_data *file_data,
2465 enum lto_section_type section_type,
2466 const char *name,
2467 size_t *len)
2468 {
2469 htab_t section_hash_table = file_data->section_hash_table;
2470 struct lto_section_slot *f_slot;
2471 struct lto_section_slot s_slot;
2472 const char *section_name = lto_get_section_name (section_type, name, file_data);
2473 char *data = NULL;
2474
2475 *len = 0;
2476 s_slot.name = section_name;
2477 f_slot = (struct lto_section_slot *) htab_find (section_hash_table, &s_slot);
2478 if (f_slot)
2479 {
2480 data = lto_read_section_data (file_data, f_slot->start, f_slot->len);
2481 *len = f_slot->len;
2482 }
2483
2484 free (CONST_CAST (char *, section_name));
2485 return data;
2486 }
2487
2488
2489 /* Free the section data from FILE_DATA of SECTION_TYPE with NAME that
2490 starts at OFFSET and has LEN bytes. */
2491
static void
free_section_data (struct lto_file_decl_data *file_data ATTRIBUTE_UNUSED,
		   enum lto_section_type section_type ATTRIBUTE_UNUSED,
		   const char *name ATTRIBUTE_UNUSED,
		   const char *offset, size_t len ATTRIBUTE_UNUSED)
{
#if LTO_MMAP_IO
  intptr_t computed_len;
  intptr_t computed_offset;
  intptr_t diff;
#endif

#if LTO_MMAP_IO
  /* Undo the page rounding done in lto_read_section_data: OFFSET is
     DIFF bytes past the page-aligned mapping base.  */
  computed_offset = ((intptr_t) offset) & page_mask;
  diff = (intptr_t) offset - computed_offset;
  computed_len = len + diff;

  munmap ((caddr_t) computed_offset, computed_len);
#else
  free (CONST_CAST(char *, offset));
#endif
}
2514
2515 static lto_file *current_lto_file;
2516
2517 /* Helper for qsort; compare partitions and return one with smaller size.
2518 We sort from greatest to smallest so parallel build doesn't stale on the
2519 longest compilation being executed too late. */
2520
2521 static int
2522 cmp_partitions_size (const void *a, const void *b)
2523 {
2524 const struct ltrans_partition_def *pa
2525 = *(struct ltrans_partition_def *const *)a;
2526 const struct ltrans_partition_def *pb
2527 = *(struct ltrans_partition_def *const *)b;
2528 return pb->insns - pa->insns;
2529 }
2530
2531 /* Helper for qsort; compare partitions and return one with smaller order. */
2532
2533 static int
2534 cmp_partitions_order (const void *a, const void *b)
2535 {
2536 const struct ltrans_partition_def *pa
2537 = *(struct ltrans_partition_def *const *)a;
2538 const struct ltrans_partition_def *pb
2539 = *(struct ltrans_partition_def *const *)b;
2540 int ordera = -1, orderb = -1;
2541
2542 if (lto_symtab_encoder_size (pa->encoder))
2543 ordera = lto_symtab_encoder_deref (pa->encoder, 0)->symbol.order;
2544 if (lto_symtab_encoder_size (pb->encoder))
2545 orderb = lto_symtab_encoder_deref (pb->encoder, 0)->symbol.order;
2546 return orderb - ordera;
2547 }
2548
2549 /* Write all output files in WPA mode and the file with the list of
2550 LTRANS units. */
2551
static void
lto_wpa_write_files (void)
{
  unsigned i, n_sets;
  lto_file *file;
  ltrans_partition part;
  FILE *ltrans_output_list_stream;
  char *temp_filename;
  size_t blen;

  /* Open the LTRANS output list. */
  if (!ltrans_output_list)
    fatal_error ("no LTRANS output list filename provided");
  ltrans_output_list_stream = fopen (ltrans_output_list, "w");
  if (ltrans_output_list_stream == NULL)
    fatal_error ("opening LTRANS output list %s: %m", ltrans_output_list);

  timevar_push (TV_WHOPR_WPA);

  FOR_EACH_VEC_ELT (ltrans_partition, ltrans_partitions, i, part)
    lto_stats.num_output_symtab_nodes += lto_symtab_encoder_size (part->encoder);

  /* Find out statics that need to be promoted
     to globals with hidden visibility because they are accessed from multiple
     partitions. */
  lto_promote_cross_file_statics ();

  timevar_pop (TV_WHOPR_WPA);

  timevar_push (TV_WHOPR_WPA_IO);

  /* Generate a prefix for the LTRANS unit files: strip a trailing
     ".out" from the list file name; each unit becomes "<prefix>N.o".  */
  blen = strlen (ltrans_output_list);
  temp_filename = (char *) xmalloc (blen + sizeof ("2147483648.o"));
  strcpy (temp_filename, ltrans_output_list);
  if (blen > sizeof (".out")
      && strcmp (temp_filename + blen - sizeof (".out") + 1,
		 ".out") == 0)
    temp_filename[blen - sizeof (".out") + 1] = '\0';
  blen = strlen (temp_filename);

  n_sets = VEC_length (ltrans_partition, ltrans_partitions);

  /* Sort partitions by size so small ones are compiled last.
     FIXME: Even when not reordering we may want to output one list for parallel make
     and other for final link command. */
  VEC_qsort (ltrans_partition, ltrans_partitions,
	     flag_toplevel_reorder ? cmp_partitions_size : cmp_partitions_order);
  for (i = 0; i < n_sets; i++)
    {
      size_t len;
      ltrans_partition part = VEC_index (ltrans_partition, ltrans_partitions, i);

      /* Write all the nodes in SET. */
      sprintf (temp_filename + blen, "%u.o", i);
      file = lto_obj_file_open (temp_filename, true);
      if (!file)
	fatal_error ("lto_obj_file_open() failed");

      if (!quiet_flag)
	fprintf (stderr, " %s (%s %i insns)", temp_filename, part->name, part->insns);
      if (cgraph_dump_file)
	{
	  lto_symtab_encoder_iterator lsei;

	  /* Dump both the symbols assigned to this partition and those
	     only in its boundary (encoded but not in the partition).  */
	  fprintf (cgraph_dump_file, "Writing partition %s to file %s, %i insns\n",
		   part->name, temp_filename, part->insns);
	  fprintf (cgraph_dump_file, " Symbols in partition: ");
	  for (lsei = lsei_start_in_partition (part->encoder); !lsei_end_p (lsei);
	       lsei_next_in_partition (&lsei))
	    {
	      symtab_node node = lsei_node (lsei);
	      fprintf (cgraph_dump_file, "%s ", symtab_node_asm_name (node));
	    }
	  fprintf (cgraph_dump_file, "\n Symbols in boundary: ");
	  for (lsei = lsei_start (part->encoder); !lsei_end_p (lsei);
	       lsei_next (&lsei))
	    {
	      symtab_node node = lsei_node (lsei);
	      if (!lto_symtab_encoder_in_partition_p (part->encoder, node))
		{
		  fprintf (cgraph_dump_file, "%s ", symtab_node_asm_name (node));
		  if (symtab_function_p (node)
		      && lto_symtab_encoder_encode_body_p (part->encoder, cgraph (node)))
		    fprintf (cgraph_dump_file, "(body included)");
		  else if (symtab_variable_p (node)
			   && lto_symtab_encoder_encode_initializer_p (part->encoder, varpool (node)))
		    fprintf (cgraph_dump_file, "(initializer included)");
		}
	    }
	  fprintf (cgraph_dump_file, "\n");
	}
      /* Only the first partition may be empty.  */
      gcc_checking_assert (lto_symtab_encoder_size (part->encoder) || !i);

      lto_set_current_out_file (file);

      ipa_write_optimization_summaries (part->encoder);

      lto_set_current_out_file (NULL);
      lto_obj_file_close (file);
      part->encoder = NULL;

      /* Record the unit file name in the LTRANS output list.  */
      len = strlen (temp_filename);
      if (fwrite (temp_filename, 1, len, ltrans_output_list_stream) < len
	  || fwrite ("\n", 1, 1, ltrans_output_list_stream) < 1)
	fatal_error ("writing to LTRANS output list %s: %m",
		     ltrans_output_list);
    }

  lto_stats.num_output_files += n_sets;

  /* Close the LTRANS output list. */
  if (fclose (ltrans_output_list_stream))
    fatal_error ("closing LTRANS output list %s: %m", ltrans_output_list);

  free_ltrans_partitions();

  timevar_pop (TV_WHOPR_WPA_IO);
}
2671
2672
/* If TT is a variable or function decl replace it with its
   prevailing variant.  TT is evaluated more than once and must be
   an lvalue.  */
#define LTO_SET_PREVAIL(tt) \
  do {\
    if ((tt) && VAR_OR_FUNCTION_DECL_P (tt)) \
      tt = lto_symtab_prevailing_decl (tt); \
  } while (0)

/* Ensure that TT isn't a replaceable var or function decl.  */
#define LTO_NO_PREVAIL(tt) \
  gcc_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
2684
2685 /* Given a tree T replace all fields referring to variables or functions
2686 with their prevailing variant. */
static void
lto_fixup_prevailing_decls (tree t)
{
  enum tree_code code = TREE_CODE (t);
  /* Fields asserted with LTO_NO_PREVAIL can never legitimately hold a
     var or function decl; fields fixed with LTO_SET_PREVAIL can.  */
  LTO_NO_PREVAIL (TREE_TYPE (t));
  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    LTO_NO_PREVAIL (TREE_CHAIN (t));
  if (DECL_P (t))
    {
      LTO_NO_PREVAIL (DECL_NAME (t));
      LTO_SET_PREVAIL (DECL_CONTEXT (t));
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
	{
	  LTO_SET_PREVAIL (DECL_SIZE (t));
	  LTO_SET_PREVAIL (DECL_SIZE_UNIT (t));
	  LTO_SET_PREVAIL (DECL_INITIAL (t));
	  LTO_NO_PREVAIL (DECL_ATTRIBUTES (t));
	  LTO_SET_PREVAIL (DECL_ABSTRACT_ORIGIN (t));
	}
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
	{
	  LTO_NO_PREVAIL (t->decl_with_vis.assembler_name);
	  LTO_NO_PREVAIL (DECL_SECTION_NAME (t));
	}
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
	{
	  LTO_NO_PREVAIL (DECL_ARGUMENT_FLD (t));
	  LTO_NO_PREVAIL (DECL_RESULT_FLD (t));
	  LTO_NO_PREVAIL (DECL_VINDEX (t));
	}
      if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
	LTO_SET_PREVAIL (DECL_FUNCTION_PERSONALITY (t));
      if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
	{
	  LTO_NO_PREVAIL (DECL_FIELD_OFFSET (t));
	  LTO_NO_PREVAIL (DECL_BIT_FIELD_TYPE (t));
	  LTO_NO_PREVAIL (DECL_QUALIFIER (t));
	  LTO_NO_PREVAIL (DECL_FIELD_BIT_OFFSET (t));
	  LTO_NO_PREVAIL (DECL_FCONTEXT (t));
	}
    }
  else if (TYPE_P (t))
    {
      LTO_NO_PREVAIL (TYPE_CACHED_VALUES (t));
      LTO_SET_PREVAIL (TYPE_SIZE (t));
      LTO_SET_PREVAIL (TYPE_SIZE_UNIT (t));
      LTO_NO_PREVAIL (TYPE_ATTRIBUTES (t));
      LTO_NO_PREVAIL (TYPE_NAME (t));

      LTO_SET_PREVAIL (TYPE_MINVAL (t));
      LTO_SET_PREVAIL (TYPE_MAXVAL (t));
      LTO_SET_PREVAIL (t->type_non_common.binfo);

      LTO_SET_PREVAIL (TYPE_CONTEXT (t));

      LTO_NO_PREVAIL (TYPE_CANONICAL (t));
      LTO_NO_PREVAIL (TYPE_MAIN_VARIANT (t));
      LTO_NO_PREVAIL (TYPE_NEXT_VARIANT (t));
    }
  else if (EXPR_P (t))
    {
      /* For expressions every operand may reference a var or
	 function decl.  */
      int i;
      for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
	LTO_SET_PREVAIL (TREE_OPERAND (t, i));
    }
  else
    {
      /* Any other code reaching here is a bug.  */
      switch (code)
	{
	case TREE_LIST:
	  LTO_SET_PREVAIL (TREE_VALUE (t));
	  LTO_SET_PREVAIL (TREE_PURPOSE (t));
	  break;
	default:
	  gcc_unreachable ();
	}
    }
}
2765 #undef LTO_SET_PREVAIL
2766 #undef LTO_NO_PREVAIL
2767
2768 /* Helper function of lto_fixup_decls. Walks the var and fn streams in STATE,
2769 replaces var and function decls with the corresponding prevailing def. */
2770
2771 static void
2772 lto_fixup_state (struct lto_in_decl_state *state)
2773 {
2774 unsigned i, si;
2775 struct lto_tree_ref_table *table;
2776
2777 /* Although we only want to replace FUNCTION_DECLs and VAR_DECLs,
2778 we still need to walk from all DECLs to find the reachable
2779 FUNCTION_DECLs and VAR_DECLs. */
2780 for (si = 0; si < LTO_N_DECL_STREAMS; si++)
2781 {
2782 table = &state->streams[si];
2783 for (i = 0; i < table->size; i++)
2784 {
2785 tree *tp = table->trees + i;
2786 if (VAR_OR_FUNCTION_DECL_P (*tp))
2787 *tp = lto_symtab_prevailing_decl (*tp);
2788 }
2789 }
2790 }
2791
2792 /* A callback of htab_traverse. Just extracts a state from SLOT
2793 and calls lto_fixup_state. */
2794
2795 static int
2796 lto_fixup_state_aux (void **slot, void *aux ATTRIBUTE_UNUSED)
2797 {
2798 struct lto_in_decl_state *state = (struct lto_in_decl_state *) *slot;
2799 lto_fixup_state (state);
2800 return 1;
2801 }
2802
2803 /* Fix the decls from all FILES. Replaces each decl with the corresponding
2804 prevailing one. */
2805
2806 static void
2807 lto_fixup_decls (struct lto_file_decl_data **files)
2808 {
2809 unsigned int i;
2810 htab_iterator hi;
2811 tree t;
2812
2813 FOR_EACH_HTAB_ELEMENT (tree_with_vars, t, tree, hi)
2814 lto_fixup_prevailing_decls (t);
2815
2816 for (i = 0; files[i]; i++)
2817 {
2818 struct lto_file_decl_data *file = files[i];
2819 struct lto_in_decl_state *state = file->global_decl_state;
2820 lto_fixup_state (state);
2821
2822 htab_traverse (file->function_decl_states, lto_fixup_state_aux, NULL);
2823 }
2824 }
2825
2826 static GTY((length ("lto_stats.num_input_files + 1"))) struct lto_file_decl_data **all_file_decl_data;
2827
2828 /* Turn file datas for sub files into a single array, so that they look
2829 like separate files for further passes. */
2830
2831 static void
2832 lto_flatten_files (struct lto_file_decl_data **orig, int count, int last_file_ix)
2833 {
2834 struct lto_file_decl_data *n, *next;
2835 int i, k;
2836
2837 lto_stats.num_input_files = count;
2838 all_file_decl_data
2839 = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (count + 1);
2840 /* Set the hooks so that all of the ipa passes can read in their data. */
2841 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
2842 for (i = 0, k = 0; i < last_file_ix; i++)
2843 {
2844 for (n = orig[i]; n != NULL; n = next)
2845 {
2846 all_file_decl_data[k++] = n;
2847 next = n->next;
2848 n->next = NULL;
2849 }
2850 }
2851 all_file_decl_data[k] = NULL;
2852 gcc_assert (k == count);
2853 }
2854
2855 /* Input file data before flattening (i.e. splitting them to subfiles to support
2856 incremental linking. */
2857 static int real_file_count;
2858 static GTY((length ("real_file_count + 1"))) struct lto_file_decl_data **real_file_decl_data;
2859
2860 /* Read all the symbols from the input files FNAMES. NFILES is the
2861 number of files requested in the command line. Instantiate a
2862 global call graph by aggregating all the sub-graphs found in each
2863 file. */
2864
static void
read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
{
  unsigned int i, last_file_ix;
  FILE *resolution;
  struct cgraph_node *node;
  int count = 0;
  struct lto_file_decl_data **decl_data;

  init_cgraph ();

  timevar_push (TV_IPA_LTO_DECL_IN);

  /* DECL_DATA collects one entry per command-line file before
     flattening splits them into per-sub-module entries.  */
  real_file_decl_data
    = decl_data = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (nfiles + 1);
  real_file_count = nfiles;

  /* Read the resolution file. */
  resolution = NULL;
  if (resolution_file_name)
    {
      int t;
      unsigned num_objects;

      resolution = fopen (resolution_file_name, "r");
      if (resolution == NULL)
	fatal_error ("could not open symbol resolution file: %m");

      t = fscanf (resolution, "%u", &num_objects);
      gcc_assert (t == 1);

      /* True, since the plugin splits the archives. */
      gcc_assert (num_objects == nfiles);
    }

  /* Set up the scratch tables used during decl reading and gimple
     type merging; they are torn down again below.  */
  tree_with_vars = htab_create_ggc (101, htab_hash_pointer, htab_eq_pointer,
				    NULL);
  type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
				     tree_int_map_eq, NULL);
  type_pair_cache = XCNEWVEC (struct type_pair_d, GIMPLE_TYPE_PAIR_SIZE);
  gimple_type_leader = ggc_alloc_cleared_vec_gimple_type_leader_entry_s
				(GIMPLE_TYPE_LEADER_SIZE);
  gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);

  if (!quiet_flag)
    fprintf (stderr, "Reading object files:");

  /* Read all of the object files specified on the command line. */
  for (i = 0, last_file_ix = 0; i < nfiles; ++i)
    {
      struct lto_file_decl_data *file_data = NULL;
      if (!quiet_flag)
	{
	  fprintf (stderr, " %s", fnames[i]);
	  fflush (stderr);
	}

      current_lto_file = lto_obj_file_open (fnames[i], false);
      if (!current_lto_file)
	break;

      file_data = lto_file_read (current_lto_file, resolution, &count);
      if (!file_data)
	{
	  lto_obj_file_close (current_lto_file);
	  current_lto_file = NULL;
	  break;
	}

      decl_data[last_file_ix++] = file_data;

      lto_obj_file_close (current_lto_file);
      current_lto_file = NULL;
      ggc_collect ();
    }

  /* COUNT counts sub modules, which may exceed LAST_FILE_IX when
     objects were merged by incremental linking.  */
  lto_flatten_files (decl_data, count, last_file_ix);
  lto_stats.num_input_files = count;
  ggc_free(decl_data);
  real_file_decl_data = NULL;

  if (resolution_file_name)
    fclose (resolution);

  /* Free gimple type merging datastructures. */
  htab_delete (gimple_types);
  gimple_types = NULL;
  htab_delete (type_hash_cache);
  type_hash_cache = NULL;
  free (type_pair_cache);
  type_pair_cache = NULL;
  gimple_type_leader = NULL;
  free_gimple_type_tables ();
  ggc_collect ();

  /* Set the hooks so that all of the ipa passes can read in their data. */
  lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);

  timevar_pop (TV_IPA_LTO_DECL_IN);

  if (!quiet_flag)
    fprintf (stderr, "\nReading the callgraph\n");

  timevar_push (TV_IPA_LTO_CGRAPH_IO);
  /* Read the symtab. */
  input_symtab ();

  /* Store resolutions into the symbol table. */
  if (resolution_map)
    {
      void **res;
      symtab_node snode;

      FOR_EACH_SYMBOL (snode)
	if (symtab_real_symbol_p (snode)
	    && (res = pointer_map_contains (resolution_map,
					    snode->symbol.decl)))
	  snode->symbol.resolution
	    = (enum ld_plugin_symbol_resolution)(size_t)*res;

      pointer_map_destroy (resolution_map);
      resolution_map = NULL;
    }

  timevar_pop (TV_IPA_LTO_CGRAPH_IO);

  if (!quiet_flag)
    fprintf (stderr, "Merging declarations\n");

  timevar_push (TV_IPA_LTO_DECL_MERGE);
  /* Merge global decls. */
  lto_symtab_merge_decls ();

  /* If there were errors during symbol merging bail out, we have no
     good way to recover here. */
  if (seen_error ())
    fatal_error ("errors during merging of translation units");

  /* Fixup all decls. */
  lto_fixup_decls (all_file_decl_data);
  htab_delete (tree_with_vars);
  tree_with_vars = NULL;
  ggc_collect ();

  timevar_pop (TV_IPA_LTO_DECL_MERGE);
  /* Each pass will set the appropriate timer. */

  if (!quiet_flag)
    fprintf (stderr, "Reading summaries\n");

  /* Read the IPA summary data. */
  if (flag_ltrans)
    ipa_read_optimization_summaries ();
  else
    ipa_read_summaries ();

  /* The per-file symtab encoders are no longer needed after the
     summaries have been read.  */
  for (i = 0; all_file_decl_data[i]; i++)
    {
      gcc_assert (all_file_decl_data[i]->symtab_node_encoder);
      lto_symtab_encoder_delete (all_file_decl_data[i]->symtab_node_encoder);
      all_file_decl_data[i]->symtab_node_encoder = NULL;
    }

  /* Finally merge the cgraph according to the decl merging decisions. */
  timevar_push (TV_IPA_LTO_CGRAPH_MERGE);
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Before merging:\n");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  lto_symtab_merge_cgraph_nodes ();
  ggc_collect ();

  /* FIXME: ipa_transforms_to_apply holds list of passes that have optimization
     summaries computed and needs to apply changes. At the moment WHOPR only
     supports inlining, so we can push it here by hand. In future we need to stream
     this field into ltrans compilation. */
  if (flag_ltrans)
    FOR_EACH_DEFINED_FUNCTION (node)
      VEC_safe_push (ipa_opt_pass, heap,
		     node->ipa_transforms_to_apply,
		     (ipa_opt_pass)&pass_ipa_inline);

  timevar_pop (TV_IPA_LTO_CGRAPH_MERGE);

  timevar_push (TV_IPA_LTO_DECL_INIT_IO);

  /* Indicate that the cgraph is built and ready. */
  cgraph_function_flags_ready = true;

  timevar_pop (TV_IPA_LTO_DECL_INIT_IO);
  ggc_free (all_file_decl_data);
  all_file_decl_data = NULL;
}
3060
3061
3062 /* Materialize all the bodies for all the nodes in the callgraph. */
3063
3064 static void
3065 materialize_cgraph (void)
3066 {
3067 tree decl;
3068 struct cgraph_node *node;
3069 unsigned i;
3070 timevar_id_t lto_timer;
3071
3072 if (!quiet_flag)
3073 fprintf (stderr,
3074 flag_wpa ? "Materializing decls:" : "Reading function bodies:");
3075
3076 /* Now that we have input the cgraph, we need to clear all of the aux
3077 nodes and read the functions if we are not running in WPA mode. */
3078 timevar_push (TV_IPA_LTO_GIMPLE_IN);
3079
3080 FOR_EACH_FUNCTION (node)
3081 {
3082 if (node->symbol.lto_file_data)
3083 {
3084 lto_materialize_function (node);
3085 lto_stats.num_input_cgraph_nodes++;
3086 }
3087 }
3088
3089 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
3090
3091 /* Start the appropriate timer depending on the mode that we are
3092 operating in. */
3093 lto_timer = (flag_wpa) ? TV_WHOPR_WPA
3094 : (flag_ltrans) ? TV_WHOPR_LTRANS
3095 : TV_LTO;
3096 timevar_push (lto_timer);
3097
3098 current_function_decl = NULL;
3099 set_cfun (NULL);
3100
3101 /* Inform the middle end about the global variables we have seen. */
3102 FOR_EACH_VEC_ELT (tree, lto_global_var_decls, i, decl)
3103 rest_of_decl_compilation (decl, 1, 0);
3104
3105 if (!quiet_flag)
3106 fprintf (stderr, "\n");
3107
3108 timevar_pop (lto_timer);
3109 }
3110
3111
3112 /* Show various memory usage statistics related to LTO. */
3113 static void
3114 print_lto_report_1 (void)
3115 {
3116 const char *pfx = (flag_lto) ? "LTO" : (flag_wpa) ? "WPA" : "LTRANS";
3117 fprintf (stderr, "%s statistics\n", pfx);
3118
3119 if (gimple_types)
3120 fprintf (stderr, "[%s] GIMPLE type table: size %ld, %ld elements, "
3121 "%ld searches, %ld collisions (ratio: %f)\n", pfx,
3122 (long) htab_size (gimple_types),
3123 (long) htab_elements (gimple_types),
3124 (long) gimple_types->searches,
3125 (long) gimple_types->collisions,
3126 htab_collisions (gimple_types));
3127 else
3128 fprintf (stderr, "[%s] GIMPLE type table is empty\n", pfx);
3129 if (type_hash_cache)
3130 fprintf (stderr, "[%s] GIMPLE type hash table: size %ld, %ld elements, "
3131 "%ld searches, %ld collisions (ratio: %f)\n", pfx,
3132 (long) htab_size (type_hash_cache),
3133 (long) htab_elements (type_hash_cache),
3134 (long) type_hash_cache->searches,
3135 (long) type_hash_cache->collisions,
3136 htab_collisions (type_hash_cache));
3137 else
3138 fprintf (stderr, "[%s] GIMPLE type hash table is empty\n", pfx);
3139
3140 print_gimple_types_stats (pfx);
3141 print_lto_report (pfx);
3142 }
3143
/* Perform whole program analysis (WPA) on the callgraph and write out the
   optimization plan: materialize decls, run the regular IPA passes,
   partition the callgraph and stream each partition to its own file
   for the LTRANS phase.  */

static void
do_whole_program_analysis (void)
{
  symtab_node node;

  timevar_start (TV_PHASE_OPT_GEN);

  /* Note that since we are in WPA mode, materialize_cgraph will not
     actually read in all the function bodies.  It only materializes
     the decls and cgraph nodes so that analysis can be performed.  */
  materialize_cgraph ();

  /* Reading in the cgraph uses different timers, start timing WPA now.  */
  timevar_push (TV_WHOPR_WPA);

  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }

  cgraph_function_flags_ready = true;

  if (cgraph_dump_file)
    {
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  bitmap_obstack_initialize (NULL);
  cgraph_state = CGRAPH_STATE_IPA_SSA;

  /* Run the analysis portion of the regular IPA passes over the
     merged, body-less callgraph.  */
  execute_ipa_pass_list (all_regular_ipa_passes);

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  bitmap_obstack_release (NULL);

  /* We are about to launch the final LTRANS phase, stop the WPA timer.  */
  timevar_pop (TV_WHOPR_WPA);

  /* Choose the partitioning scheme requested on the command line;
     balanced partitioning is the default.  */
  timevar_push (TV_WHOPR_PARTITIONING);
  if (flag_lto_partition_1to1)
    lto_1_to_1_map ();
  else if (flag_lto_partition_max)
    lto_max_map ();
  else
    lto_balanced_map ();

  /* AUX pointers are used by partitioning code to bookkeep number of
     partitions symbol is in.  This is no longer needed.  */
  FOR_EACH_SYMBOL (node)
    node->symbol.aux = NULL;

  lto_stats.num_cgraph_partitions += VEC_length (ltrans_partition,
						 ltrans_partitions);
  timevar_pop (TV_WHOPR_PARTITIONING);

  timevar_stop (TV_PHASE_OPT_GEN);
  timevar_start (TV_PHASE_STREAM_OUT);

  if (!quiet_flag)
    {
      fprintf (stderr, "\nStreaming out");
      fflush (stderr);
    }
  /* Write one object file per partition, carrying the transformation
     decisions made above.  */
  lto_wpa_write_files ();
  if (!quiet_flag)
    fprintf (stderr, "\n");

  timevar_stop (TV_PHASE_STREAM_OUT);

  ggc_collect ();
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }

  /* Show the LTO report before launching LTRANS.  */
  if (flag_lto_report)
    print_lto_report_1 ();
  if (mem_report_wpa)
    dump_memory_report (true);
}
3238
3239
/* Cached EH personality decl; GTY-marked so it is a GC root.  */
static GTY(()) tree lto_eh_personality_decl;

/* Return the LTO personality function decl, computing and caching it
   on first use.  */

tree
lto_eh_personality (void)
{
  if (!lto_eh_personality_decl)
    {
      /* Use the first personality DECL for our personality if we don't
	 support multiple ones.  This ensures that we don't artificially
	 create the need for them in a single-language program.  */
      if (first_personality_decl && !dwarf2out_do_cfi_asm ())
	lto_eh_personality_decl = first_personality_decl;
      else
	lto_eh_personality_decl = lhd_gcc_personality ();
    }

  return lto_eh_personality_decl;
}
3260
3261 /* Set the process name based on the LTO mode. */
3262
3263 static void
3264 lto_process_name (void)
3265 {
3266 if (flag_lto)
3267 setproctitle ("lto1-lto");
3268 if (flag_wpa)
3269 setproctitle ("lto1-wpa");
3270 if (flag_ltrans)
3271 setproctitle ("lto1-ltrans");
3272 }
3273
3274
/* Initialize the LTO front end: process title, streamer hooks,
   section-reading callbacks, statistics, bitmap obstack and GIMPLE
   CFG hooks.  */

static void
lto_init (void)
{
  /* Give the lto1 process a mode-specific title (lto/wpa/ltrans).  */
  lto_process_name ();
  /* Install the tree streamer hooks and reader callbacks used when
     pulling sections out of the input object files.  */
  lto_streamer_hooks_init ();
  lto_reader_init ();
  lto_set_in_hooks (NULL, get_section_data, free_section_data);
  /* Start from a clean statistics slate.  */
  memset (&lto_stats, 0, sizeof (lto_stats));
  bitmap_obstack_initialize (NULL);
  gimple_register_cfg_hooks ();
}
3288
3289
3290 /* Main entry point for the GIMPLE front end. This front end has
3291 three main personalities:
3292
3293 - LTO (-flto). All the object files on the command line are
3294 loaded in memory and processed as a single translation unit.
3295 This is the traditional link-time optimization behavior.
3296
3297 - WPA (-fwpa). Only the callgraph and summary information for
3298 files in the command file are loaded. A single callgraph
3299 (without function bodies) is instantiated for the whole set of
3300 files. IPA passes are only allowed to analyze the call graph
3301 and make transformation decisions. The callgraph is
3302 partitioned, each partition is written to a new object file
3303 together with the transformation decisions.
3304
3305 - LTRANS (-fltrans). Similar to -flto but it prevents the IPA
3306 summary files from running again. Since WPA computed summary
3307 information and decided what transformations to apply, LTRANS
3308 simply applies them. */
3309
void
lto_main (void)
{
  /* LTO is called as a front end, even though it is not a front end.
     Because it is called as a front end, TV_PHASE_PARSING and
     TV_PARSE_GLOBAL are active, and we need to turn them off while
     doing LTO.  Later we turn them back on so they are active up in
     toplev.c.  */
  timevar_pop (TV_PARSE_GLOBAL);
  timevar_stop (TV_PHASE_PARSING);

  timevar_start (TV_PHASE_SETUP);

  /* Initialize the LTO front end.  */
  lto_init ();

  timevar_stop (TV_PHASE_SETUP);
  timevar_start (TV_PHASE_STREAM_IN);

  /* Read all the symbols and call graph from all the files in the
     command line.  */
  read_cgraph_and_symbols (num_in_fnames, in_fnames);

  timevar_stop (TV_PHASE_STREAM_IN);

  /* If reading the symbols produced errors, skip optimization and
     code generation entirely.  */
  if (!seen_error ())
    {
      /* If WPA is enabled analyze the whole call graph and create an
	 optimization plan.  Otherwise, read in all the function
	 bodies and continue with optimization.  */
      if (flag_wpa)
	do_whole_program_analysis ();
      else
	{
	  timevar_start (TV_PHASE_OPT_GEN);

	  materialize_cgraph ();

	  /* Let the middle end know that we have read and merged all of
	     the input files.  */
	  compile ();

	  timevar_stop (TV_PHASE_OPT_GEN);

	  /* FIXME lto, if the processes spawned by WPA fail, we miss
	     the chance to print WPA's report, so WPA will call
	     print_lto_report before launching LTRANS.  If LTRANS was
	     launched directly by the driver we would not need to do
	     this.  */
	  if (flag_lto_report)
	    print_lto_report_1 ();
	}
    }

  /* Here we make LTO pretend to be a parser.  */
  timevar_start (TV_PHASE_PARSING);
  timevar_push (TV_PARSE_GLOBAL);
}
3368
3369 #include "gt-lto-lto.h"