This patch implements generic type query and conversion functions.
[gcc.git] / gcc / lto / lto.c
1 /* Top-level LTO routines.
2 Copyright 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
3 Contributed by CodeSourcery, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "opts.h"
25 #include "toplev.h"
26 #include "tree.h"
27 #include "tree-flow.h"
28 #include "diagnostic-core.h"
29 #include "tm.h"
30 #include "cgraph.h"
31 #include "ggc.h"
32 #include "tree-ssa-operands.h"
33 #include "tree-pass.h"
34 #include "langhooks.h"
35 #include "vec.h"
36 #include "bitmap.h"
37 #include "pointer-set.h"
38 #include "ipa-prop.h"
39 #include "common.h"
40 #include "debug.h"
41 #include "gimple.h"
42 #include "lto.h"
43 #include "lto-tree.h"
44 #include "lto-streamer.h"
45 #include "tree-streamer.h"
46 #include "splay-tree.h"
47 #include "lto-partition.h"
48
49 static GTY(()) tree first_personality_decl;
50
51 /* Returns a hash code for P. */
52
53 static hashval_t
54 hash_name (const void *p)
55 {
56 const struct lto_section_slot *ds = (const struct lto_section_slot *) p;
57 return (hashval_t) htab_hash_string (ds->name);
58 }
59
60
61 /* Returns nonzero if P1 and P2 are equal. */
62
63 static int
64 eq_name (const void *p1, const void *p2)
65 {
66 const struct lto_section_slot *s1 =
67 (const struct lto_section_slot *) p1;
68 const struct lto_section_slot *s2 =
69 (const struct lto_section_slot *) p2;
70
71 return strcmp (s1->name, s2->name) == 0;
72 }
73
74 /* Free lto_section_slot */
75
76 static void
77 free_with_string (void *arg)
78 {
79 struct lto_section_slot *s = (struct lto_section_slot *)arg;
80
81 free (CONST_CAST (char *, s->name));
82 free (arg);
83 }
84
/* Create the hash table mapping section names to lto_section_slot
   entries.  Entries own their name strings: free_with_string releases
   both the name and the slot when an entry is removed.  */

htab_t
lto_obj_create_section_hash_table (void)
{
  return htab_create (37, hash_name, eq_name, free_with_string);
}
92
/* Delete an allocated integer KEY in the splay tree.  KEY points to an
   unsigned HOST_WIDE_INT allocated by lto_splay_tree_insert.  */

static void
lto_splay_tree_delete_id (splay_tree_key key)
{
  free ((void *) key);
}
100
101 /* Compare splay tree node ids A and B. */
102
103 static int
104 lto_splay_tree_compare_ids (splay_tree_key a, splay_tree_key b)
105 {
106 unsigned HOST_WIDE_INT ai;
107 unsigned HOST_WIDE_INT bi;
108
109 ai = *(unsigned HOST_WIDE_INT *) a;
110 bi = *(unsigned HOST_WIDE_INT *) b;
111
112 if (ai < bi)
113 return -1;
114 else if (ai > bi)
115 return 1;
116 return 0;
117 }
118
/* Look up splay tree node by ID in splay tree T.  Passing the address
   of the local ID is safe here: the comparison callback dereferences
   the key and the pointer is not retained after the lookup.  */

static splay_tree_node
lto_splay_tree_lookup (splay_tree t, unsigned HOST_WIDE_INT id)
{
  return splay_tree_lookup (t, (splay_tree_key) &id);
}
126
/* Check if KEY has ID, i.e. whether the unsigned HOST_WIDE_INT the
   splay tree key points to equals ID.  */

static bool
lto_splay_tree_id_equal_p (splay_tree_key key, unsigned HOST_WIDE_INT id)
{
  return *(unsigned HOST_WIDE_INT *) key == id;
}
134
135 /* Insert a splay tree node into tree T with ID as key and FILE_DATA as value.
136 The ID is allocated separately because we need HOST_WIDE_INTs which may
137 be wider than a splay_tree_key. */
138
139 static void
140 lto_splay_tree_insert (splay_tree t, unsigned HOST_WIDE_INT id,
141 struct lto_file_decl_data *file_data)
142 {
143 unsigned HOST_WIDE_INT *idp = XCNEW (unsigned HOST_WIDE_INT);
144 *idp = id;
145 splay_tree_insert (t, (splay_tree_key) idp, (splay_tree_value) file_data);
146 }
147
/* Create a splay tree keyed by allocated unsigned HOST_WIDE_INT ids
   (see lto_splay_tree_insert).  Keys are freed when nodes are deleted;
   values are not owned by the tree (NULL value-delete function).  */

static splay_tree
lto_splay_tree_new (void)
{
  return splay_tree_new (lto_splay_tree_compare_ids,
		         lto_splay_tree_delete_id,
		         NULL);
}
157
/* Return true when NODE has a clone that is analyzed (i.e. we need
   to load its body even if the node itself is not needed).  */

static bool
has_analyzed_clone_p (struct cgraph_node *node)
{
  struct cgraph_node *orig = node;
  node = node->clones;
  if (node)
    while (node != orig)
      {
	if (node->analyzed)
	  return true;
	/* Pre-order walk of the clone tree: descend into a node's own
	   clones first, then move to the next sibling, and otherwise
	   climb back up via clone_of until a sibling exists or we are
	   back at ORIG.  */
	if (node->clones)
	  node = node->clones;
	else if (node->next_sibling_clone)
	  node = node->next_sibling_clone;
	else
	  {
	    while (node != orig && !node->next_sibling_clone)
	      node = node->clone_of;
	    if (node != orig)
	      node = node->next_sibling_clone;
	  }
      }
  return false;
}
185
/* Read the function body for the function associated with NODE.
   In WPA mode only the declaration is announced to the middle end;
   the body is materialized only in non-WPA (LTRANS/plain LTO)
   compilation.  Aborts via fatal_error if the expected function-body
   section is missing from the object file.  */

static void
lto_materialize_function (struct cgraph_node *node)
{
  tree decl;
  struct lto_file_decl_data *file_data;
  const char *data, *name;
  size_t len;

  decl = node->symbol.decl;
  /* Read in functions with body (analyzed nodes)
     and also functions that are needed to produce virtual clones.  */
  if (cgraph_function_with_gimple_body_p (node) || has_analyzed_clone_p (node))
    {
      /* Clones don't need to be read.  */
      if (node->clone_of)
	return;

      /* Load the function body only if not operating in WPA mode.  In
	 WPA mode, the body of the function is not needed.  */
      if (!flag_wpa)
	{
	  file_data = node->symbol.lto_file_data;
	  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));

	  /* We may have renamed the declaration, e.g., a static function.  */
	  name = lto_get_decl_name_mapping (file_data, name);

	  data = lto_get_section_data (file_data, LTO_section_function_body,
				       name, &len);
	  if (!data)
	    fatal_error ("%s: section %s is missing",
			 file_data->file_name,
			 name);

	  gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);

	  /* Set up cfun for DECL before streaming the body in, and pop
	     it again once input is complete.  */
	  push_struct_function (decl);
	  announce_function (decl);
	  lto_input_function_body (file_data, decl, data);
	  /* Record the first personality routine seen; presumably used
	     as the default personality later — confirm against callers.  */
	  if (DECL_FUNCTION_PERSONALITY (decl) && !first_personality_decl)
	    first_personality_decl = DECL_FUNCTION_PERSONALITY (decl);
	  lto_stats.num_function_bodies++;
	  lto_free_section_data (file_data, LTO_section_function_body, name,
				 data, len);
	  pop_cfun ();
	  ggc_collect ();
	}
    }

  /* Let the middle end know about the function.  */
  rest_of_decl_compilation (decl, 1, 0);
}
240
241
/* Decode the content of memory pointed to by DATA in the in decl
   state object STATE.  DATA_IN points to a data_in structure for
   decoding.  Return the address after the decoded object in the
   input.

   Wire format: one uint32 cache index for the function decl (or the
   index of void_type_node for the global state), then for each of the
   LTO_N_DECL_STREAMS streams a uint32 count followed by that many
   uint32 cache indices.  */

static const uint32_t *
lto_read_in_decl_state (struct data_in *data_in, const uint32_t *data,
			struct lto_in_decl_state *state)
{
  uint32_t ix;
  tree decl;
  uint32_t i, j;

  ix = *data++;
  decl = streamer_tree_cache_get (data_in->reader_cache, ix);
  /* void_type_node is the streamed marker for "no function": this is
     the global decl state, recorded with fn_decl == NULL_TREE.  */
  if (TREE_CODE (decl) != FUNCTION_DECL)
    {
      gcc_assert (decl == void_type_node);
      decl = NULL_TREE;
    }
  state->fn_decl = decl;

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      uint32_t size = *data++;
      tree *decls = ggc_alloc_vec_tree (size);

      for (j = 0; j < size; j++)
	decls[j] = streamer_tree_cache_get (data_in->reader_cache, data[j]);

      state->streams[i].size = size;
      state->streams[i].trees = decls;
      data += size;
    }

  return data;
}
279
280
281
/* Global type table.  FIXME, it should be possible to re-use some
   of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
   etc), but those assume that types were built with the various
   build_*_type routines which is not the case with the streamer.  */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_types;
/* Cache of type -> hash value, so each type's structural hash is
   computed only once.  */
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t type_hash_cache;

static hashval_t gimple_type_hash (const void *);

/* Structure used to maintain a cache of some type pairs compared by
   gimple_types_compatible_p when comparing aggregate types.  There are
   three possible values for SAME_P:

	-2: The pair (T1, T2) has just been inserted in the table.
	 0: T1 and T2 are different types.
	 1: T1 and T2 are the same type.  */

struct type_pair_d
{
  unsigned int uid1;	/* Smaller TYPE_UID of the pair.  */
  unsigned int uid2;	/* Larger TYPE_UID of the pair.  */
  signed char same_p;	/* Comparison state, see above.  */
};
typedef struct type_pair_d *type_pair_t;
DEF_VEC_P(type_pair_t);
DEF_VEC_ALLOC_P(type_pair_t,heap);

/* Slot count of the direct-mapped type-pair cache (16381 is prime).  */
#define GIMPLE_TYPE_PAIR_SIZE 16381
struct type_pair_d *type_pair_cache;
313
314
/* Lookup the pair of types T1 and T2 in the type-pair cache.  Insert a
   new entry (with SAME_P == -2, "just inserted") if none existed,
   evicting any previous occupant of the slot — the cache is
   direct-mapped.  */

static inline type_pair_t
lookup_type_pair (tree t1, tree t2)
{
  unsigned int index;
  unsigned int uid1, uid2;

  /* Canonicalize the order so (T1, T2) and (T2, T1) hit the same slot.  */
  if (TYPE_UID (t1) < TYPE_UID (t2))
    {
      uid1 = TYPE_UID (t1);
      uid2 = TYPE_UID (t2);
    }
  else
    {
      uid1 = TYPE_UID (t2);
      uid2 = TYPE_UID (t1);
    }
  gcc_checking_assert (uid1 != uid2);

  /* iterative_hash_hashval_t would imply function calls.
     We know that UIDs are in a limited range, so a simple
     shift-and-add mix suffices here.  */
  index = ((((unsigned HOST_WIDE_INT)uid1 << HOST_BITS_PER_WIDE_INT / 2) + uid2)
	   % GIMPLE_TYPE_PAIR_SIZE);
  if (type_pair_cache [index].uid1 == uid1
      && type_pair_cache [index].uid2 == uid2)
    return &type_pair_cache[index];

  type_pair_cache [index].uid1 = uid1;
  type_pair_cache [index].uid2 = uid2;
  type_pair_cache [index].same_p = -2;

  return &type_pair_cache[index];
}
350
/* Per pointer state for the SCC finding.  The on_sccstack flag
   is not strictly required, it is true when there is no hash value
   recorded for the type and false otherwise.  But querying that
   is slower.  */

struct sccs
{
  unsigned int dfsnum;		/* DFS preorder number of this node.  */
  unsigned int low;		/* Lowest dfsnum reachable (Tarjan SCC).  */
  bool on_sccstack;		/* Still on the SCC stack?  */
  union {
    hashval_t hash;		/* Used by the type-hashing walk.  */
    signed char same_p;		/* Used by the type-comparison walk.  */
  } u;
};

/* DFS counters: next_dfs_num for the hashing walk, gtc_next_dfs_num
   for the compatibility (gtc_*) walk.  */
static unsigned int next_dfs_num;
static unsigned int gtc_next_dfs_num;

/* GIMPLE type merging cache.  A direct-mapped cache based on TYPE_UID.  */

typedef struct GTY(()) gimple_type_leader_entry_s {
  tree type;
  tree leader;
} gimple_type_leader_entry;

#define GIMPLE_TYPE_LEADER_SIZE 16381
static GTY((length("GIMPLE_TYPE_LEADER_SIZE")))
  gimple_type_leader_entry *gimple_type_leader;
380
/* Lookup an existing leader for T and return it or NULL_TREE, if
   there is none in the cache.  */

static inline tree
gimple_lookup_type_leader (tree t)
{
  gimple_type_leader_entry *leader;

  /* Direct-mapped cache: a slot whose TYPE field doesn't match T means
     either T was never registered or its entry has been evicted.  */
  leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
  if (leader->type != t)
    return NULL_TREE;

  return leader->leader;
}
395
396
/* Return true if T1 and T2 have the same name.  Two anonymous types
   compare equal; an anonymous type and a named type compare unequal.
   (An earlier version had a FOR_COMPLETION_P parameter controlling
   the anonymous-type cases; it no longer exists.)  */

static bool
compare_type_names_p (tree t1, tree t2)
{
  tree name1 = TYPE_NAME (t1);
  tree name2 = TYPE_NAME (t2);

  /* Exactly one of the two is anonymous.  */
  if ((name1 != NULL_TREE) != (name2 != NULL_TREE))
    return false;

  /* Both anonymous.  */
  if (name1 == NULL_TREE)
    return true;

  /* Either both should be a TYPE_DECL or both an IDENTIFIER_NODE.  */
  if (TREE_CODE (name1) != TREE_CODE (name2))
    return false;

  if (TREE_CODE (name1) == TYPE_DECL)
    name1 = DECL_NAME (name1);
  gcc_checking_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);

  if (TREE_CODE (name2) == TYPE_DECL)
    name2 = DECL_NAME (name2);
  gcc_checking_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);

  /* Identifiers can be compared with pointer equality rather
     than a string comparison.  */
  if (name1 == name2)
    return true;

  return false;
}
432
433 static bool
434 gimple_types_compatible_p_1 (tree, tree, type_pair_t,
435 VEC(type_pair_t, heap) **,
436 struct pointer_map_t *, struct obstack *);
437
/* DFS visit the edge from the callers type pair with state *STATE to
   the pair T1, T2.  Update the merging status if it is not part of
   the SCC containing the callers pair and return it.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk
   done.  (The FOR_MERGING_P mode mentioned by an earlier version of
   this comment no longer exists.)  */

static bool
gtc_visit (tree t1, tree t2,
	   struct sccs *state,
	   VEC(type_pair_t, heap) **sccstack,
	   struct pointer_map_t *sccstate,
	   struct obstack *sccstate_obstack)
{
  struct sccs *cstate = NULL;
  type_pair_t p;
  void **slot;
  tree leader1, leader2;

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
	return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
	  || FIXED_POINT_TYPE_P (t1))
	return true;

      /* For other types fall through to more complex checks.  */
    }

  /* If the types have been previously registered and found equal
     they still are.  */
  leader1 = gimple_lookup_type_leader (t1);
  leader2 = gimple_lookup_type_leader (t2);
  if (leader1 == t2
      || t1 == leader2
      || (leader1 && leader1 == leader2))
    return true;

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keeping the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash (t1) != gimple_type_hash (t2))
    return false;

  /* Allocate a new cache entry for this comparison.  */
  p = lookup_type_pair (t1, t2);
  if (p->same_p == 0 || p->same_p == 1)
    {
      /* We have already decided whether T1 and T2 are the
	 same, return the cached result.  */
      return p->same_p == 1;
    }

  if ((slot = pointer_map_contains (sccstate, p)) != NULL)
    cstate = (struct sccs *)*slot;
  /* Not yet visited.  DFS recurse.  */
  if (!cstate)
    {
      gimple_types_compatible_p_1 (t1, t2, p,
				   sccstack, sccstate, sccstate_obstack);
      cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
      state->low = MIN (state->low, cstate->low);
    }
  /* If the type is still on the SCC stack adjust the parents low.  */
  if (cstate->dfsnum < state->dfsnum
      && cstate->on_sccstack)
    state->low = MIN (cstate->dfsnum, state->low);

  /* Return the current lattice value.  We start with an equality
     assumption so types part of a SCC will be optimistically
     treated equal unless proven otherwise.  */
  return cstate->u.same_p;
}
555
/* Worker for gimple_types_compatible_p: structurally compare T1 and T2
   using Tarjan's SCC algorithm so that mutually-recursive types are
   compared optimistically.  P is the cache entry for the pair (in the
   "just inserted" state, SAME_P == -2).  SCCSTACK, SCCSTATE and
   SCCSTATE_OBSTACK are state for the DFS walk done.  When the SCC
   rooted at P is complete, every pair in it has its cached SAME_P set
   to the final verdict.  */

static bool
gimple_types_compatible_p_1 (tree t1, tree t2, type_pair_t p,
			     VEC(type_pair_t, heap) **sccstack,
			     struct pointer_map_t *sccstate,
			     struct obstack *sccstate_obstack)
{
  struct sccs *state;

  gcc_assert (p->same_p == -2);

  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, p) = state;

  VEC_safe_push (type_pair_t, heap, *sccstack, p);
  state->dfsnum = gtc_next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;
  /* Start with an equality assumption.  As we DFS recurse into child
     SCCs this assumption may get revisited.  */
  state->u.same_p = 1;

  /* The struct tags shall compare equal.  */
  if (!compare_type_names_p (t1, t2))
    goto different_types;

  /* The main variant of both types should compare equal.  */
  if (TYPE_MAIN_VARIANT (t1) != t1
      || TYPE_MAIN_VARIANT (t2) != t2)
    {
      if (!gtc_visit (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;
    }

  /* We may not merge typedef types to the same type in different
     contexts.  NOTE(review): only T1's context is guarded here; if T2
     lacks a TYPE_DECL context, gtc_visit receives NULL_TREE and
     returns false, which still yields "different".  */
  if (TYPE_NAME (t1)
      && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
      && DECL_CONTEXT (TYPE_NAME (t1))
      && TYPE_P (DECL_CONTEXT (TYPE_NAME (t1))))
    {
      if (!gtc_visit (DECL_CONTEXT (TYPE_NAME (t1)),
		      DECL_CONTEXT (TYPE_NAME (t2)),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;
    }

  /* If their attributes are not the same they can't be the same type.  */
  if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
    goto different_types;

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case VECTOR_TYPE:
    case COMPLEX_TYPE:
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;
      goto same_types;

    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
	 the number of elements are the same.  */
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
		      state, sccstack, sccstate, sccstate_obstack)
	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
	goto different_types;
      else
	{
	  tree i1 = TYPE_DOMAIN (t1);
	  tree i2 = TYPE_DOMAIN (t2);

	  /* For an incomplete external array, the type domain can be
	     NULL_TREE.  Check this condition also.  */
	  if (i1 == NULL_TREE && i2 == NULL_TREE)
	    goto same_types;
	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
	    goto different_types;
	  else
	    {
	      tree min1 = TYPE_MIN_VALUE (i1);
	      tree min2 = TYPE_MIN_VALUE (i2);
	      tree max1 = TYPE_MAX_VALUE (i1);
	      tree max2 = TYPE_MAX_VALUE (i2);

	      /* The minimum/maximum values have to be the same.  */
	      if ((min1 == min2
		   || (min1 && min2
		       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
			    && TREE_CODE (min2) == PLACEHOLDER_EXPR)
			   || operand_equal_p (min1, min2, 0))))
		  && (max1 == max2
		      || (max1 && max2
			  && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
			       && TREE_CODE (max2) == PLACEHOLDER_EXPR)
			      || operand_equal_p (max1, max2, 0)))))
		goto same_types;
	      else
		goto different_types;
	    }
	}

    case METHOD_TYPE:
      /* Method types should belong to the same class.  */
      if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;

      /* Fallthru  */

    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
	 are the same.  */
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;

      if (!comp_type_attributes (t1, t2))
	goto different_types;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
	goto same_types;
      else
	{
	  tree parms1, parms2;

	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
	       parms1 && parms2;
	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
	    {
	      if (!gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2),
			      state, sccstack, sccstate, sccstate_obstack))
		goto different_types;
	    }

	  /* A leftover parameter on either side means differing arity.  */
	  if (parms1 || parms2)
	    goto different_types;

	  goto same_types;
	}

    case OFFSET_TYPE:
      {
	if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
			state, sccstack, sccstate, sccstate_obstack)
	    || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
			   TYPE_OFFSET_BASETYPE (t2),
			   state, sccstack, sccstate, sccstate_obstack))
	  goto different_types;

	goto same_types;
      }

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
	/* If the two pointers have different ref-all attributes,
	   they can't be the same type.  */
	if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
	  goto different_types;

	/* Otherwise, pointer and reference types are the same if the
	   pointed-to types are the same.  */
	if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
		       state, sccstack, sccstate, sccstate_obstack))
	  goto same_types;

	goto different_types;
      }

    case INTEGER_TYPE:
    case BOOLEAN_TYPE:
      {
	tree min1 = TYPE_MIN_VALUE (t1);
	tree max1 = TYPE_MAX_VALUE (t1);
	tree min2 = TYPE_MIN_VALUE (t2);
	tree max2 = TYPE_MAX_VALUE (t2);
	bool min_equal_p = false;
	bool max_equal_p = false;

	/* If either type has a minimum value, the other type must
	   have the same.  */
	if (min1 == NULL_TREE && min2 == NULL_TREE)
	  min_equal_p = true;
	else if (min1 && min2 && operand_equal_p (min1, min2, 0))
	  min_equal_p = true;

	/* Likewise, if either type has a maximum value, the other
	   type must have the same.  */
	if (max1 == NULL_TREE && max2 == NULL_TREE)
	  max_equal_p = true;
	else if (max1 && max2 && operand_equal_p (max1, max2, 0))
	  max_equal_p = true;

	if (!min_equal_p || !max_equal_p)
	  goto different_types;

	goto same_types;
      }

    case ENUMERAL_TYPE:
      {
	/* FIXME lto, we cannot check bounds on enumeral types because
	   different front ends will produce different values.
	   In C, enumeral types are integers, while in C++ each element
	   will have its own symbolic value.  We should decide how enums
	   are to be represented in GIMPLE and have each front end lower
	   to that.  */
	tree v1, v2;

	/* For enumeral types, all the values must be the same.  */
	if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
	  goto same_types;

	for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
	     v1 && v2;
	     v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
	  {
	    tree c1 = TREE_VALUE (v1);
	    tree c2 = TREE_VALUE (v2);

	    if (TREE_CODE (c1) == CONST_DECL)
	      c1 = DECL_INITIAL (c1);

	    if (TREE_CODE (c2) == CONST_DECL)
	      c2 = DECL_INITIAL (c2);

	    if (tree_int_cst_equal (c1, c2) != 1)
	      goto different_types;

	    if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
	      goto different_types;
	  }

	/* If one enumeration has more values than the other, they
	   are not the same.  */
	if (v1 || v2)
	  goto different_types;

	goto same_types;
      }

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f1, f2;

	/* For aggregate types, all the fields must be the same.  */
	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
	     f1 && f2;
	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	  {
	    /* Different field kinds are not compatible.  */
	    if (TREE_CODE (f1) != TREE_CODE (f2))
	      goto different_types;
	    /* Field decls must have the same name and offset.  */
	    if (TREE_CODE (f1) == FIELD_DECL
		&& (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
		    || !gimple_compare_field_offset (f1, f2)))
	      goto different_types;
	    /* All entities should have the same name and type.  */
	    if (DECL_NAME (f1) != DECL_NAME (f2)
		|| !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2),
			       state, sccstack, sccstate, sccstate_obstack))
	      goto different_types;
	  }

	/* If one aggregate has more fields than the other, they
	   are not the same.  */
	if (f1 || f2)
	  goto different_types;

	goto same_types;
      }

    default:
      gcc_unreachable ();
    }

  /* Common exit path for types that are not compatible.  */
different_types:
  state->u.same_p = 0;
  goto pop;

  /* Common exit path for types that are compatible.  */
same_types:
  gcc_assert (state->u.same_p == 1);

pop:
  /* Tarjan SCC root test: if no back edge escaped this subtree, the
     nodes above P on the stack form a complete SCC — pop them and
     commit the final verdict to the pair cache.  */
  if (state->low == state->dfsnum)
    {
      type_pair_t x;

      /* Pop off the SCC and set its cache values to the final
	 comparison result.  */
      do
	{
	  struct sccs *cstate;
	  x = VEC_pop (type_pair_t, *sccstack);
	  cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	  cstate->on_sccstack = false;
	  x->same_p = state->u.same_p;
	}
      while (x != p);
    }

  return state->u.same_p;
}
870
/* Return true iff T1 and T2 are structurally identical.  Handles the
   cheap discriminating checks inline and falls back to the cached,
   SCC-based structural comparison for aggregates with possibly
   self-referential types.  (The FOR_MERGING_P parameter mentioned by
   an earlier version of this comment no longer exists.)  */

static bool
gimple_types_compatible_p (tree t1, tree t2)
{
  VEC(type_pair_t, heap) *sccstack = NULL;
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
  type_pair_t p = NULL;
  bool res;
  tree leader1, leader2;

  /* Before starting to set up the SCC machinery handle simple cases.  */

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
	return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
	  || FIXED_POINT_TYPE_P (t1))
	return true;

      /* For other types fall through to more complex checks.  */
    }

  /* If the types have been previously registered and found equal
     they still are.  */
  leader1 = gimple_lookup_type_leader (t1);
  leader2 = gimple_lookup_type_leader (t2);
  if (leader1 == t2
      || t1 == leader2
      || (leader1 && leader1 == leader2))
    return true;

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keeping the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash (t1) != gimple_type_hash (t2))
    return false;

  /* If we've visited this type pair before (in the case of aggregates
     with self-referential types), and we made a decision, return it.  */
  p = lookup_type_pair (t1, t2);
  if (p->same_p == 0 || p->same_p == 1)
    {
      /* We have already decided whether T1 and T2 are the
	 same, return the cached result.  */
      return p->same_p == 1;
    }

  /* Now set up the SCC machinery for the comparison.  */
  gtc_next_dfs_num = 1;
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  res = gimple_types_compatible_p_1 (t1, t2, p,
				     &sccstack, sccstate, &sccstate_obstack);
  VEC_free (type_pair_t, heap, sccstack);
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);

  return res;
}
979
980 static hashval_t
981 iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
982 struct pointer_map_t *, struct obstack *);
983
984 /* DFS visit the edge from the callers type with state *STATE to T.
985 Update the callers type hash V with the hash for T if it is not part
986 of the SCC containing the callers type and return it.
987 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
988
989 static hashval_t
990 visit (tree t, struct sccs *state, hashval_t v,
991 VEC (tree, heap) **sccstack,
992 struct pointer_map_t *sccstate,
993 struct obstack *sccstate_obstack)
994 {
995 struct sccs *cstate = NULL;
996 struct tree_int_map m;
997 void **slot;
998
999 /* If there is a hash value recorded for this type then it can't
1000 possibly be part of our parent SCC. Simply mix in its hash. */
1001 m.base.from = t;
1002 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
1003 && *slot)
1004 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);
1005
1006 if ((slot = pointer_map_contains (sccstate, t)) != NULL)
1007 cstate = (struct sccs *)*slot;
1008 if (!cstate)
1009 {
1010 hashval_t tem;
1011 /* Not yet visited. DFS recurse. */
1012 tem = iterative_hash_gimple_type (t, v,
1013 sccstack, sccstate, sccstate_obstack);
1014 if (!cstate)
1015 cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
1016 state->low = MIN (state->low, cstate->low);
1017 /* If the type is no longer on the SCC stack and thus is not part
1018 of the parents SCC mix in its hash value. Otherwise we will
1019 ignore the type for hashing purposes and return the unaltered
1020 hash value. */
1021 if (!cstate->on_sccstack)
1022 return tem;
1023 }
1024 if (cstate->dfsnum < state->dfsnum
1025 && cstate->on_sccstack)
1026 state->low = MIN (cstate->dfsnum, state->low);
1027
1028 /* We are part of our parents SCC, skip this type during hashing
1029 and return the unaltered hash value. */
1030 return v;
1031 }
1032
1033 /* Hash NAME with the previous hash value V and return it. */
1034
1035 static hashval_t
1036 iterative_hash_name (tree name, hashval_t v)
1037 {
1038 if (!name)
1039 return v;
1040 v = iterative_hash_hashval_t (TREE_CODE (name), v);
1041 if (TREE_CODE (name) == TYPE_DECL)
1042 name = DECL_NAME (name);
1043 if (!name)
1044 return v;
1045 gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
1046 return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
1047 }
1048
/* A type, hashvalue pair for sorting SCC members.  */

struct type_hash_pair {
  tree type;		/* The type.  */
  hashval_t hash;	/* Its hash value.  */
};
1055
1056 /* Compare two type, hashvalue pairs. */
1057
1058 static int
1059 type_hash_pair_compare (const void *p1_, const void *p2_)
1060 {
1061 const struct type_hash_pair *p1 = (const struct type_hash_pair *) p1_;
1062 const struct type_hash_pair *p2 = (const struct type_hash_pair *) p2_;
1063 if (p1->hash < p2->hash)
1064 return -1;
1065 else if (p1->hash > p2->hash)
1066 return 1;
1067 return 0;
1068 }
1069
/* Returning a hash value for gimple type TYPE combined with VAL.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.

   To hash a type we end up hashing in types that are reachable.
   Through pointers we can end up with cycles which messes up the
   required property that we need to compute the same hash value
   for structurally equivalent types.  To avoid this we have to
   hash all types in a cycle (the SCC) in a commutative way.  The
   easiest way is to not mix in the hashes of the SCC members at
   all.  To make this work we have to delay setting the hash
   values of the SCC until it is complete.  */

static hashval_t
iterative_hash_gimple_type (tree type, hashval_t val,
			    VEC(tree, heap) **sccstack,
			    struct pointer_map_t *sccstate,
			    struct obstack *sccstate_obstack)
{
  hashval_t v;
  void **slot;
  struct sccs *state;

  /* Not visited during this DFS walk.  */
  gcc_checking_assert (!pointer_map_contains (sccstate, type));
  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, type) = state;

  /* Tarjan-style bookkeeping: push TYPE and record its DFS number.  */
  VEC_safe_push (tree, heap, *sccstack, type);
  state->dfsnum = next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;

  /* Combine a few common features of types so that types are grouped into
     smaller sets; when searching for existing matching types to merge,
     only existing types having the same features as the new type will be
     checked.  */
  v = iterative_hash_name (TYPE_NAME (type), 0);
  if (TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_CONTEXT (TYPE_NAME (type))
      && TYPE_P (DECL_CONTEXT (TYPE_NAME (type))))
    v = visit (DECL_CONTEXT (TYPE_NAME (type)), state, v,
	       sccstack, sccstate, sccstate_obstack);

  /* Factor in the variant structure.  */
  if (TYPE_MAIN_VARIANT (type) != type)
    v = visit (TYPE_MAIN_VARIANT (type), state, v,
	       sccstack, sccstate, sccstate_obstack);

  v = iterative_hash_hashval_t (TREE_CODE (type), v);
  v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
  v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);

  /* Do not hash the types size as this will cause differences in
     hash values for the complete vs. the incomplete type variant.  */

  /* Incorporate common features of numerical types.  */
  if (INTEGRAL_TYPE_P (type)
      || SCALAR_FLOAT_TYPE_P (type)
      || FIXED_POINT_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
      v = iterative_hash_hashval_t (TYPE_MODE (type), v);
      v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
    }

  /* For pointer and reference types, fold in information about the type
     pointed to.  */
  if (POINTER_TYPE_P (type))
    v = visit (TREE_TYPE (type), state, v,
	       sccstack, sccstate, sccstate_obstack);

  /* For integer types hash the types min/max values and the string flag.  */
  if (TREE_CODE (type) == INTEGER_TYPE)
    {
      /* OMP lowering can introduce error_mark_node in place of
	 random local decls in types.  */
      if (TYPE_MIN_VALUE (type) != error_mark_node)
	v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
      if (TYPE_MAX_VALUE (type) != error_mark_node)
	v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
    }

  /* For array types hash the domain and the string flag.  */
  if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
    {
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
      v = visit (TYPE_DOMAIN (type), state, v,
		 sccstack, sccstate, sccstate_obstack);
    }

  /* Recurse for aggregates with a single element type.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    v = visit (TREE_TYPE (type), state, v,
	       sccstack, sccstate, sccstate_obstack);

  /* Incorporate function return and argument types.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      unsigned na;
      tree p;

      /* For method types also incorporate their parent class.  */
      if (TREE_CODE (type) == METHOD_TYPE)
	v = visit (TYPE_METHOD_BASETYPE (type), state, v,
		   sccstack, sccstate, sccstate_obstack);

      /* Check result and argument types.  */
      v = visit (TREE_TYPE (type), state, v,
		 sccstack, sccstate, sccstate_obstack);
      for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
	{
	  v = visit (TREE_VALUE (p), state, v,
		     sccstack, sccstate, sccstate_obstack);
	  na++;
	}

      /* The argument count makes foo(int) and foo(int,int) distinct.  */
      v = iterative_hash_hashval_t (na, v);
    }

  if (RECORD_OR_UNION_TYPE_P (type))
    {
      unsigned nf;
      tree f;

      /* Hash field names and types, plus the field count.  */
      for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
	{
	  v = iterative_hash_name (DECL_NAME (f), v);
	  v = visit (TREE_TYPE (f), state, v,
		     sccstack, sccstate, sccstate_obstack);
	  nf++;
	}

      v = iterative_hash_hashval_t (nf, v);
    }

  /* Record hash for us.  */
  state->u.hash = v;

  /* See if we found an SCC.  */
  if (state->low == state->dfsnum)
    {
      tree x;
      struct tree_int_map *m;

      /* Pop off the SCC and set its hash values.  */
      x = VEC_pop (tree, *sccstack);
      /* Optimize SCC size one.  */
      if (x == type)
	{
	  state->on_sccstack = false;
	  m = ggc_alloc_cleared_tree_int_map ();
	  m->base.from = x;
	  m->to = v;
	  slot = htab_find_slot (type_hash_cache, m, INSERT);
	  gcc_assert (!*slot);
	  *slot = (void *) m;
	}
      else
	{
	  struct sccs *cstate;
	  unsigned first, i, size, j;
	  struct type_hash_pair *pairs;
	  /* Pop off the SCC and build an array of type, hash pairs.
	     Note X was already popped above, hence the +1 in SIZE.  */
	  first = VEC_length (tree, *sccstack) - 1;
	  while (VEC_index (tree, *sccstack, first) != type)
	    --first;
	  size = VEC_length (tree, *sccstack) - first + 1;
	  pairs = XALLOCAVEC (struct type_hash_pair, size);
	  i = 0;
	  cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	  cstate->on_sccstack = false;
	  pairs[i].type = x;
	  pairs[i].hash = cstate->u.hash;
	  do
	    {
	      x = VEC_pop (tree, *sccstack);
	      cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	      cstate->on_sccstack = false;
	      ++i;
	      pairs[i].type = x;
	      pairs[i].hash = cstate->u.hash;
	    }
	  while (x != type);
	  gcc_assert (i + 1 == size);
	  /* Sort the arrays of type, hash pairs so that when we mix in
	     all members of the SCC the hash value becomes independent on
	     the order we visited the SCC.  Disregard hashes equal to
	     the hash of the type we mix into because we cannot guarantee
	     a stable sort for those across different TUs.  */
	  qsort (pairs, size, sizeof (struct type_hash_pair),
		 type_hash_pair_compare);
	  for (i = 0; i < size; ++i)
	    {
	      hashval_t hash;
	      m = ggc_alloc_cleared_tree_int_map ();
	      m->base.from = pairs[i].type;
	      hash = pairs[i].hash;
	      /* Skip same hashes.  */
	      for (j = i + 1; j < size && pairs[j].hash == pairs[i].hash; ++j)
		;
	      /* Mix in the hashes of all other SCC members whose hash
		 differs from ours, wrapping around the sorted array.  */
	      for (; j < size; ++j)
		hash = iterative_hash_hashval_t (pairs[j].hash, hash);
	      for (j = 0; pairs[j].hash != pairs[i].hash; ++j)
		hash = iterative_hash_hashval_t (pairs[j].hash, hash);
	      m->to = hash;
	      if (pairs[i].type == type)
		v = hash;
	      slot = htab_find_slot (type_hash_cache, m, INSERT);
	      gcc_assert (!*slot);
	      *slot = (void *) m;
	    }
	}
    }

  return iterative_hash_hashval_t (v, val);
}
1290
1291 /* Returns a hash value for P (assumed to be a type). The hash value
1292 is computed using some distinguishing features of the type. Note
1293 that we cannot use pointer hashing here as we may be dealing with
1294 two distinct instances of the same type.
1295
1296 This function should produce the same hash value for two compatible
1297 types according to gimple_types_compatible_p. */
1298
1299 static hashval_t
1300 gimple_type_hash (const void *p)
1301 {
1302 const_tree t = (const_tree) p;
1303 VEC(tree, heap) *sccstack = NULL;
1304 struct pointer_map_t *sccstate;
1305 struct obstack sccstate_obstack;
1306 hashval_t val;
1307 void **slot;
1308 struct tree_int_map m;
1309
1310 m.base.from = CONST_CAST_TREE (t);
1311 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
1312 && *slot)
1313 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);
1314
1315 /* Perform a DFS walk and pre-hash all reachable types. */
1316 next_dfs_num = 1;
1317 sccstate = pointer_map_create ();
1318 gcc_obstack_init (&sccstate_obstack);
1319 val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
1320 &sccstack, sccstate, &sccstate_obstack);
1321 VEC_free (tree, heap, sccstack);
1322 pointer_map_destroy (sccstate);
1323 obstack_free (&sccstate_obstack, NULL);
1324
1325 return val;
1326 }
1327
1328 /* Returns nonzero if P1 and P2 are equal. */
1329
1330 static int
1331 gimple_type_eq (const void *p1, const void *p2)
1332 {
1333 const_tree t1 = (const_tree) p1;
1334 const_tree t2 = (const_tree) p2;
1335 return gimple_types_compatible_p (CONST_CAST_TREE (t1),
1336 CONST_CAST_TREE (t2));
1337 }
1338
1339
1340 /* Worker for gimple_register_type.
1341 Register type T in the global type table gimple_types.
1342 When REGISTERING_MV is false first recurse for the main variant of T. */
1343
1344 static tree
1345 gimple_register_type_1 (tree t, bool registering_mv)
1346 {
1347 void **slot;
1348 gimple_type_leader_entry *leader;
1349
1350 /* If we registered this type before return the cached result. */
1351 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
1352 if (leader->type == t)
1353 return leader->leader;
1354
1355 /* Always register the main variant first. This is important so we
1356 pick up the non-typedef variants as canonical, otherwise we'll end
1357 up taking typedef ids for structure tags during comparison.
1358 It also makes sure that main variants will be merged to main variants.
1359 As we are operating on a possibly partially fixed up type graph
1360 do not bother to recurse more than once, otherwise we may end up
1361 walking in circles.
1362 If we are registering a main variant it will either remain its
1363 own main variant or it will be merged to something else in which
1364 case we do not care for the main variant leader. */
1365 if (!registering_mv
1366 && TYPE_MAIN_VARIANT (t) != t)
1367 gimple_register_type_1 (TYPE_MAIN_VARIANT (t), true);
1368
1369 /* See if we already have an equivalent type registered. */
1370 slot = htab_find_slot (gimple_types, t, INSERT);
1371 if (*slot
1372 && *(tree *)slot != t)
1373 {
1374 tree new_type = (tree) *((tree *) slot);
1375 leader->type = t;
1376 leader->leader = new_type;
1377 return new_type;
1378 }
1379
1380 /* If not, insert it to the cache and the hash. */
1381 leader->type = t;
1382 leader->leader = t;
1383 *slot = (void *) t;
1384 return t;
1385 }
1386
/* Register type T in the global type table gimple_types.
   If another type T', compatible with T, already existed in
   gimple_types then return T', otherwise return T.  This is used by
   LTO to merge identical types read from different TUs.
   T must be a type node; non-types are rejected by the assert.  */

static tree
gimple_register_type (tree t)
{
  gcc_assert (TYPE_P (t));
  return gimple_register_type_1 (t, false);
}
1398
/* Re-register TT only when it carries the TREE_VISITED mark, which
   uniquify_nodes sets on non-prevailing types; otherwise return TT
   unchanged.  This avoids redundant hash lookups for types that are
   already their own leader.  */
#define GIMPLE_REGISTER_TYPE(tt) \
  (TREE_VISITED (tt) ? gimple_register_type (tt) : tt)
1401
1402
1403
/* A hashtable of trees that potentially refer to variables or functions
   that must be replaced with their prevailing variant.  Entries are
   kept alive only while still GC-marked (if_marked).  */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node))) htab_t
  tree_with_vars;

/* Remember that T is a tree that (potentially) refers to a variable
   or function decl that may be replaced with its prevailing variant.
   Insertion is idempotent: T is its own key, so re-adding the same
   tree just rewrites the same slot.  */
static void
remember_with_vars (tree t)
{
  *(tree *) htab_find_slot (tree_with_vars, t, INSERT) = t;
}
1416
/* Fix up a single tree reference TT: replace a type with its merged
   variant, queue the enclosing tree for decl replacement when TT is a
   public variable or function decl, and re-share INTEGER_CSTs under
   their merged type.  TT must be an assignable lvalue because the
   macro writes the replacement back.  NOTE: the macro also references
   a variable `t' (the tree being walked) from the expansion context.  */
#define LTO_FIXUP_TREE(tt) \
  do \
    { \
      if (tt) \
	{ \
	  if (TYPE_P (tt)) \
	    (tt) = GIMPLE_REGISTER_TYPE (tt); \
	  if (VAR_OR_FUNCTION_DECL_P (tt) && TREE_PUBLIC (tt)) \
	    remember_with_vars (t); \
	  if (TREE_CODE (tt) == INTEGER_CST) \
	    (tt) = fixup_integer_cst (tt); \
	} \
    } while (0)

static void lto_fixup_types (tree);
1432
1433 /* Return integer_cst T with updated type. */
1434
1435 static tree
1436 fixup_integer_cst (tree t)
1437 {
1438 tree type = GIMPLE_REGISTER_TYPE (TREE_TYPE (t));
1439
1440 if (type == TREE_TYPE (t))
1441 return t;
1442
1443 /* If overflow was set, streamer_read_integer_cst
1444 produced local copy of T. */
1445 if (TREE_OVERFLOW (t))
1446 {
1447 TREE_TYPE (t) = type;
1448 return t;
1449 }
1450 else
1451 /* Otherwise produce new shared node for the new type. */
1452 return build_int_cst_wide (type, TREE_INT_CST_LOW (t),
1453 TREE_INT_CST_HIGH (t));
1454 }
1455
/* Fix up fields of a tree_typed T.  At this structure level the only
   reference to fix is the node's type.  */

static void
lto_ft_typed (tree t)
{
  LTO_FIXUP_TREE (TREE_TYPE (t));
}
1463
/* Fix up fields of a tree_common T: the typed part plus the chain
   link.  */

static void
lto_ft_common (tree t)
{
  lto_ft_typed (t);
  LTO_FIXUP_TREE (TREE_CHAIN (t));
}
1472
/* Fix up fields of a decl_minimal T: common fields plus the decl's
   name and context.  */

static void
lto_ft_decl_minimal (tree t)
{
  lto_ft_common (t);
  LTO_FIXUP_TREE (DECL_NAME (t));
  LTO_FIXUP_TREE (DECL_CONTEXT (t));
}
1482
/* Fix up fields of a decl_common T: size expressions, initializer,
   attributes and the abstract origin link.  */

static void
lto_ft_decl_common (tree t)
{
  lto_ft_decl_minimal (t);
  LTO_FIXUP_TREE (DECL_SIZE (t));
  LTO_FIXUP_TREE (DECL_SIZE_UNIT (t));
  LTO_FIXUP_TREE (DECL_INITIAL (t));
  LTO_FIXUP_TREE (DECL_ATTRIBUTES (t));
  LTO_FIXUP_TREE (DECL_ABSTRACT_ORIGIN (t));
}
1495
/* Fix up fields of a decl_with_vis T: assembler name and section
   name on top of the decl_common fields.  */

static void
lto_ft_decl_with_vis (tree t)
{
  lto_ft_decl_common (t);

  /* Accessor macro has side-effects, use field-name here.  */
  LTO_FIXUP_TREE (t->decl_with_vis.assembler_name);
  LTO_FIXUP_TREE (DECL_SECTION_NAME (t));
}
1507
/* Fix up fields of a decl_non_common T: argument/result slots and
   the vtable index, plus a workaround for duplicate original types.  */

static void
lto_ft_decl_non_common (tree t)
{
  lto_ft_decl_with_vis (t);
  LTO_FIXUP_TREE (DECL_ARGUMENT_FLD (t));
  LTO_FIXUP_TREE (DECL_RESULT_FLD (t));
  LTO_FIXUP_TREE (DECL_VINDEX (t));
  /* The C frontends may create exact duplicates for DECL_ORIGINAL_TYPE
     like for 'typedef enum foo foo'.  We have no way of avoiding to
     merge them and dwarf2out.c cannot deal with this,
     so fix this up by clearing DECL_ORIGINAL_TYPE in this case.  */
  if (TREE_CODE (t) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
    DECL_ORIGINAL_TYPE (t) = NULL_TREE;
}
1525
/* Fix up fields of a FUNCTION_DECL T: the decl_non_common fields plus
   the personality routine reference.  */

static void
lto_ft_function (tree t)
{
  lto_ft_decl_non_common (t);
  LTO_FIXUP_TREE (DECL_FUNCTION_PERSONALITY (t));
}
1534
/* Fix up fields of a FIELD_DECL T: offsets, bit-field type and the
   field context.  */

static void
lto_ft_field_decl (tree t)
{
  lto_ft_decl_common (t);
  LTO_FIXUP_TREE (DECL_FIELD_OFFSET (t));
  LTO_FIXUP_TREE (DECL_BIT_FIELD_TYPE (t));
  LTO_FIXUP_TREE (DECL_QUALIFIER (t));
  LTO_FIXUP_TREE (DECL_FIELD_BIT_OFFSET (t));
  LTO_FIXUP_TREE (DECL_FCONTEXT (t));
}
1547
1548 /* Fix up fields of a type T. */
1549
1550 static void
1551 lto_ft_type (tree t)
1552 {
1553 lto_ft_common (t);
1554 LTO_FIXUP_TREE (TYPE_CACHED_VALUES (t));
1555 LTO_FIXUP_TREE (TYPE_SIZE (t));
1556 LTO_FIXUP_TREE (TYPE_SIZE_UNIT (t));
1557 LTO_FIXUP_TREE (TYPE_ATTRIBUTES (t));
1558 LTO_FIXUP_TREE (TYPE_NAME (t));
1559
1560 /* Accessors are for derived node types only. */
1561 if (!POINTER_TYPE_P (t))
1562 LTO_FIXUP_TREE (TYPE_MINVAL (t));
1563 LTO_FIXUP_TREE (TYPE_MAXVAL (t));
1564
1565 /* Accessor is for derived node types only. */
1566 LTO_FIXUP_TREE (t->type_non_common.binfo);
1567
1568 LTO_FIXUP_TREE (TYPE_CONTEXT (t));
1569 }
1570
1571 /* Fix up fields of a BINFO T. */
1572
1573 static void
1574 lto_ft_binfo (tree t)
1575 {
1576 unsigned HOST_WIDE_INT i, n;
1577 tree base, saved_base;
1578
1579 lto_ft_common (t);
1580 LTO_FIXUP_TREE (BINFO_VTABLE (t));
1581 LTO_FIXUP_TREE (BINFO_OFFSET (t));
1582 LTO_FIXUP_TREE (BINFO_VIRTUALS (t));
1583 LTO_FIXUP_TREE (BINFO_VPTR_FIELD (t));
1584 n = VEC_length (tree, BINFO_BASE_ACCESSES (t));
1585 for (i = 0; i < n; i++)
1586 {
1587 saved_base = base = BINFO_BASE_ACCESS (t, i);
1588 LTO_FIXUP_TREE (base);
1589 if (base != saved_base)
1590 VEC_replace (tree, BINFO_BASE_ACCESSES (t), i, base);
1591 }
1592 LTO_FIXUP_TREE (BINFO_INHERITANCE_CHAIN (t));
1593 LTO_FIXUP_TREE (BINFO_SUBVTT_INDEX (t));
1594 LTO_FIXUP_TREE (BINFO_VPTR_INDEX (t));
1595 n = BINFO_N_BASE_BINFOS (t);
1596 for (i = 0; i < n; i++)
1597 {
1598 saved_base = base = BINFO_BASE_BINFO (t, i);
1599 LTO_FIXUP_TREE (base);
1600 if (base != saved_base)
1601 VEC_replace (tree, BINFO_BASE_BINFOS (t), i, base);
1602 }
1603 }
1604
1605 /* Fix up fields of a CONSTRUCTOR T. */
1606
1607 static void
1608 lto_ft_constructor (tree t)
1609 {
1610 unsigned HOST_WIDE_INT idx;
1611 constructor_elt *ce;
1612
1613 lto_ft_typed (t);
1614
1615 for (idx = 0;
1616 VEC_iterate(constructor_elt, CONSTRUCTOR_ELTS (t), idx, ce);
1617 idx++)
1618 {
1619 LTO_FIXUP_TREE (ce->index);
1620 LTO_FIXUP_TREE (ce->value);
1621 }
1622 }
1623
/* Fix up fields of an expression tree T: its type and all operands.
   Operands are walked last-to-first.  */

static void
lto_ft_expr (tree t)
{
  int i;
  lto_ft_typed (t);
  /* The operand count is determined by T's tree code.  */
  for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
    LTO_FIXUP_TREE (TREE_OPERAND (t, i));
}
1634
1635 /* Given a tree T fixup fields of T by replacing types with their merged
1636 variant and other entities by an equal entity from an earlier compilation
1637 unit, or an entity being canonical in a different way. This includes
1638 for instance integer or string constants. */
1639
1640 static void
1641 lto_fixup_types (tree t)
1642 {
1643 switch (TREE_CODE (t))
1644 {
1645 case IDENTIFIER_NODE:
1646 break;
1647
1648 case TREE_LIST:
1649 LTO_FIXUP_TREE (TREE_VALUE (t));
1650 LTO_FIXUP_TREE (TREE_PURPOSE (t));
1651 LTO_FIXUP_TREE (TREE_CHAIN (t));
1652 break;
1653
1654 case FIELD_DECL:
1655 lto_ft_field_decl (t);
1656 break;
1657
1658 case LABEL_DECL:
1659 case CONST_DECL:
1660 case PARM_DECL:
1661 case RESULT_DECL:
1662 case IMPORTED_DECL:
1663 lto_ft_decl_common (t);
1664 break;
1665
1666 case VAR_DECL:
1667 lto_ft_decl_with_vis (t);
1668 break;
1669
1670 case TYPE_DECL:
1671 lto_ft_decl_non_common (t);
1672 break;
1673
1674 case FUNCTION_DECL:
1675 lto_ft_function (t);
1676 break;
1677
1678 case TREE_BINFO:
1679 lto_ft_binfo (t);
1680 break;
1681
1682 case PLACEHOLDER_EXPR:
1683 lto_ft_common (t);
1684 break;
1685
1686 case BLOCK:
1687 case TRANSLATION_UNIT_DECL:
1688 case OPTIMIZATION_NODE:
1689 case TARGET_OPTION_NODE:
1690 break;
1691
1692 default:
1693 if (TYPE_P (t))
1694 lto_ft_type (t);
1695 else if (TREE_CODE (t) == CONSTRUCTOR)
1696 lto_ft_constructor (t);
1697 else if (CONSTANT_CLASS_P (t))
1698 LTO_FIXUP_TREE (TREE_TYPE (t));
1699 else if (EXPR_P (t))
1700 {
1701 lto_ft_expr (t);
1702 }
1703 else
1704 {
1705 remember_with_vars (t);
1706 }
1707 }
1708 }
1709
1710
1711 /* Return the resolution for the decl with index INDEX from DATA_IN. */
1712
1713 static enum ld_plugin_symbol_resolution
1714 get_resolution (struct data_in *data_in, unsigned index)
1715 {
1716 if (data_in->globals_resolution)
1717 {
1718 ld_plugin_symbol_resolution_t ret;
1719 /* We can have references to not emitted functions in
1720 DECL_FUNCTION_PERSONALITY at least. So we can and have
1721 to indeed return LDPR_UNKNOWN in some cases. */
1722 if (VEC_length (ld_plugin_symbol_resolution_t,
1723 data_in->globals_resolution) <= index)
1724 return LDPR_UNKNOWN;
1725 ret = VEC_index (ld_plugin_symbol_resolution_t,
1726 data_in->globals_resolution,
1727 index);
1728 return ret;
1729 }
1730 else
1731 /* Delay resolution finding until decl merging. */
1732 return LDPR_UNKNOWN;
1733 }
1734
1735 /* Map assigning declarations their resolutions. */
1736 static pointer_map_t *resolution_map;
1737
1738 /* We need to record resolutions until symbol table is read. */
1739 static void
1740 register_resolution (tree decl, enum ld_plugin_symbol_resolution resolution)
1741 {
1742 if (resolution == LDPR_UNKNOWN)
1743 return;
1744 if (!resolution_map)
1745 resolution_map = pointer_map_create ();
1746 *pointer_map_insert (resolution_map, decl) = (void *)(size_t)resolution;
1747 }
1748
/* Register DECL with the global symbol table and change its
   name if necessary to avoid name clashes for static globals across
   different files.  */

static void
lto_register_var_decl_in_symtab (struct data_in *data_in, tree decl)
{
  tree context;

  /* Variable has file scope, not local.  Need to ensure static variables
     between different files don't clash unexpectedly.  */
  if (!TREE_PUBLIC (decl)
      && !((context = decl_function_context (decl))
	   && auto_var_in_fn_p (decl, context)))
    {
      /* ??? We normally pre-mangle names before we serialize them
	 out.  Here, in lto1, we do not know the language, and
	 thus cannot do the mangling again.  Instead, we just
	 append a suffix to the mangled name.  The resulting name,
	 however, is not a properly-formed mangled name, and will
	 confuse any attempt to unmangle it.  */
      const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
      char *label;

      ASM_FORMAT_PRIVATE_NAME (label, name, DECL_UID (decl));
      SET_DECL_ASSEMBLER_NAME (decl, get_identifier (label));
      /* Finalize the renamed static and remember it in the global
	 variable list.  */
      rest_of_decl_compilation (decl, 1, 0);
      VEC_safe_push (tree, gc, lto_global_var_decls, decl);
    }

  /* If this variable has already been declared, queue the
     declaration for merging.  The decl's position in the streamer
     cache indexes its linker resolution.  */
  if (TREE_PUBLIC (decl))
    {
      unsigned ix;
      if (!streamer_tree_cache_lookup (data_in->reader_cache, decl, &ix))
	gcc_unreachable ();
      register_resolution (decl, get_resolution (data_in, ix));
    }
}
1789
1790
/* Register DECL with the global symbol table and change its
   name if necessary to avoid name clashes for static globals across
   different files.  DATA_IN contains descriptors and tables for the
   file being read.  */

static void
lto_register_function_decl_in_symtab (struct data_in *data_in, tree decl)
{
  /* Need to ensure static entities between different files
     don't clash unexpectedly.  */
  if (!TREE_PUBLIC (decl))
    {
      /* We must not use the DECL_ASSEMBLER_NAME macro here, as it
	 may set the assembler name where it was previously empty.  */
      tree old_assembler_name = decl->decl_with_vis.assembler_name;

      /* FIXME lto: We normally pre-mangle names before we serialize
	 them out.  Here, in lto1, we do not know the language, and
	 thus cannot do the mangling again.  Instead, we just append a
	 suffix to the mangled name.  The resulting name, however, is
	 not a properly-formed mangled name, and will confuse any
	 attempt to unmangle it.  */
      const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
      char *label;

      ASM_FORMAT_PRIVATE_NAME (label, name, DECL_UID (decl));
      SET_DECL_ASSEMBLER_NAME (decl, get_identifier (label));

      /* We may arrive here with the old assembler name not set
	 if the function body is not needed, e.g., it has been
	 inlined away and does not appear in the cgraph.  */
      if (old_assembler_name)
	{
	  tree new_assembler_name = DECL_ASSEMBLER_NAME (decl);

	  /* Make the original assembler name available for later use.
	     We may have used it to indicate the section within its
	     object file where the function body may be found.
	     FIXME lto: Find a better way to maintain the function decl
	     to body section mapping so we don't need this hack.  */
	  lto_record_renamed_decl (data_in->file_data,
				   IDENTIFIER_POINTER (old_assembler_name),
				   IDENTIFIER_POINTER (new_assembler_name));
	}
    }

  /* If this variable has already been declared, queue the
     declaration for merging.  Abstract (e.g. inlined-away) decls are
     never merged.  */
  if (TREE_PUBLIC (decl) && !DECL_ABSTRACT (decl))
    {
      unsigned ix;
      if (!streamer_tree_cache_lookup (data_in->reader_cache, decl, &ix))
	gcc_unreachable ();
      register_resolution (decl, get_resolution (data_in, ix));
    }
}
1847
1848
/* Given a streamer cache structure DATA_IN (holding a sequence of trees
   for one compilation unit) go over all trees starting at index FROM until the
   end of the sequence and replace fields of those trees, and the trees
   themself with their canonical variants as per gimple_register_type.
   Runs three passes: (1) register all types, (2) fix up all tree
   fields and replace merged types in the cache, (3) compute canonical
   types and register decls in the symbol table.  */

static void
uniquify_nodes (struct data_in *data_in, unsigned from)
{
  struct streamer_tree_cache_d *cache = data_in->reader_cache;
  unsigned len = VEC_length (tree, cache->nodes);
  unsigned i;

  /* Go backwards because children streamed for the first time come
     as part of their parents, and hence are created after them.  */

  /* First register all the types in the cache.  This makes sure to
     have the original structure in the type cycles when registering
     them and computing hashes.  */
  for (i = len; i-- > from;)
    {
      tree t = VEC_index (tree, cache->nodes, i);
      if (t && TYPE_P (t))
	{
	  tree newt = gimple_register_type (t);
	  /* Mark non-prevailing types so we fix them up.  No need
	     to reset that flag afterwards - nothing that refers
	     to those types is left and they are collected.  */
	  if (newt != t)
	    TREE_VISITED (t) = 1;
	}
    }

  /* Second fixup all trees in the new cache entries.  */
  for (i = len; i-- > from;)
    {
      tree t = VEC_index (tree, cache->nodes, i);
      tree oldt = t;
      if (!t)
	continue;

      /* First fixup the fields of T.  */
      lto_fixup_types (t);

      if (!TYPE_P (t))
	continue;

      /* Now try to find a canonical variant of T itself.  */
      t = GIMPLE_REGISTER_TYPE (t);

      if (t == oldt)
	{
	  /* T prevailed.  The following re-creates proper variant lists
	     while fixing up the variant leaders.  We do not stream
	     TYPE_NEXT_VARIANT so the variant list state before fixup is
	     broken.  */
	  tree tem, mv;

#ifdef ENABLE_CHECKING
	  /* Remove us from our main variant list if we are not the
	     variant leader.  */
	  if (TYPE_MAIN_VARIANT (t) != t)
	    {
	      tem = TYPE_MAIN_VARIANT (t);
	      while (tem && TYPE_NEXT_VARIANT (tem) != t)
		tem = TYPE_NEXT_VARIANT (tem);
	      gcc_assert (!tem && !TYPE_NEXT_VARIANT (t));
	    }
#endif

	  /* Query our new main variant.  */
	  mv = GIMPLE_REGISTER_TYPE (TYPE_MAIN_VARIANT (t));

	  /* If we were the variant leader and we get replaced ourselves drop
	     all variants from our list.  */
	  if (TYPE_MAIN_VARIANT (t) == t
	      && mv != t)
	    {
	      tem = t;
	      while (tem)
		{
		  tree tem2 = TYPE_NEXT_VARIANT (tem);
		  TYPE_NEXT_VARIANT (tem) = NULL_TREE;
		  tem = tem2;
		}
	    }

	  /* If we are not our own variant leader link us into our new leaders
	     variant list.  */
	  if (mv != t)
	    {
	      TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
	      TYPE_NEXT_VARIANT (mv) = t;
	      if (RECORD_OR_UNION_TYPE_P (t))
		TYPE_BINFO (t) = TYPE_BINFO (mv);
	      /* Preserve the invariant that type variants share their
		 TYPE_FIELDS.  */
	      if (RECORD_OR_UNION_TYPE_P (t)
		  && TYPE_FIELDS (mv) != TYPE_FIELDS (t))
		{
		  tree f1, f2;
		  for (f1 = TYPE_FIELDS (mv), f2 = TYPE_FIELDS (t);
		       f1 && f2; f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
		    {
		      unsigned ix;
		      gcc_assert (f1 != f2
				  && DECL_NAME (f1) == DECL_NAME (f2));
		      if (!streamer_tree_cache_lookup (cache, f2, &ix))
			gcc_unreachable ();
		      /* If we're going to replace an element which we'd
			 still visit in the next iterations, we wouldn't
			 handle it, so do it here.  We do have to handle it
			 even though the field_decl itself will be removed,
			 as it could refer to e.g. integer_cst which we
			 wouldn't reach via any other way, hence they
			 (and their type) would stay uncollected.  */
		      /* ??? We should rather make sure to replace all
			 references to f2 with f1.  That means handling
			 COMPONENT_REFs and CONSTRUCTOR elements in
			 lto_fixup_types and special-case the field-decl
			 operand handling.  */
		      /* ??? Not sure the above is all relevant in this
			 path canonicalizing TYPE_FIELDS to that of the
			 main variant.  */
		      if (ix < i)
			lto_fixup_types (f2);
		      streamer_tree_cache_insert_at (cache, f1, ix);
		    }
		  TYPE_FIELDS (t) = TYPE_FIELDS (mv);
		}
	    }

	  /* Finally adjust our main variant and fix it up.  */
	  TYPE_MAIN_VARIANT (t) = mv;

	  /* The following reconstructs the pointer chains
	     of the new pointed-to type if we are a main variant.  We do
	     not stream those so they are broken before fixup.  */
	  if (TREE_CODE (t) == POINTER_TYPE
	      && TYPE_MAIN_VARIANT (t) == t)
	    {
	      TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (TREE_TYPE (t));
	      TYPE_POINTER_TO (TREE_TYPE (t)) = t;
	    }
	  else if (TREE_CODE (t) == REFERENCE_TYPE
		   && TYPE_MAIN_VARIANT (t) == t)
	    {
	      TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (TREE_TYPE (t));
	      TYPE_REFERENCE_TO (TREE_TYPE (t)) = t;
	    }
	}

      else
	{
	  /* A compatible type prevailed over T.  Migrate the streamer
	     cache entries of T's fields to the prevailing fields.  */
	  if (RECORD_OR_UNION_TYPE_P (t))
	    {
	      tree f1, f2;
	      if (TYPE_FIELDS (t) != TYPE_FIELDS (oldt))
		for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (oldt);
		     f1 && f2; f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
		  {
		    unsigned ix;
		    gcc_assert (f1 != f2 && DECL_NAME (f1) == DECL_NAME (f2));
		    if (!streamer_tree_cache_lookup (cache, f2, &ix))
		      gcc_unreachable ();
		    /* If we're going to replace an element which we'd
		       still visit in the next iterations, we wouldn't
		       handle it, so do it here.  We do have to handle it
		       even though the field_decl itself will be removed,
		       as it could refer to e.g. integer_cst which we
		       wouldn't reach via any other way, hence they
		       (and their type) would stay uncollected.  */
		    /* ??? We should rather make sure to replace all
		       references to f2 with f1.  That means handling
		       COMPONENT_REFs and CONSTRUCTOR elements in
		       lto_fixup_types and special-case the field-decl
		       operand handling.  */
		    if (ix < i)
		      lto_fixup_types (f2);
		    streamer_tree_cache_insert_at (cache, f1, ix);
		  }
	    }

	  /* If we found a tree that is equal to oldt replace it in the
	     cache, so that further users (in the various LTO sections)
	     make use of it.  */
	  streamer_tree_cache_insert_at (cache, t, i);
	}
    }

  /* Finally compute the canonical type of all TREE_TYPEs and register
     VAR_DECL and FUNCTION_DECL nodes in the symbol table.
     From this point there are no longer any types with
     TYPE_STRUCTURAL_EQUALITY_P and its type-based alias problems.
     This step requires the TYPE_POINTER_TO lists being present, so
     make sure it is done last.  */
  for (i = len; i-- > from;)
    {
      tree t = VEC_index (tree, cache->nodes, i);
      if (t == NULL_TREE)
	continue;

      if (TREE_CODE (t) == VAR_DECL)
	lto_register_var_decl_in_symtab (data_in, t);
      else if (TREE_CODE (t) == FUNCTION_DECL && !DECL_BUILT_IN (t))
	lto_register_function_decl_in_symtab (data_in, t);
      else if (!flag_wpa
	       && TREE_CODE (t) == TYPE_DECL)
	debug_hooks->type_decl (t, !DECL_FILE_SCOPE_P (t));
      else if (TYPE_P (t) && !TYPE_CANONICAL (t))
	TYPE_CANONICAL (t) = gimple_register_canonical_type (t);
    }
}
2060
2061
/* Read all the symbols from buffer DATA, using descriptors in DECL_DATA.
   RESOLUTIONS is the set of symbols picked by the linker (read from the
   resolution file when the linker plugin is being used).  */

static void
lto_read_decls (struct lto_file_decl_data *decl_data, const void *data,
		VEC(ld_plugin_symbol_resolution_t,heap) *resolutions)
{
  const struct lto_decl_header *header = (const struct lto_decl_header *) data;
  /* The section layout is: header, decl-state words, main tree stream,
     string table.  Compute the offset of each region from the header.  */
  const int decl_offset = sizeof (struct lto_decl_header);
  const int main_offset = decl_offset + header->decl_state_size;
  const int string_offset = main_offset + header->main_size;
  struct lto_input_block ib_main;
  struct data_in *data_in;
  unsigned int i;
  const uint32_t *data_ptr, *data_end;
  uint32_t num_decl_states;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
			header->main_size);

  data_in = lto_data_in_create (decl_data, (const char *) data + string_offset,
				header->string_size, resolutions);

  /* We do not uniquify the pre-loaded cache entries, those are middle-end
     internal types that should not be merged.  */

  /* Read the global declarations and types.  Each top-level tree read
     here may pull in a batch of new nodes into the reader cache;
     uniquify_nodes merges that batch (entries FROM and up) with
     previously seen equal trees.  */
  while (ib_main.p < ib_main.len)
    {
      tree t;
      unsigned from = VEC_length (tree, data_in->reader_cache->nodes);
      t = stream_read_tree (&ib_main, data_in);
      gcc_assert (t && ib_main.p <= ib_main.len);
      uniquify_nodes (data_in, from);
    }

  /* Read in lto_in_decl_state objects.  The first word is the number of
     states; the first state is the per-file global one, the remaining
     NUM_DECL_STATES - 1 are per-function states.  */
  data_ptr = (const uint32_t *) ((const char*) data + decl_offset);
  data_end =
     (const uint32_t *) ((const char*) data_ptr + header->decl_state_size);
  num_decl_states = *data_ptr++;
  
  gcc_assert (num_decl_states > 0);
  decl_data->global_decl_state = lto_new_in_decl_state ();
  data_ptr = lto_read_in_decl_state (data_in, data_ptr,
				     decl_data->global_decl_state);

  /* Read in per-function decl states and enter them in hash table.  */
  decl_data->function_decl_states =
    htab_create_ggc (37, lto_hash_in_decl_state, lto_eq_in_decl_state, NULL);

  for (i = 1; i < num_decl_states; i++)
    {
      struct lto_in_decl_state *state = lto_new_in_decl_state ();
      void **slot;

      data_ptr = lto_read_in_decl_state (data_in, data_ptr, state);
      slot = htab_find_slot (decl_data->function_decl_states, state, INSERT);
      /* Each function decl state must be unique in the table.  */
      gcc_assert (*slot == NULL);
      *slot = state;
    }

  /* The decl-state region must have been consumed exactly; trailing
     bytes indicate a corrupted bytecode stream.  */
  if (data_ptr != data_end)
    internal_error ("bytecode stream: garbage at the end of symbols section");

  /* Set the current decl state to be the global state. */
  decl_data->current_decl_state = decl_data->global_decl_state;

  lto_data_in_delete (data_in);
}
2133
2134 /* Custom version of strtoll, which is not portable. */
2135
2136 static HOST_WIDEST_INT
2137 lto_parse_hex (const char *p)
2138 {
2139 HOST_WIDEST_INT ret = 0;
2140
2141 for (; *p != '\0'; ++p)
2142 {
2143 char c = *p;
2144 unsigned char part;
2145 ret <<= 4;
2146 if (c >= '0' && c <= '9')
2147 part = c - '0';
2148 else if (c >= 'a' && c <= 'f')
2149 part = c - 'a' + 10;
2150 else if (c >= 'A' && c <= 'F')
2151 part = c - 'A' + 10;
2152 else
2153 internal_error ("could not parse hex number");
2154 ret |= part;
2155 }
2156
2157 return ret;
2158 }
2159
2160 /* Read resolution for file named FILE_NAME. The resolution is read from
2161 RESOLUTION. */
2162
2163 static void
2164 lto_resolution_read (splay_tree file_ids, FILE *resolution, lto_file *file)
2165 {
2166 /* We require that objects in the resolution file are in the same
2167 order as the lto1 command line. */
2168 unsigned int name_len;
2169 char *obj_name;
2170 unsigned int num_symbols;
2171 unsigned int i;
2172 struct lto_file_decl_data *file_data;
2173 splay_tree_node nd = NULL;
2174
2175 if (!resolution)
2176 return;
2177
2178 name_len = strlen (file->filename);
2179 obj_name = XNEWVEC (char, name_len + 1);
2180 fscanf (resolution, " "); /* Read white space. */
2181
2182 fread (obj_name, sizeof (char), name_len, resolution);
2183 obj_name[name_len] = '\0';
2184 if (filename_cmp (obj_name, file->filename) != 0)
2185 internal_error ("unexpected file name %s in linker resolution file. "
2186 "Expected %s", obj_name, file->filename);
2187 if (file->offset != 0)
2188 {
2189 int t;
2190 char offset_p[17];
2191 HOST_WIDEST_INT offset;
2192 t = fscanf (resolution, "@0x%16s", offset_p);
2193 if (t != 1)
2194 internal_error ("could not parse file offset");
2195 offset = lto_parse_hex (offset_p);
2196 if (offset != file->offset)
2197 internal_error ("unexpected offset");
2198 }
2199
2200 free (obj_name);
2201
2202 fscanf (resolution, "%u", &num_symbols);
2203
2204 for (i = 0; i < num_symbols; i++)
2205 {
2206 int t;
2207 unsigned index;
2208 unsigned HOST_WIDE_INT id;
2209 char r_str[27];
2210 enum ld_plugin_symbol_resolution r = (enum ld_plugin_symbol_resolution) 0;
2211 unsigned int j;
2212 unsigned int lto_resolution_str_len =
2213 sizeof (lto_resolution_str) / sizeof (char *);
2214 res_pair rp;
2215
2216 t = fscanf (resolution, "%u " HOST_WIDE_INT_PRINT_HEX_PURE " %26s %*[^\n]\n",
2217 &index, &id, r_str);
2218 if (t != 3)
2219 internal_error ("invalid line in the resolution file");
2220
2221 for (j = 0; j < lto_resolution_str_len; j++)
2222 {
2223 if (strcmp (lto_resolution_str[j], r_str) == 0)
2224 {
2225 r = (enum ld_plugin_symbol_resolution) j;
2226 break;
2227 }
2228 }
2229 if (j == lto_resolution_str_len)
2230 internal_error ("invalid resolution in the resolution file");
2231
2232 if (!(nd && lto_splay_tree_id_equal_p (nd->key, id)))
2233 {
2234 nd = lto_splay_tree_lookup (file_ids, id);
2235 if (nd == NULL)
2236 internal_error ("resolution sub id " HOST_WIDE_INT_PRINT_HEX_PURE
2237 " not in object file", id);
2238 }
2239
2240 file_data = (struct lto_file_decl_data *)nd->value;
2241 /* The indexes are very sparse. To save memory save them in a compact
2242 format that is only unpacked later when the subfile is processed. */
2243 rp.res = r;
2244 rp.index = index;
2245 VEC_safe_push (res_pair, heap, file_data->respairs, rp);
2246 if (file_data->max_index < index)
2247 file_data->max_index = index;
2248 }
2249 }
2250
/* Singly linked list of lto_file_decl_data, chained through their NEXT
   field.  FIRST/LAST give head and tail so new entries can be appended
   in linker order.  */
struct file_data_list
  {
    struct lto_file_decl_data *first, *last;
  };
2256
2257 /* Is the name for a id'ed LTO section? */
2258
2259 static int
2260 lto_section_with_id (const char *name, unsigned HOST_WIDE_INT *id)
2261 {
2262 const char *s;
2263
2264 if (strncmp (name, LTO_SECTION_NAME_PREFIX, strlen (LTO_SECTION_NAME_PREFIX)))
2265 return 0;
2266 s = strrchr (name, '.');
2267 return s && sscanf (s, "." HOST_WIDE_INT_PRINT_HEX_PURE, id) == 1;
2268 }
2269
2270 /* Create file_data of each sub file id */
2271
2272 static int
2273 create_subid_section_table (struct lto_section_slot *ls, splay_tree file_ids,
2274 struct file_data_list *list)
2275 {
2276 struct lto_section_slot s_slot, *new_slot;
2277 unsigned HOST_WIDE_INT id;
2278 splay_tree_node nd;
2279 void **hash_slot;
2280 char *new_name;
2281 struct lto_file_decl_data *file_data;
2282
2283 if (!lto_section_with_id (ls->name, &id))
2284 return 1;
2285
2286 /* Find hash table of sub module id */
2287 nd = lto_splay_tree_lookup (file_ids, id);
2288 if (nd != NULL)
2289 {
2290 file_data = (struct lto_file_decl_data *)nd->value;
2291 }
2292 else
2293 {
2294 file_data = ggc_alloc_lto_file_decl_data ();
2295 memset(file_data, 0, sizeof (struct lto_file_decl_data));
2296 file_data->id = id;
2297 file_data->section_hash_table = lto_obj_create_section_hash_table ();;
2298 lto_splay_tree_insert (file_ids, id, file_data);
2299
2300 /* Maintain list in linker order */
2301 if (!list->first)
2302 list->first = file_data;
2303 if (list->last)
2304 list->last->next = file_data;
2305 list->last = file_data;
2306 }
2307
2308 /* Copy section into sub module hash table */
2309 new_name = XDUPVEC (char, ls->name, strlen (ls->name) + 1);
2310 s_slot.name = new_name;
2311 hash_slot = htab_find_slot (file_data->section_hash_table, &s_slot, INSERT);
2312 gcc_assert (*hash_slot == NULL);
2313
2314 new_slot = XDUP (struct lto_section_slot, ls);
2315 new_slot->name = new_name;
2316 *hash_slot = new_slot;
2317 return 1;
2318 }
2319
/* Read declarations and other initializations for a FILE_DATA.  */

static void
lto_file_finalize (struct lto_file_decl_data *file_data, lto_file *file)
{
  const char *data;
  size_t len;
  VEC(ld_plugin_symbol_resolution_t,heap) *resolutions = NULL;
  int i;
  res_pair *rp;

  /* Create vector for fast access of resolution.  We do this lazily
     to save memory.  Unpack the compact (index, resolution) pairs
     gathered by lto_resolution_read into a dense vector indexed by
     symbol index.  */
  VEC_safe_grow_cleared (ld_plugin_symbol_resolution_t, heap,
			 resolutions,
			 file_data->max_index + 1);
  for (i = 0; VEC_iterate (res_pair, file_data->respairs, i, rp); i++)
    VEC_replace (ld_plugin_symbol_resolution_t, resolutions, rp->index, rp->res);
  /* The compact representation is no longer needed.  */
  VEC_free (res_pair, heap, file_data->respairs);

  file_data->renaming_hash_table = lto_create_renaming_table ();
  file_data->file_name = file->filename;
  data = lto_get_section_data (file_data, LTO_section_decls, NULL, &len);
  if (data == NULL)
    {
      internal_error ("cannot read LTO decls from %s", file_data->file_name);
      /* Not reached: internal_error does not return.  */
      return;
    }
  /* Frees resolutions (ownership passes to lto_read_decls via data_in).  */
  lto_read_decls (file_data, data, resolutions);
  lto_free_section_data (file_data, LTO_section_decls, NULL, data, len);
}
2352
2353 /* Finalize FILE_DATA in FILE and increase COUNT. */
2354
2355 static int
2356 lto_create_files_from_ids (lto_file *file, struct lto_file_decl_data *file_data,
2357 int *count)
2358 {
2359 lto_file_finalize (file_data, file);
2360 if (cgraph_dump_file)
2361 fprintf (cgraph_dump_file, "Creating file %s with sub id " HOST_WIDE_INT_PRINT_HEX "\n",
2362 file_data->file_name, file_data->id);
2363 (*count)++;
2364 return 0;
2365 }
2366
2367 /* Generate a TREE representation for all types and external decls
2368 entities in FILE.
2369
2370 Read all of the globals out of the file. Then read the cgraph
2371 and process the .o index into the cgraph nodes so that it can open
2372 the .o file to load the functions and ipa information. */
2373
2374 static struct lto_file_decl_data *
2375 lto_file_read (lto_file *file, FILE *resolution_file, int *count)
2376 {
2377 struct lto_file_decl_data *file_data = NULL;
2378 splay_tree file_ids;
2379 htab_t section_hash_table;
2380 struct lto_section_slot *section;
2381 struct file_data_list file_list;
2382 struct lto_section_list section_list;
2383
2384 memset (&section_list, 0, sizeof (struct lto_section_list));
2385 section_hash_table = lto_obj_build_section_table (file, &section_list);
2386
2387 /* Find all sub modules in the object and put their sections into new hash
2388 tables in a splay tree. */
2389 file_ids = lto_splay_tree_new ();
2390 memset (&file_list, 0, sizeof (struct file_data_list));
2391 for (section = section_list.first; section != NULL; section = section->next)
2392 create_subid_section_table (section, file_ids, &file_list);
2393
2394 /* Add resolutions to file ids */
2395 lto_resolution_read (file_ids, resolution_file, file);
2396
2397 /* Finalize each lto file for each submodule in the merged object */
2398 for (file_data = file_list.first; file_data != NULL; file_data = file_data->next)
2399 lto_create_files_from_ids (file, file_data, count);
2400
2401 splay_tree_delete (file_ids);
2402 htab_delete (section_hash_table);
2403
2404 return file_list.first;
2405 }
2406
2407 #if HAVE_MMAP_FILE && HAVE_SYSCONF && defined _SC_PAGE_SIZE
2408 #define LTO_MMAP_IO 1
2409 #endif
2410
2411 #if LTO_MMAP_IO
2412 /* Page size of machine is used for mmap and munmap calls. */
2413 static size_t page_mask;
2414 #endif
2415
/* Get the section data of length LEN from FILENAME starting at
   OFFSET.  The data segment must be freed by the caller when the
   caller is finished.  Returns NULL if all was not well.  */

static char *
lto_read_section_data (struct lto_file_decl_data *file_data,
		       intptr_t offset, size_t len)
{
  char *result;
  static int fd = -1;
  static char *fd_name;
#if LTO_MMAP_IO
  intptr_t computed_len;
  intptr_t computed_offset;
  intptr_t diff;
#endif

  /* Keep a single-entry file-descriptor cache.  The last file we
     touched will get closed at exit.
     ??? Eventually we want to add a more sophisticated larger cache
     or rather fix function body streaming to not stream them in
     practically random order.  */
  if (fd != -1
      && filename_cmp (fd_name, file_data->file_name) != 0)
    {
      free (fd_name);
      close (fd);
      fd = -1;
    }
  if (fd == -1)
    {
      fd = open (file_data->file_name, O_RDONLY|O_BINARY);
      if (fd == -1)
	{
	  fatal_error ("Cannot open %s", file_data->file_name);
	  /* Not reached: fatal_error does not return.  */
	  return NULL;
	}
      fd_name = xstrdup (file_data->file_name);
    }

#if LTO_MMAP_IO
  if (!page_mask)
    {
      size_t page_size = sysconf (_SC_PAGE_SIZE);
      page_mask = ~(page_size - 1);
    }

  /* mmap requires a page-aligned offset; map from the containing page
     boundary and return a pointer DIFF bytes into the mapping.  */
  computed_offset = offset & page_mask;
  diff = offset - computed_offset;
  computed_len = len + diff;

  result = (char *) mmap (NULL, computed_len, PROT_READ, MAP_PRIVATE,
			  fd, computed_offset);
  if (result == MAP_FAILED)
    {
      fatal_error ("Cannot map %s", file_data->file_name);
      /* Not reached: fatal_error does not return.  */
      return NULL;
    }

  return result + diff;
#else
  result = (char *) xmalloc (len);
  if (lseek (fd, offset, SEEK_SET) != offset
      || read (fd, result, len) != (ssize_t) len)
    {
      free (result);
      fatal_error ("Cannot read %s", file_data->file_name);
      result = NULL;
    }
#ifdef __MINGW32__
  /* Native Windows does not support delayed unlink of an opened file, so
     close the file here again.  This produces higher I/O load, but at
     least it prevents dangling file handles from blocking the unlink.  */
  free (fd_name);
  fd_name = NULL;
  close (fd);
  fd = -1;
#endif
  return result;
#endif
}
2497
2498
2499 /* Get the section data from FILE_DATA of SECTION_TYPE with NAME.
2500 NAME will be NULL unless the section type is for a function
2501 body. */
2502
2503 static const char *
2504 get_section_data (struct lto_file_decl_data *file_data,
2505 enum lto_section_type section_type,
2506 const char *name,
2507 size_t *len)
2508 {
2509 htab_t section_hash_table = file_data->section_hash_table;
2510 struct lto_section_slot *f_slot;
2511 struct lto_section_slot s_slot;
2512 const char *section_name = lto_get_section_name (section_type, name, file_data);
2513 char *data = NULL;
2514
2515 *len = 0;
2516 s_slot.name = section_name;
2517 f_slot = (struct lto_section_slot *) htab_find (section_hash_table, &s_slot);
2518 if (f_slot)
2519 {
2520 data = lto_read_section_data (file_data, f_slot->start, f_slot->len);
2521 *len = f_slot->len;
2522 }
2523
2524 free (CONST_CAST (char *, section_name));
2525 return data;
2526 }
2527
2528
/* Free the section data from FILE_DATA of SECTION_TYPE with NAME that
   starts at OFFSET and has LEN bytes.  */

static void
free_section_data (struct lto_file_decl_data *file_data ATTRIBUTE_UNUSED,
		   enum lto_section_type section_type ATTRIBUTE_UNUSED,
		   const char *name ATTRIBUTE_UNUSED,
		   const char *offset, size_t len ATTRIBUTE_UNUSED)
{
#if LTO_MMAP_IO
  intptr_t computed_len;
  intptr_t computed_offset;
  intptr_t diff;
#endif

#if LTO_MMAP_IO
  /* Recompute the page-aligned mapping start and total length exactly
     as lto_read_section_data did, so the munmap matches the mmap.  */
  computed_offset = ((intptr_t) offset) & page_mask;
  diff = (intptr_t) offset - computed_offset;
  computed_len = len + diff;

  munmap ((caddr_t) computed_offset, computed_len);
#else
  /* Non-mmap path: the buffer was xmalloc'ed.  */
  free (CONST_CAST(char *, offset));
#endif
}
2554
2555 static lto_file *current_lto_file;
2556
2557 /* Helper for qsort; compare partitions and return one with smaller size.
2558 We sort from greatest to smallest so parallel build doesn't stale on the
2559 longest compilation being executed too late. */
2560
2561 static int
2562 cmp_partitions_size (const void *a, const void *b)
2563 {
2564 const struct ltrans_partition_def *pa
2565 = *(struct ltrans_partition_def *const *)a;
2566 const struct ltrans_partition_def *pb
2567 = *(struct ltrans_partition_def *const *)b;
2568 return pb->insns - pa->insns;
2569 }
2570
2571 /* Helper for qsort; compare partitions and return one with smaller order. */
2572
2573 static int
2574 cmp_partitions_order (const void *a, const void *b)
2575 {
2576 const struct ltrans_partition_def *pa
2577 = *(struct ltrans_partition_def *const *)a;
2578 const struct ltrans_partition_def *pb
2579 = *(struct ltrans_partition_def *const *)b;
2580 int ordera = -1, orderb = -1;
2581
2582 if (lto_symtab_encoder_size (pa->encoder))
2583 ordera = lto_symtab_encoder_deref (pa->encoder, 0)->symbol.order;
2584 if (lto_symtab_encoder_size (pb->encoder))
2585 orderb = lto_symtab_encoder_deref (pb->encoder, 0)->symbol.order;
2586 return orderb - ordera;
2587 }
2588
/* Write all output files in WPA mode and the file with the list of
   LTRANS units.  */

static void
lto_wpa_write_files (void)
{
  unsigned i, n_sets;
  lto_file *file;
  ltrans_partition part;
  FILE *ltrans_output_list_stream;
  char *temp_filename;
  size_t blen;

  /* Open the LTRANS output list.  */
  if (!ltrans_output_list)
    fatal_error ("no LTRANS output list filename provided");
  ltrans_output_list_stream = fopen (ltrans_output_list, "w");
  if (ltrans_output_list_stream == NULL)
    fatal_error ("opening LTRANS output list %s: %m", ltrans_output_list);

  timevar_push (TV_WHOPR_WPA);

  FOR_EACH_VEC_ELT (ltrans_partition, ltrans_partitions, i, part)
    lto_stats.num_output_symtab_nodes += lto_symtab_encoder_size (part->encoder);

  /* Find out statics that need to be promoted
     to globals with hidden visibility because they are accessed from multiple
     partitions.  */
  lto_promote_cross_file_statics ();

  timevar_pop (TV_WHOPR_WPA);

  timevar_push (TV_WHOPR_WPA_IO);

  /* Generate a prefix for the LTRANS unit files.  The buffer is sized
     for the longest possible "%u.o" suffix.  A trailing ".out" on the
     list file name is stripped first.  */
  blen = strlen (ltrans_output_list);
  temp_filename = (char *) xmalloc (blen + sizeof ("2147483648.o"));
  strcpy (temp_filename, ltrans_output_list);
  if (blen > sizeof (".out")
      && strcmp (temp_filename + blen - sizeof (".out") + 1,
		 ".out") == 0)
    temp_filename[blen - sizeof (".out") + 1] = '\0';
  blen = strlen (temp_filename);

  n_sets = VEC_length (ltrans_partition, ltrans_partitions);

  /* Sort partitions by size so small ones are compiled last.
     FIXME: Even when not reordering we may want to output one list for parallel make
     and other for final link command.  */
  VEC_qsort (ltrans_partition, ltrans_partitions,
	    flag_toplevel_reorder ? cmp_partitions_size : cmp_partitions_order);
  for (i = 0; i < n_sets; i++)
    {
      size_t len;
      ltrans_partition part = VEC_index (ltrans_partition, ltrans_partitions, i);

      /* Write all the nodes in SET.  */
      sprintf (temp_filename + blen, "%u.o", i);
      file = lto_obj_file_open (temp_filename, true);
      if (!file)
	fatal_error ("lto_obj_file_open() failed");

      if (!quiet_flag)
	fprintf (stderr, " %s (%s %i insns)", temp_filename, part->name, part->insns);
      if (cgraph_dump_file)
	{
	  lto_symtab_encoder_iterator lsei;
	  
	  fprintf (cgraph_dump_file, "Writing partition %s to file %s, %i insns\n",
		   part->name, temp_filename, part->insns);
	  fprintf (cgraph_dump_file, "  Symbols in partition: ");
	  /* Dump symbols belonging to the partition itself...  */
	  for (lsei = lsei_start_in_partition (part->encoder); !lsei_end_p (lsei);
	       lsei_next_in_partition (&lsei))
	    {
	      symtab_node node = lsei_node (lsei);
	      fprintf (cgraph_dump_file, "%s ", symtab_node_asm_name (node));
	    }
	  fprintf (cgraph_dump_file, "\n  Symbols in boundary: ");
	  /* ... and symbols only referenced across the partition
	     boundary, noting whether bodies/initializers are copied.  */
	  for (lsei = lsei_start (part->encoder); !lsei_end_p (lsei);
	       lsei_next (&lsei))
	    {
	      symtab_node node = lsei_node (lsei);
	      if (!lto_symtab_encoder_in_partition_p (part->encoder, node))
		{
		  fprintf (cgraph_dump_file, "%s ", symtab_node_asm_name (node));
		  cgraph_node *cnode = dyn_cast <cgraph_node> (node);
		  if (cnode
		      && lto_symtab_encoder_encode_body_p (part->encoder, cnode))
		    fprintf (cgraph_dump_file, "(body included)");
		  else
		    {
		      varpool_node *vnode = dyn_cast <varpool_node> (node);
		      if (vnode
			  && lto_symtab_encoder_encode_initializer_p (part->encoder, vnode))
			fprintf (cgraph_dump_file, "(initializer included)");
		    }
		}
	    }
	  fprintf (cgraph_dump_file, "\n");
	}
      /* Only the first partition may legitimately be empty.  */
      gcc_checking_assert (lto_symtab_encoder_size (part->encoder) || !i);

      lto_set_current_out_file (file);

      ipa_write_optimization_summaries (part->encoder);

      lto_set_current_out_file (NULL);
      lto_obj_file_close (file);
      free (file);
      part->encoder = NULL;

      len = strlen (temp_filename);
      if (fwrite (temp_filename, 1, len, ltrans_output_list_stream) < len
	  || fwrite ("\n", 1, 1, ltrans_output_list_stream) < 1)
	fatal_error ("writing to LTRANS output list %s: %m",
		     ltrans_output_list);
    }

  lto_stats.num_output_files += n_sets;

  /* Close the LTRANS output list.  */
  if (fclose (ltrans_output_list_stream))
    fatal_error ("closing LTRANS output list %s: %m", ltrans_output_list);

  free_ltrans_partitions();
  free (temp_filename);

  timevar_pop (TV_WHOPR_WPA_IO);
}
2718
2719
/* If TT is a variable or function decl replace it with its
   prevailing variant.  */
#define LTO_SET_PREVAIL(tt) \
  do {\
    if ((tt) && VAR_OR_FUNCTION_DECL_P (tt)) \
      tt = lto_symtab_prevailing_decl (tt); \
  } while (0)

/* Assert that TT isn't a replaceable var or function decl, i.e. a field
   that never needs the prevailing-decl substitution.  */
#define LTO_NO_PREVAIL(tt) \
  gcc_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
2731
/* Given a tree T replace all fields referring to variables or functions
   with their prevailing variant.  Fields that can never hold a var or
   function decl are asserted with LTO_NO_PREVAIL; the dispatch below
   covers decls, types, expressions and TREE_LISTs.  */
static void
lto_fixup_prevailing_decls (tree t)
{
  enum tree_code code = TREE_CODE (t);
  LTO_NO_PREVAIL (TREE_TYPE (t));
  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    LTO_NO_PREVAIL (TREE_CHAIN (t));
  if (DECL_P (t))
    {
      LTO_NO_PREVAIL (DECL_NAME (t));
      LTO_SET_PREVAIL (DECL_CONTEXT (t));
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
	{
	  LTO_SET_PREVAIL (DECL_SIZE (t));
	  LTO_SET_PREVAIL (DECL_SIZE_UNIT (t));
	  LTO_SET_PREVAIL (DECL_INITIAL (t));
	  LTO_NO_PREVAIL (DECL_ATTRIBUTES (t));
	  LTO_SET_PREVAIL (DECL_ABSTRACT_ORIGIN (t));
	}
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
	{
	  LTO_NO_PREVAIL (t->decl_with_vis.assembler_name);
	  LTO_NO_PREVAIL (DECL_SECTION_NAME (t));
	}
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
	{
	  LTO_NO_PREVAIL (DECL_ARGUMENT_FLD (t));
	  LTO_NO_PREVAIL (DECL_RESULT_FLD (t));
	  LTO_NO_PREVAIL (DECL_VINDEX (t));
	}
      if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
	LTO_SET_PREVAIL (DECL_FUNCTION_PERSONALITY (t));
      if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
	{
	  LTO_NO_PREVAIL (DECL_FIELD_OFFSET (t));
	  LTO_NO_PREVAIL (DECL_BIT_FIELD_TYPE (t));
	  LTO_NO_PREVAIL (DECL_QUALIFIER (t));
	  LTO_NO_PREVAIL (DECL_FIELD_BIT_OFFSET (t));
	  LTO_NO_PREVAIL (DECL_FCONTEXT (t));
	}
    }
  else if (TYPE_P (t))
    {
      LTO_NO_PREVAIL (TYPE_CACHED_VALUES (t));
      LTO_SET_PREVAIL (TYPE_SIZE (t));
      LTO_SET_PREVAIL (TYPE_SIZE_UNIT (t));
      LTO_NO_PREVAIL (TYPE_ATTRIBUTES (t));
      LTO_NO_PREVAIL (TYPE_NAME (t));

      LTO_SET_PREVAIL (TYPE_MINVAL (t));
      LTO_SET_PREVAIL (TYPE_MAXVAL (t));
      LTO_SET_PREVAIL (t->type_non_common.binfo);

      LTO_SET_PREVAIL (TYPE_CONTEXT (t));

      LTO_NO_PREVAIL (TYPE_CANONICAL (t));
      LTO_NO_PREVAIL (TYPE_MAIN_VARIANT (t));
      LTO_NO_PREVAIL (TYPE_NEXT_VARIANT (t));
    }
  else if (EXPR_P (t))
    {
      /* For expressions every operand may be a var/function decl.  */
      int i;
      for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
	LTO_SET_PREVAIL (TREE_OPERAND (t, i));
    }
  else
    {
      switch (code)
	{
	case TREE_LIST:
	  LTO_SET_PREVAIL (TREE_VALUE (t));
	  LTO_SET_PREVAIL (TREE_PURPOSE (t));
	  break;
	default:
	  /* Only the codes handled above are expected here.  */
	  gcc_unreachable ();
	}
    }
}
2812 #undef LTO_SET_PREVAIL
2813 #undef LTO_NO_PREVAIL
2814
2815 /* Helper function of lto_fixup_decls. Walks the var and fn streams in STATE,
2816 replaces var and function decls with the corresponding prevailing def. */
2817
2818 static void
2819 lto_fixup_state (struct lto_in_decl_state *state)
2820 {
2821 unsigned i, si;
2822 struct lto_tree_ref_table *table;
2823
2824 /* Although we only want to replace FUNCTION_DECLs and VAR_DECLs,
2825 we still need to walk from all DECLs to find the reachable
2826 FUNCTION_DECLs and VAR_DECLs. */
2827 for (si = 0; si < LTO_N_DECL_STREAMS; si++)
2828 {
2829 table = &state->streams[si];
2830 for (i = 0; i < table->size; i++)
2831 {
2832 tree *tp = table->trees + i;
2833 if (VAR_OR_FUNCTION_DECL_P (*tp))
2834 *tp = lto_symtab_prevailing_decl (*tp);
2835 }
2836 }
2837 }
2838
2839 /* A callback of htab_traverse. Just extracts a state from SLOT
2840 and calls lto_fixup_state. */
2841
2842 static int
2843 lto_fixup_state_aux (void **slot, void *aux ATTRIBUTE_UNUSED)
2844 {
2845 struct lto_in_decl_state *state = (struct lto_in_decl_state *) *slot;
2846 lto_fixup_state (state);
2847 return 1;
2848 }
2849
2850 /* Fix the decls from all FILES. Replaces each decl with the corresponding
2851 prevailing one. */
2852
2853 static void
2854 lto_fixup_decls (struct lto_file_decl_data **files)
2855 {
2856 unsigned int i;
2857 htab_iterator hi;
2858 tree t;
2859
2860 FOR_EACH_HTAB_ELEMENT (tree_with_vars, t, tree, hi)
2861 lto_fixup_prevailing_decls (t);
2862
2863 for (i = 0; files[i]; i++)
2864 {
2865 struct lto_file_decl_data *file = files[i];
2866 struct lto_in_decl_state *state = file->global_decl_state;
2867 lto_fixup_state (state);
2868
2869 htab_traverse (file->function_decl_states, lto_fixup_state_aux, NULL);
2870 }
2871 }
2872
2873 static GTY((length ("lto_stats.num_input_files + 1"))) struct lto_file_decl_data **all_file_decl_data;
2874
2875 /* Turn file datas for sub files into a single array, so that they look
2876 like separate files for further passes. */
2877
2878 static void
2879 lto_flatten_files (struct lto_file_decl_data **orig, int count, int last_file_ix)
2880 {
2881 struct lto_file_decl_data *n, *next;
2882 int i, k;
2883
2884 lto_stats.num_input_files = count;
2885 all_file_decl_data
2886 = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (count + 1);
2887 /* Set the hooks so that all of the ipa passes can read in their data. */
2888 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
2889 for (i = 0, k = 0; i < last_file_ix; i++)
2890 {
2891 for (n = orig[i]; n != NULL; n = next)
2892 {
2893 all_file_decl_data[k++] = n;
2894 next = n->next;
2895 n->next = NULL;
2896 }
2897 }
2898 all_file_decl_data[k] = NULL;
2899 gcc_assert (k == count);
2900 }
2901
/* Input file data before flattening (i.e. splitting them to subfiles to support
   incremental linking).  */
2904 static int real_file_count;
2905 static GTY((length ("real_file_count + 1"))) struct lto_file_decl_data **real_file_decl_data;
2906
2907 /* Read all the symbols from the input files FNAMES. NFILES is the
2908 number of files requested in the command line. Instantiate a
2909 global call graph by aggregating all the sub-graphs found in each
2910 file. */
2911
2912 static void
2913 read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
2914 {
2915 unsigned int i, last_file_ix;
2916 FILE *resolution;
2917 struct cgraph_node *node;
2918 int count = 0;
2919 struct lto_file_decl_data **decl_data;
2920
2921 init_cgraph ();
2922
2923 timevar_push (TV_IPA_LTO_DECL_IN);
2924
2925 real_file_decl_data
2926 = decl_data = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (nfiles + 1);
2927 real_file_count = nfiles;
2928
2929 /* Read the resolution file. */
2930 resolution = NULL;
2931 if (resolution_file_name)
2932 {
2933 int t;
2934 unsigned num_objects;
2935
2936 resolution = fopen (resolution_file_name, "r");
2937 if (resolution == NULL)
2938 fatal_error ("could not open symbol resolution file: %m");
2939
2940 t = fscanf (resolution, "%u", &num_objects);
2941 gcc_assert (t == 1);
2942
2943 /* True, since the plugin splits the archives. */
2944 gcc_assert (num_objects == nfiles);
2945 }
2946
2947 tree_with_vars = htab_create_ggc (101, htab_hash_pointer, htab_eq_pointer,
2948 NULL);
2949 type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
2950 tree_int_map_eq, NULL);
2951 type_pair_cache = XCNEWVEC (struct type_pair_d, GIMPLE_TYPE_PAIR_SIZE);
2952 gimple_type_leader = ggc_alloc_cleared_vec_gimple_type_leader_entry_s
2953 (GIMPLE_TYPE_LEADER_SIZE);
2954 gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);
2955
2956 if (!quiet_flag)
2957 fprintf (stderr, "Reading object files:");
2958
2959 /* Read all of the object files specified on the command line. */
2960 for (i = 0, last_file_ix = 0; i < nfiles; ++i)
2961 {
2962 struct lto_file_decl_data *file_data = NULL;
2963 if (!quiet_flag)
2964 {
2965 fprintf (stderr, " %s", fnames[i]);
2966 fflush (stderr);
2967 }
2968
2969 current_lto_file = lto_obj_file_open (fnames[i], false);
2970 if (!current_lto_file)
2971 break;
2972
2973 file_data = lto_file_read (current_lto_file, resolution, &count);
2974 if (!file_data)
2975 {
2976 lto_obj_file_close (current_lto_file);
2977 free (current_lto_file);
2978 current_lto_file = NULL;
2979 break;
2980 }
2981
2982 decl_data[last_file_ix++] = file_data;
2983
2984 lto_obj_file_close (current_lto_file);
2985 free (current_lto_file);
2986 current_lto_file = NULL;
2987 ggc_collect ();
2988 }
2989
2990 lto_flatten_files (decl_data, count, last_file_ix);
2991 lto_stats.num_input_files = count;
2992 ggc_free(decl_data);
2993 real_file_decl_data = NULL;
2994
2995 if (resolution_file_name)
2996 fclose (resolution);
2997
2998 /* Free gimple type merging datastructures. */
2999 htab_delete (gimple_types);
3000 gimple_types = NULL;
3001 htab_delete (type_hash_cache);
3002 type_hash_cache = NULL;
3003 free (type_pair_cache);
3004 type_pair_cache = NULL;
3005 gimple_type_leader = NULL;
3006 free_gimple_type_tables ();
3007 ggc_collect ();
3008
3009 /* Set the hooks so that all of the ipa passes can read in their data. */
3010 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
3011
3012 timevar_pop (TV_IPA_LTO_DECL_IN);
3013
3014 if (!quiet_flag)
3015 fprintf (stderr, "\nReading the callgraph\n");
3016
3017 timevar_push (TV_IPA_LTO_CGRAPH_IO);
3018 /* Read the symtab. */
3019 input_symtab ();
3020
3021 /* Store resolutions into the symbol table. */
3022 if (resolution_map)
3023 {
3024 void **res;
3025 symtab_node snode;
3026
3027 FOR_EACH_SYMBOL (snode)
3028 if (symtab_real_symbol_p (snode)
3029 && (res = pointer_map_contains (resolution_map,
3030 snode->symbol.decl)))
3031 snode->symbol.resolution
3032 = (enum ld_plugin_symbol_resolution)(size_t)*res;
3033
3034 pointer_map_destroy (resolution_map);
3035 resolution_map = NULL;
3036 }
3037
3038 timevar_pop (TV_IPA_LTO_CGRAPH_IO);
3039
3040 if (!quiet_flag)
3041 fprintf (stderr, "Merging declarations\n");
3042
3043 timevar_push (TV_IPA_LTO_DECL_MERGE);
3044 /* Merge global decls. */
3045 lto_symtab_merge_decls ();
3046
3047 /* If there were errors during symbol merging bail out, we have no
3048 good way to recover here. */
3049 if (seen_error ())
3050 fatal_error ("errors during merging of translation units");
3051
3052 /* Fixup all decls. */
3053 lto_fixup_decls (all_file_decl_data);
3054 htab_delete (tree_with_vars);
3055 tree_with_vars = NULL;
3056 ggc_collect ();
3057
3058 timevar_pop (TV_IPA_LTO_DECL_MERGE);
3059 /* Each pass will set the appropriate timer. */
3060
3061 if (!quiet_flag)
3062 fprintf (stderr, "Reading summaries\n");
3063
3064 /* Read the IPA summary data. */
3065 if (flag_ltrans)
3066 ipa_read_optimization_summaries ();
3067 else
3068 ipa_read_summaries ();
3069
3070 for (i = 0; all_file_decl_data[i]; i++)
3071 {
3072 gcc_assert (all_file_decl_data[i]->symtab_node_encoder);
3073 lto_symtab_encoder_delete (all_file_decl_data[i]->symtab_node_encoder);
3074 all_file_decl_data[i]->symtab_node_encoder = NULL;
3075 }
3076
3077 /* Finally merge the cgraph according to the decl merging decisions. */
3078 timevar_push (TV_IPA_LTO_CGRAPH_MERGE);
3079 if (cgraph_dump_file)
3080 {
3081 fprintf (cgraph_dump_file, "Before merging:\n");
3082 dump_cgraph (cgraph_dump_file);
3083 dump_varpool (cgraph_dump_file);
3084 }
3085 lto_symtab_merge_cgraph_nodes ();
3086 ggc_collect ();
3087
3088 /* FIXME: ipa_transforms_to_apply holds list of passes that have optimization
3089 summaries computed and needs to apply changes. At the moment WHOPR only
3090 supports inlining, so we can push it here by hand. In future we need to stream
3091 this field into ltrans compilation. */
3092 if (flag_ltrans)
3093 FOR_EACH_DEFINED_FUNCTION (node)
3094 VEC_safe_push (ipa_opt_pass, heap,
3095 node->ipa_transforms_to_apply,
3096 (ipa_opt_pass)&pass_ipa_inline);
3097
3098 timevar_pop (TV_IPA_LTO_CGRAPH_MERGE);
3099
3100 timevar_push (TV_IPA_LTO_DECL_INIT_IO);
3101
3102 /* Indicate that the cgraph is built and ready. */
3103 cgraph_function_flags_ready = true;
3104
3105 timevar_pop (TV_IPA_LTO_DECL_INIT_IO);
3106 ggc_free (all_file_decl_data);
3107 all_file_decl_data = NULL;
3108 }
3109
3110
3111 /* Materialize all the bodies for all the nodes in the callgraph. */
3112
3113 static void
3114 materialize_cgraph (void)
3115 {
3116 tree decl;
3117 struct cgraph_node *node;
3118 unsigned i;
3119 timevar_id_t lto_timer;
3120
3121 if (!quiet_flag)
3122 fprintf (stderr,
3123 flag_wpa ? "Materializing decls:" : "Reading function bodies:");
3124
3125 /* Now that we have input the cgraph, we need to clear all of the aux
3126 nodes and read the functions if we are not running in WPA mode. */
3127 timevar_push (TV_IPA_LTO_GIMPLE_IN);
3128
3129 FOR_EACH_FUNCTION (node)
3130 {
3131 if (node->symbol.lto_file_data)
3132 {
3133 lto_materialize_function (node);
3134 lto_stats.num_input_cgraph_nodes++;
3135 }
3136 }
3137
3138 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
3139
3140 /* Start the appropriate timer depending on the mode that we are
3141 operating in. */
3142 lto_timer = (flag_wpa) ? TV_WHOPR_WPA
3143 : (flag_ltrans) ? TV_WHOPR_LTRANS
3144 : TV_LTO;
3145 timevar_push (lto_timer);
3146
3147 current_function_decl = NULL;
3148 set_cfun (NULL);
3149
3150 /* Inform the middle end about the global variables we have seen. */
3151 FOR_EACH_VEC_ELT (tree, lto_global_var_decls, i, decl)
3152 rest_of_decl_compilation (decl, 1, 0);
3153
3154 if (!quiet_flag)
3155 fprintf (stderr, "\n");
3156
3157 timevar_pop (lto_timer);
3158 }
3159
3160
3161 /* Show various memory usage statistics related to LTO. */
3162 static void
3163 print_lto_report_1 (void)
3164 {
3165 const char *pfx = (flag_lto) ? "LTO" : (flag_wpa) ? "WPA" : "LTRANS";
3166 fprintf (stderr, "%s statistics\n", pfx);
3167
3168 if (gimple_types)
3169 fprintf (stderr, "[%s] GIMPLE type table: size %ld, %ld elements, "
3170 "%ld searches, %ld collisions (ratio: %f)\n", pfx,
3171 (long) htab_size (gimple_types),
3172 (long) htab_elements (gimple_types),
3173 (long) gimple_types->searches,
3174 (long) gimple_types->collisions,
3175 htab_collisions (gimple_types));
3176 else
3177 fprintf (stderr, "[%s] GIMPLE type table is empty\n", pfx);
3178 if (type_hash_cache)
3179 fprintf (stderr, "[%s] GIMPLE type hash table: size %ld, %ld elements, "
3180 "%ld searches, %ld collisions (ratio: %f)\n", pfx,
3181 (long) htab_size (type_hash_cache),
3182 (long) htab_elements (type_hash_cache),
3183 (long) type_hash_cache->searches,
3184 (long) type_hash_cache->collisions,
3185 htab_collisions (type_hash_cache));
3186 else
3187 fprintf (stderr, "[%s] GIMPLE type hash table is empty\n", pfx);
3188
3189 print_gimple_types_stats (pfx);
3190 print_lto_report (pfx);
3191 }
3192
/* Perform whole program analysis (WPA) on the callgraph and write out the
   optimization plan.  The phases, in order: materialize decls, run the
   regular IPA passes (analysis/decisions only -- no bodies are read in
   WPA mode), partition the callgraph into LTRANS units, then stream each
   partition out to its own object file.  */

static void
do_whole_program_analysis (void)
{
  symtab_node node;

  timevar_start (TV_PHASE_OPT_GEN);

  /* Note that since we are in WPA mode, materialize_cgraph will not
     actually read in all the function bodies.  It only materializes
     the decls and cgraph nodes so that analysis can be performed.  */
  materialize_cgraph ();

  /* Reading in the cgraph uses different timers, start timing WPA now.  */
  timevar_push (TV_WHOPR_WPA);

  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }

  cgraph_function_flags_ready = true;

  if (cgraph_dump_file)
    {
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  /* IPA passes allocate bitmaps from this obstack; released below once
     the pass list has run.  */
  bitmap_obstack_initialize (NULL);
  cgraph_state = CGRAPH_STATE_IPA_SSA;

  execute_ipa_pass_list (all_regular_ipa_passes);

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  bitmap_obstack_release (NULL);

  /* We are about to launch the final LTRANS phase, stop the WPA timer.  */
  timevar_pop (TV_WHOPR_WPA);

  /* Choose the partitioning strategy selected by the partitioning flags:
     one partition per input file, everything in one partition, or the
     balanced heuristic as the default.  */
  timevar_push (TV_WHOPR_PARTITIONING);
  if (flag_lto_partition_1to1)
    lto_1_to_1_map ();
  else if (flag_lto_partition_max)
    lto_max_map ();
  else
    lto_balanced_map ();

  /* AUX pointers are used by partitioning code to bookkeep number of
     partitions symbol is in.  This is no longer needed.  */
  FOR_EACH_SYMBOL (node)
    node->symbol.aux = NULL;

  lto_stats.num_cgraph_partitions += VEC_length (ltrans_partition,
						 ltrans_partitions);
  timevar_pop (TV_WHOPR_PARTITIONING);

  timevar_stop (TV_PHASE_OPT_GEN);
  timevar_start (TV_PHASE_STREAM_OUT);

  if (!quiet_flag)
    {
      fprintf (stderr, "\nStreaming out");
      fflush (stderr);
    }
  /* Write each partition to its own output file for the LTRANS phase.  */
  lto_wpa_write_files ();
  if (!quiet_flag)
    fprintf (stderr, "\n");

  timevar_stop (TV_PHASE_STREAM_OUT);

  ggc_collect ();
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }

  /* Show the LTO report before launching LTRANS.  */
  if (flag_lto_report)
    print_lto_report_1 ();
  if (mem_report_wpa)
    dump_memory_report (true);
}
3287
3288
3289 static GTY(()) tree lto_eh_personality_decl;
3290
3291 /* Return the LTO personality function decl. */
3292
3293 tree
3294 lto_eh_personality (void)
3295 {
3296 if (!lto_eh_personality_decl)
3297 {
3298 /* Use the first personality DECL for our personality if we don't
3299 support multiple ones. This ensures that we don't artificially
3300 create the need for them in a single-language program. */
3301 if (first_personality_decl && !dwarf2out_do_cfi_asm ())
3302 lto_eh_personality_decl = first_personality_decl;
3303 else
3304 lto_eh_personality_decl = lhd_gcc_personality ();
3305 }
3306
3307 return lto_eh_personality_decl;
3308 }
3309
/* Set the process name based on the LTO mode, so process listings show
   which personality this lto1 instance is running as.  Note these are
   deliberately independent ifs rather than an else-chain: should more
   than one mode flag be set, the last matching call determines the
   final title.  */

static void
lto_process_name (void)
{
  if (flag_lto)
    setproctitle ("lto1-lto");
  if (flag_wpa)
    setproctitle ("lto1-wpa");
  if (flag_ltrans)
    setproctitle ("lto1-ltrans");
}
3322
3323
/* Initialize the LTO front end.  Called once from lto_main before any
   input files are read: sets the process title, installs the streamer
   and section-reading hooks, clears the statistics, and registers the
   bitmap obstack and GIMPLE CFG hooks.  */

static void
lto_init (void)
{
  lto_process_name ();
  lto_streamer_hooks_init ();
  lto_reader_init ();
  /* No file decl data yet; a later lto_set_in_hooks call (during
     symbol reading) installs the real per-file tables.  */
  lto_set_in_hooks (NULL, get_section_data, free_section_data);
  memset (&lto_stats, 0, sizeof (lto_stats));
  bitmap_obstack_initialize (NULL);
  gimple_register_cfg_hooks ();
}
3337
3338
/* Main entry point for the GIMPLE front end.  This front end has
   three main personalities:

   - LTO (-flto).  All the object files on the command line are
     loaded in memory and processed as a single translation unit.
     This is the traditional link-time optimization behavior.

   - WPA (-fwpa).  Only the callgraph and summary information for
     files in the command file are loaded.  A single callgraph
     (without function bodies) is instantiated for the whole set of
     files.  IPA passes are only allowed to analyze the call graph
     and make transformation decisions.  The callgraph is
     partitioned, each partition is written to a new object file
     together with the transformation decisions.

   - LTRANS (-fltrans).  Similar to -flto but it prevents the IPA
     summary files from running again.  Since WPA computed summary
     information and decided what transformations to apply, LTRANS
     simply applies them.  */

void
lto_main (void)
{
  /* LTO is called as a front end, even though it is not a front end.
     Because it is called as a front end, TV_PHASE_PARSING and
     TV_PARSE_GLOBAL are active, and we need to turn them off while
     doing LTO.  Later we turn them back on so they are active up in
     toplev.c.  */
  timevar_pop (TV_PARSE_GLOBAL);
  timevar_stop (TV_PHASE_PARSING);

  timevar_start (TV_PHASE_SETUP);

  /* Initialize the LTO front end.  */
  lto_init ();

  timevar_stop (TV_PHASE_SETUP);
  timevar_start (TV_PHASE_STREAM_IN);

  /* Read all the symbols and call graph from all the files in the
     command line.  */
  read_cgraph_and_symbols (num_in_fnames, in_fnames);

  timevar_stop (TV_PHASE_STREAM_IN);

  /* Errors during symbol reading or merging leave no sane state to
     optimize; skip the rest of the pipeline in that case.  */
  if (!seen_error ())
    {
      /* If WPA is enabled analyze the whole call graph and create an
	 optimization plan.  Otherwise, read in all the function
	 bodies and continue with optimization.  */
      if (flag_wpa)
	do_whole_program_analysis ();
      else
	{
	  timevar_start (TV_PHASE_OPT_GEN);

	  materialize_cgraph ();

	  /* Let the middle end know that we have read and merged all of
	     the input files.  */
	  compile ();

	  timevar_stop (TV_PHASE_OPT_GEN);

	  /* FIXME lto, if the processes spawned by WPA fail, we miss
	     the chance to print WPA's report, so WPA will call
	     print_lto_report before launching LTRANS.  If LTRANS was
	     launched directly by the driver we would not need to do
	     this.  */
	  if (flag_lto_report)
	    print_lto_report_1 ();
	}
    }

  /* Here we make LTO pretend to be a parser.  */
  timevar_start (TV_PHASE_PARSING);
  timevar_push (TV_PARSE_GLOBAL);
}
3417
3418 #include "gt-lto-lto.h"