1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright 2009 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "toplev.h"
28 #include "tree.h"
29 #include "expr.h"
30 #include "flags.h"
31 #include "params.h"
32 #include "input.h"
33 #include "hashtab.h"
34 #include "langhooks.h"
35 #include "basic-block.h"
36 #include "tree-flow.h"
37 #include "cgraph.h"
38 #include "function.h"
39 #include "ggc.h"
40 #include "diagnostic.h"
41 #include "except.h"
42 #include "vec.h"
43 #include "timevar.h"
44 #include "output.h"
45 #include "pointer-set.h"
46 #include "lto-streamer.h"
47 #include "gcov-io.h"
48
49 static void output_varpool (cgraph_node_set, varpool_node_set);
50
 51 /* Cgraph streaming is organized as a set of records whose type
 52 is indicated by a tag. */
53 enum LTO_cgraph_tags
54 {
55 /* Must leave 0 for the stopper. */
56
57 /* Cgraph node without body available. */
58 LTO_cgraph_unavail_node = 1,
59 /* Cgraph node with function body. */
60 LTO_cgraph_analyzed_node,
61 /* Cgraph edges. */
62 LTO_cgraph_edge,
63 LTO_cgraph_indirect_edge
64 };
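
/* A rough sketch of the cgraph section layout as produced by output_cgraph
   further below (informal notation, not a formal grammar):

       profile summary
       { node record }*        each introduced by one of the tags above
       { edge record }*        LTO_cgraph_edge / LTO_cgraph_indirect_edge
       0                       stopper tag
       { asm string }*         top-level asm statements
       0                       stopper

   The varpool and the IPA reference lists are streamed into separate
   sections (LTO_section_varpool and LTO_section_refs).  */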
65
66 /* Create a new cgraph encoder. */
67
68 lto_cgraph_encoder_t
69 lto_cgraph_encoder_new (void)
70 {
71 lto_cgraph_encoder_t encoder = XCNEW (struct lto_cgraph_encoder_d);
72 encoder->map = pointer_map_create ();
73 encoder->nodes = NULL;
74 encoder->body = pointer_set_create ();
75 return encoder;
76 }
77
78
79 /* Delete ENCODER and its components. */
80
81 void
82 lto_cgraph_encoder_delete (lto_cgraph_encoder_t encoder)
83 {
84 VEC_free (cgraph_node_ptr, heap, encoder->nodes);
85 pointer_map_destroy (encoder->map);
86 pointer_set_destroy (encoder->body);
87 free (encoder);
88 }
89
90
 91 /* Return the existing reference number of NODE in the cgraph encoder
 92 ENCODER. Assign a new reference if this is the first time
 93 NODE is encoded. */
94
95 int
96 lto_cgraph_encoder_encode (lto_cgraph_encoder_t encoder,
97 struct cgraph_node *node)
98 {
99 int ref;
100 void **slot;
101
102 slot = pointer_map_contains (encoder->map, node);
103 if (!slot)
104 {
105 ref = VEC_length (cgraph_node_ptr, encoder->nodes);
106 slot = pointer_map_insert (encoder->map, node);
107 *slot = (void *) (intptr_t) ref;
108 VEC_safe_push (cgraph_node_ptr, heap, encoder->nodes, node);
109 }
110 else
111 ref = (int) (intptr_t) *slot;
112
113 return ref;
114 }
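
/* A minimal usage sketch of the encoder API defined in this file
   (hypothetical caller, not part of GCC):

     lto_cgraph_encoder_t enc = lto_cgraph_encoder_new ();
     int r = lto_cgraph_encoder_encode (enc, node);      -- assigns index 0
     gcc_assert (lto_cgraph_encoder_encode (enc, node) == r);
     gcc_assert (lto_cgraph_encoder_lookup (enc, node) == r);
     gcc_assert (lto_cgraph_encoder_deref (enc, r) == node);
     lto_cgraph_encoder_delete (enc);

   The small integer references are what get streamed;
   lto_cgraph_encoder_deref maps them back to nodes.  */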
115
116 #define LCC_NOT_FOUND (-1)
117
118 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
119 or LCC_NOT_FOUND if it is not there. */
120
121 int
122 lto_cgraph_encoder_lookup (lto_cgraph_encoder_t encoder,
123 struct cgraph_node *node)
124 {
125 void **slot = pointer_map_contains (encoder->map, node);
126 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
127 }
128
129
130 /* Return the cgraph node corresponding to REF using ENCODER. */
131
132 struct cgraph_node *
133 lto_cgraph_encoder_deref (lto_cgraph_encoder_t encoder, int ref)
134 {
135 if (ref == LCC_NOT_FOUND)
136 return NULL;
137
138 return VEC_index (cgraph_node_ptr, encoder->nodes, ref);
139 }
140
141
 142 /* Return TRUE if we should encode the body of NODE (if any). */
143
144 bool
145 lto_cgraph_encoder_encode_body_p (lto_cgraph_encoder_t encoder,
146 struct cgraph_node *node)
147 {
148 return pointer_set_contains (encoder->body, node);
149 }
150
 151 /* Record that we should encode the body of NODE (if any). */
152
153 static void
154 lto_set_cgraph_encoder_encode_body (lto_cgraph_encoder_t encoder,
155 struct cgraph_node *node)
156 {
157 pointer_set_insert (encoder->body, node);
158 }
159
160 /* Create a new varpool encoder. */
161
162 lto_varpool_encoder_t
163 lto_varpool_encoder_new (void)
164 {
165 lto_varpool_encoder_t encoder = XCNEW (struct lto_varpool_encoder_d);
166 encoder->map = pointer_map_create ();
167 encoder->initializer = pointer_set_create ();
168 encoder->nodes = NULL;
169 return encoder;
170 }
171
172
173 /* Delete ENCODER and its components. */
174
175 void
176 lto_varpool_encoder_delete (lto_varpool_encoder_t encoder)
177 {
178 VEC_free (varpool_node_ptr, heap, encoder->nodes);
179 pointer_map_destroy (encoder->map);
180 pointer_set_destroy (encoder->initializer);
181 free (encoder);
182 }
183
184
 185 /* Return the existing reference number of NODE in the varpool encoder
 186 ENCODER. Assign a new reference if this is the first time
 187 NODE is encoded. */
188
189 int
190 lto_varpool_encoder_encode (lto_varpool_encoder_t encoder,
191 struct varpool_node *node)
192 {
193 int ref;
194 void **slot;
195
196 slot = pointer_map_contains (encoder->map, node);
197 if (!slot)
198 {
199 ref = VEC_length (varpool_node_ptr, encoder->nodes);
200 slot = pointer_map_insert (encoder->map, node);
201 *slot = (void *) (intptr_t) ref;
202 VEC_safe_push (varpool_node_ptr, heap, encoder->nodes, node);
203 }
204 else
205 ref = (int) (intptr_t) *slot;
206
207 return ref;
208 }
209
210 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
211 or LCC_NOT_FOUND if it is not there. */
212
213 int
214 lto_varpool_encoder_lookup (lto_varpool_encoder_t encoder,
215 struct varpool_node *node)
216 {
217 void **slot = pointer_map_contains (encoder->map, node);
218 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
219 }
220
221
222 /* Return the varpool node corresponding to REF using ENCODER. */
223
224 struct varpool_node *
225 lto_varpool_encoder_deref (lto_varpool_encoder_t encoder, int ref)
226 {
227 if (ref == LCC_NOT_FOUND)
228 return NULL;
229
230 return VEC_index (varpool_node_ptr, encoder->nodes, ref);
231 }
232
233
234 /* Return number of encoded nodes in ENCODER. */
235
236 static int
237 lto_varpool_encoder_size (lto_varpool_encoder_t encoder)
238 {
239 return VEC_length (varpool_node_ptr, encoder->nodes);
240 }
241
242 /* Return TRUE if we should encode initializer of NODE (if any). */
243
244 bool
245 lto_varpool_encoder_encode_initializer_p (lto_varpool_encoder_t encoder,
246 struct varpool_node *node)
247 {
248 return pointer_set_contains (encoder->initializer, node);
249 }
250
 251 /* Record that we should encode the initializer of NODE (if any). */
252
253 static void
254 lto_set_varpool_encoder_encode_initializer (lto_varpool_encoder_t encoder,
255 struct varpool_node *node)
256 {
257 pointer_set_insert (encoder->initializer, node);
258 }
259
260 /* Output the cgraph EDGE to OB using ENCODER. */
261
262 static void
263 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
264 lto_cgraph_encoder_t encoder)
265 {
266 unsigned int uid;
267 intptr_t ref;
268 struct bitpack_d *bp;
269
270 if (edge->indirect_unknown_callee)
271 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_indirect_edge);
272 else
273 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_edge);
274
275 ref = lto_cgraph_encoder_lookup (encoder, edge->caller);
276 gcc_assert (ref != LCC_NOT_FOUND);
277 lto_output_sleb128_stream (ob->main_stream, ref);
278
279 if (!edge->indirect_unknown_callee)
280 {
281 ref = lto_cgraph_encoder_lookup (encoder, edge->callee);
282 gcc_assert (ref != LCC_NOT_FOUND);
283 lto_output_sleb128_stream (ob->main_stream, ref);
284 }
285
286 lto_output_sleb128_stream (ob->main_stream, edge->count);
287
288 bp = bitpack_create ();
289 uid = flag_wpa ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt);
290 bp_pack_value (bp, uid, HOST_BITS_PER_INT);
291 bp_pack_value (bp, edge->inline_failed, HOST_BITS_PER_INT);
292 bp_pack_value (bp, edge->frequency, HOST_BITS_PER_INT);
293 bp_pack_value (bp, edge->loop_nest, 30);
294 bp_pack_value (bp, edge->indirect_inlining_edge, 1);
295 bp_pack_value (bp, edge->call_stmt_cannot_inline_p, 1);
296 bp_pack_value (bp, edge->can_throw_external, 1);
297 if (edge->indirect_unknown_callee)
298 {
299 int flags = edge->indirect_info->ecf_flags;
300 bp_pack_value (bp, (flags & ECF_CONST) != 0, 1);
301 bp_pack_value (bp, (flags & ECF_PURE) != 0, 1);
302 bp_pack_value (bp, (flags & ECF_NORETURN) != 0, 1);
303 bp_pack_value (bp, (flags & ECF_MALLOC) != 0, 1);
304 bp_pack_value (bp, (flags & ECF_NOTHROW) != 0, 1);
305 bp_pack_value (bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
306 /* Flags that should not appear on indirect calls. */
307 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
308 | ECF_MAY_BE_ALLOCA
309 | ECF_SIBCALL
310 | ECF_NOVOPS)));
311 }
312 lto_output_bitpack (ob->main_stream, bp);
313 bitpack_delete (bp);
314 }
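
/* For reference, the edge record written above is, in order (a sketch
   mirroring lto_output_edge and input_edge below):

     uleb128  tag (LTO_cgraph_edge or LTO_cgraph_indirect_edge)
     sleb128  caller reference
     sleb128  callee reference (direct edges only)
     sleb128  count
     bitpack  stmt uid, inline_failed, frequency, loop_nest (30 bits),
              indirect_inlining_edge, call_stmt_cannot_inline_p,
              can_throw_external, plus 6 ECF flag bits on indirect edges.  */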
315
 316 /* Return true if LIST contains references from other partitions. */
317
318 bool
319 referenced_from_other_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
320 varpool_node_set vset)
321 {
322 int i;
323 struct ipa_ref *ref;
324 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
325 {
326 if (ref->refering_type == IPA_REF_CGRAPH)
327 {
328 if (!cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
329 return true;
330 }
331 else
332 {
333 if (!varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
334 vset))
335 return true;
336 }
337 }
338 return false;
339 }
340
 341 /* Return true when NODE is reachable from another partition. */
342
343 bool
344 reachable_from_other_partition_p (struct cgraph_node *node, cgraph_node_set set)
345 {
346 struct cgraph_edge *e;
347 if (!node->analyzed)
348 return false;
349 if (node->global.inlined_to)
350 return false;
351 for (e = node->callers; e; e = e->next_caller)
352 if (!cgraph_node_in_set_p (e->caller, set))
353 return true;
354 return false;
355 }
356
 357 /* Return true if LIST contains references from this partition. */
358
359 bool
360 referenced_from_this_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
361 varpool_node_set vset)
362 {
363 int i;
364 struct ipa_ref *ref;
365 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
366 {
367 if (ref->refering_type == IPA_REF_CGRAPH)
368 {
369 if (cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
370 return true;
371 }
372 else
373 {
374 if (varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
375 vset))
376 return true;
377 }
378 }
379 return false;
380 }
381
 382 /* Return true when NODE is reachable from this partition. */
383
384 bool
385 reachable_from_this_partition_p (struct cgraph_node *node, cgraph_node_set set)
386 {
387 struct cgraph_edge *e;
388 if (!node->analyzed)
389 return false;
390 if (node->global.inlined_to)
391 return false;
392 for (e = node->callers; e; e = e->next_caller)
393 if (cgraph_node_in_set_p (e->caller, set))
394 return true;
395 return false;
396 }
397
 398 /* Output the cgraph NODE to OB. ENCODER is used to find the
 399 reference number of NODE->inlined_to. SET is the set of nodes we
 400 are writing to the current file. If NODE is not in SET, then NODE
 401 is a boundary of a cgraph_node_set and we pretend NODE just has a
 402 decl and no callees. VSET is the set of varpool nodes being written;
 403 together with SET it is used to decide whether NODE is referenced
 404 or reachable from another partition. */
405
406 static void
407 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
408 lto_cgraph_encoder_t encoder, cgraph_node_set set,
409 varpool_node_set vset)
410 {
411 unsigned int tag;
412 struct bitpack_d *bp;
413 bool boundary_p;
414 intptr_t ref;
415 bool in_other_partition = false;
416 struct cgraph_node *clone_of;
417
418 boundary_p = !cgraph_node_in_set_p (node, set);
419
420 if (node->analyzed && !boundary_p)
421 tag = LTO_cgraph_analyzed_node;
422 else
423 tag = LTO_cgraph_unavail_node;
424
425 lto_output_uleb128_stream (ob->main_stream, tag);
426
 427 /* In WPA mode, we only output part of the call-graph. Also, we
 428 fake cgraph node attributes. There are two cases that we care about.
 429
 430 Boundary nodes: These are nodes that are not part of SET but are
 431 called from within SET. We artificially make them look like
 432 externally visible nodes with no function body.
 433
 434 Cherry-picked nodes: These are nodes we pulled from other
 435 translation units into SET during IPA-inlining. We make them look
 436 like local static nodes to prevent clashes with other local statics. */
437 if (boundary_p && node->analyzed)
438 {
 439 /* Inline clones can not be part of the boundary.
 440 gcc_assert (!node->global.inlined_to);
 441
 442 FIXME: At the moment they can be, when the partition contains an inline
 443 clone that is a clone of an inline clone from outside the partition. We
 444 can reshape the clone tree and make another node the root, but it
 445 needs a bit of extra work and will be promptly done by cgraph_remove_node
 446 after reading back. */
447 in_other_partition = 1;
448 }
449
450 clone_of = node->clone_of;
451 while (clone_of
452 && (ref = lto_cgraph_encoder_lookup (encoder, node->clone_of)) == LCC_NOT_FOUND)
453 if (clone_of->prev_sibling_clone)
454 clone_of = clone_of->prev_sibling_clone;
455 else
456 clone_of = clone_of->clone_of;
457 if (!clone_of)
458 lto_output_sleb128_stream (ob->main_stream, LCC_NOT_FOUND);
459 else
460 lto_output_sleb128_stream (ob->main_stream, ref);
461
462
463 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
464 lto_output_sleb128_stream (ob->main_stream, node->count);
465
466 bp = bitpack_create ();
467 bp_pack_value (bp, node->local.local, 1);
468 bp_pack_value (bp, node->local.externally_visible, 1);
469 bp_pack_value (bp, node->local.finalized, 1);
470 bp_pack_value (bp, node->local.inlinable, 1);
471 bp_pack_value (bp, node->local.disregard_inline_limits, 1);
472 bp_pack_value (bp, node->local.redefined_extern_inline, 1);
473 bp_pack_value (bp, node->local.vtable_method, 1);
474 bp_pack_value (bp, node->needed, 1);
475 bp_pack_value (bp, node->address_taken, 1);
476 bp_pack_value (bp, node->abstract_and_needed, 1);
477 bp_pack_value (bp, tag == LTO_cgraph_analyzed_node
478 && !DECL_EXTERNAL (node->decl)
479 && (reachable_from_other_partition_p (node, set)
480 || referenced_from_other_partition_p (&node->ref_list, set, vset)), 1);
481 bp_pack_value (bp, node->lowered, 1);
482 bp_pack_value (bp, in_other_partition, 1);
483 bp_pack_value (bp, node->alias, 1);
484 bp_pack_value (bp, node->finalized_by_frontend, 1);
485 bp_pack_value (bp, node->frequency, 2);
486 lto_output_bitpack (ob->main_stream, bp);
487 bitpack_delete (bp);
488
489 if (tag == LTO_cgraph_analyzed_node)
490 {
491 lto_output_sleb128_stream (ob->main_stream,
492 node->local.inline_summary.estimated_self_stack_size);
493 lto_output_sleb128_stream (ob->main_stream,
494 node->local.inline_summary.self_size);
495 lto_output_sleb128_stream (ob->main_stream,
496 node->local.inline_summary.size_inlining_benefit);
497 lto_output_sleb128_stream (ob->main_stream,
498 node->local.inline_summary.self_time);
499 lto_output_sleb128_stream (ob->main_stream,
500 node->local.inline_summary.time_inlining_benefit);
501 if (node->global.inlined_to)
502 {
503 ref = lto_cgraph_encoder_lookup (encoder, node->global.inlined_to);
504 gcc_assert (ref != LCC_NOT_FOUND);
505 }
506 else
507 ref = LCC_NOT_FOUND;
508
509 lto_output_sleb128_stream (ob->main_stream, ref);
510 }
511
512 if (node->same_comdat_group && !boundary_p)
513 {
514 ref = lto_cgraph_encoder_lookup (encoder, node->same_comdat_group);
515 gcc_assert (ref != LCC_NOT_FOUND);
516 }
517 else
518 ref = LCC_NOT_FOUND;
519 lto_output_sleb128_stream (ob->main_stream, ref);
520
521 if (node->same_body)
522 {
523 struct cgraph_node *alias;
524 unsigned long alias_count = 1;
525 for (alias = node->same_body; alias->next; alias = alias->next)
526 alias_count++;
527 lto_output_uleb128_stream (ob->main_stream, alias_count);
528 do
529 {
530 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
531 alias->decl);
532 if (alias->thunk.thunk_p)
533 {
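 /* Encode the thunk kind in a single uleb128: bit 0 is always set
    (distinguishing a thunk from the plain same-body alias case, which
    writes 0 instead), bit 1 is this_adjusting and bit 2 is
    virtual_offset_p. input_node decodes it the same way. */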
534 lto_output_uleb128_stream
535 (ob->main_stream,
536 1 + (alias->thunk.this_adjusting != 0) * 2
537 + (alias->thunk.virtual_offset_p != 0) * 4);
538 lto_output_uleb128_stream (ob->main_stream,
539 alias->thunk.fixed_offset);
540 lto_output_uleb128_stream (ob->main_stream,
541 alias->thunk.virtual_value);
542 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
543 alias->thunk.alias);
544 }
545 else
546 {
547 lto_output_uleb128_stream (ob->main_stream, 0);
548 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
549 alias->thunk.alias);
550 }
551 alias = alias->previous;
552 }
553 while (alias);
554 }
555 else
556 lto_output_uleb128_stream (ob->main_stream, 0);
557 }
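
/* For reference, the node record written above is, in order (a sketch
   mirroring lto_output_node and input_node below):

     uleb128  tag (LTO_cgraph_analyzed_node or LTO_cgraph_unavail_node)
     sleb128  reference of the node this is a clone of, or LCC_NOT_FOUND
     uleb128  function decl index
     sleb128  count
     bitpack  local and global flags
     [analyzed nodes only] inline summary fields and inlined_to reference
     sleb128  same_comdat_group reference or LCC_NOT_FOUND
     uleb128  number of same-body aliases/thunks, followed by their records.  */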
558
 559 /* Output the varpool NODE to OB.
 560 If NODE is not in VSET, then NODE is a boundary. */
561
562 static void
563 lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
564 cgraph_node_set set, varpool_node_set vset)
565 {
566 bool boundary_p = !varpool_node_in_set_p (node, vset) && node->analyzed;
567 struct bitpack_d *bp;
568 struct varpool_node *alias;
569 int count = 0;
570
571 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
572 bp = bitpack_create ();
573 bp_pack_value (bp, node->externally_visible, 1);
574 bp_pack_value (bp, node->force_output, 1);
575 bp_pack_value (bp, node->finalized, 1);
576 bp_pack_value (bp, node->alias, 1);
577 gcc_assert (!node->alias || !node->extra_name);
578 gcc_assert (node->finalized || !node->analyzed);
579 gcc_assert (node->needed);
 580 /* Constant pool initializers can be de-unified into individual ltrans units.
 581 FIXME: Alternatively, at -Os we may want to avoid generating the local
 582 labels for them and share them across LTRANS partitions. */
583 if (DECL_IN_CONSTANT_POOL (node->decl))
584 {
 585 bp_pack_value (bp, 0, 1); /* used_from_other_partition. */
586 bp_pack_value (bp, 0, 1); /* in_other_partition. */
587 }
588 else
589 {
590 bp_pack_value (bp, node->analyzed
591 && referenced_from_other_partition_p (&node->ref_list,
592 set, vset), 1);
593 bp_pack_value (bp, boundary_p, 1); /* in_other_partition. */
594 }
595 /* Also emit any extra name aliases. */
596 for (alias = node->extra_name; alias; alias = alias->next)
597 count++;
598 bp_pack_value (bp, count != 0, 1);
599 lto_output_bitpack (ob->main_stream, bp);
600 bitpack_delete (bp);
601
602 if (count)
603 {
604 lto_output_uleb128_stream (ob->main_stream, count);
605 for (alias = node->extra_name; alias; alias = alias->next)
606 lto_output_var_decl_index (ob->decl_state, ob->main_stream, alias->decl);
607 }
608 }
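
/* For reference, the varpool node record written above is (a sketch
   mirroring lto_output_varpool_node and input_varpool_node below):

     uleb128  variable decl index
     bitpack  externally_visible, force_output, finalized, alias,
              used_from_other_partition, in_other_partition,
              "has extra name aliases" flag
     [if that last flag is set] uleb128 alias count followed by that many
     variable decl indices.  */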
609
 610 /* Output the reference REF to OB, using ENCODER to look up cgraph nodes
 611 and VARPOOL_ENCODER to look up varpool nodes. */
612
613 static void
614 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
615 lto_cgraph_encoder_t encoder,
616 lto_varpool_encoder_t varpool_encoder)
617 {
618 struct bitpack_d *bp = bitpack_create ();
619 bp_pack_value (bp, ref->refered_type, 1);
620 bp_pack_value (bp, ref->use, 2);
621 lto_output_bitpack (ob->main_stream, bp);
622 bitpack_delete (bp);
623 if (ref->refered_type == IPA_REF_CGRAPH)
624 {
625 int nref = lto_cgraph_encoder_lookup (encoder, ipa_ref_node (ref));
626 gcc_assert (nref != LCC_NOT_FOUND);
627 lto_output_sleb128_stream (ob->main_stream, nref);
628 }
629 else
630 {
631 int nref = lto_varpool_encoder_lookup (varpool_encoder,
632 ipa_ref_varpool_node (ref));
633 gcc_assert (nref != LCC_NOT_FOUND);
634 lto_output_sleb128_stream (ob->main_stream, nref);
635 }
636 }
637
638 /* Stream out profile_summary to OB. */
639
640 static void
641 output_profile_summary (struct lto_simple_output_block *ob)
642 {
643 if (profile_info)
644 {
645 /* We do not output num, it is not terribly useful. */
646 gcc_assert (profile_info->runs);
647 lto_output_uleb128_stream (ob->main_stream, profile_info->runs);
648 lto_output_sleb128_stream (ob->main_stream, profile_info->sum_all);
649 lto_output_sleb128_stream (ob->main_stream, profile_info->run_max);
650 lto_output_sleb128_stream (ob->main_stream, profile_info->sum_max);
651 }
652 else
653 lto_output_uleb128_stream (ob->main_stream, 0);
654 }
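
/* The profile summary record is thus a uleb128 number of runs (0 when no
   profile data is present) followed, when non-zero, by sum_all, run_max
   and sum_max as sleb128 values; input_profile_summary below reads it
   back in the same order.  */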
655
 656 /* Add NODE into ENCODER as well as the nodes it is cloned from,
 657 so that the node a clone is cloned from is encoded before the clone. */
658
659 static void
660 add_node_to (lto_cgraph_encoder_t encoder, struct cgraph_node *node,
661 bool include_body)
662 {
663 if (node->clone_of)
664 add_node_to (encoder, node->clone_of, include_body);
665 else if (include_body)
666 lto_set_cgraph_encoder_encode_body (encoder, node);
667 lto_cgraph_encoder_encode (encoder, node);
668 }
669
670 /* Add all references in LIST to encoders. */
671
672 static void
673 add_references (lto_cgraph_encoder_t encoder,
674 lto_varpool_encoder_t varpool_encoder,
675 struct ipa_ref_list *list)
676 {
677 int i;
678 struct ipa_ref *ref;
679 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
680 if (ref->refered_type == IPA_REF_CGRAPH)
681 add_node_to (encoder, ipa_ref_node (ref), false);
682 else
683 {
684 struct varpool_node *vnode = ipa_ref_varpool_node (ref);
685 lto_varpool_encoder_encode (varpool_encoder, vnode);
686 }
687 }
688
689 /* Output all callees or indirect outgoing edges. EDGE must be the first such
690 edge. */
691
692 static void
693 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
694 struct lto_simple_output_block *ob,
695 lto_cgraph_encoder_t encoder)
696 {
697 if (!edge)
698 return;
699
 700 /* Output edges in backward direction, so the reconstructed callgraph matches
 701 and it is easy to associate call sites with the IPA pass summaries. */
702 while (edge->next_callee)
703 edge = edge->next_callee;
704 for (; edge; edge = edge->prev_callee)
705 lto_output_edge (ob, edge, encoder);
706 }
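
/* An informal example: if the callees list is A -> B -> C, the edges are
   written C, B, A. On the reading side cgraph_create_edge pushes each new
   edge onto the head of the callees list, so the reconstructed list is
   again A -> B -> C.  */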
707
 708 /* Output the IPA reference lists of the nodes in SET and VSET. */
709
710 static void
711 output_refs (cgraph_node_set set, varpool_node_set vset,
712 lto_cgraph_encoder_t encoder,
713 lto_varpool_encoder_t varpool_encoder)
714 {
715 cgraph_node_set_iterator csi;
716 varpool_node_set_iterator vsi;
717 struct lto_simple_output_block *ob;
718 int count;
719 struct ipa_ref *ref;
720 int i;
721
722 ob = lto_create_simple_output_block (LTO_section_refs);
723
724 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
725 {
726 struct cgraph_node *node = csi_node (csi);
727
728 count = ipa_ref_list_nreferences (&node->ref_list);
729 if (count)
730 {
731 lto_output_uleb128_stream (ob->main_stream, count);
732 lto_output_uleb128_stream (ob->main_stream,
733 lto_cgraph_encoder_lookup (encoder, node));
734 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
735 lto_output_ref (ob, ref, encoder, varpool_encoder);
736 }
737 }
738
739 lto_output_uleb128_stream (ob->main_stream, 0);
740
741 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
742 {
743 struct varpool_node *node = vsi_node (vsi);
744
745 count = ipa_ref_list_nreferences (&node->ref_list);
746 if (count)
747 {
748 lto_output_uleb128_stream (ob->main_stream, count);
749 lto_output_uleb128_stream (ob->main_stream,
750 lto_varpool_encoder_lookup (varpool_encoder,
751 node));
752 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
753 lto_output_ref (ob, ref, encoder, varpool_encoder);
754 }
755 }
756
757 lto_output_uleb128_stream (ob->main_stream, 0);
758
759 lto_destroy_simple_output_block (ob);
760 }
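
/* The refs section therefore looks like (a sketch):

     { uleb128 count, uleb128 cgraph node index, count reference records }*
     0
     { uleb128 count, uleb128 varpool node index, count reference records }*
     0

   where each reference record is the bitpack plus index written by
   lto_output_ref above.  */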
761
 762 /* Find out all cgraph and varpool nodes we want to encode in the current unit
 763 and insert them into the encoders. */
764 void
765 compute_ltrans_boundary (struct lto_out_decl_state *state,
766 cgraph_node_set set, varpool_node_set vset)
767 {
768 struct cgraph_node *node;
769 cgraph_node_set_iterator csi;
770 varpool_node_set_iterator vsi;
771 struct cgraph_edge *edge;
772 int i;
773 lto_cgraph_encoder_t encoder;
774 lto_varpool_encoder_t varpool_encoder;
775
776 encoder = state->cgraph_node_encoder = lto_cgraph_encoder_new ();
777 varpool_encoder = state->varpool_node_encoder = lto_varpool_encoder_new ();
778
779 /* Go over all the nodes in SET and assign references. */
780 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
781 {
782 node = csi_node (csi);
783 add_node_to (encoder, node, true);
784 add_references (encoder, varpool_encoder, &node->ref_list);
785 }
786 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
787 {
788 struct varpool_node *vnode = vsi_node (vsi);
789 gcc_assert (!vnode->alias);
790 lto_varpool_encoder_encode (varpool_encoder, vnode);
791 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
792 add_references (encoder, varpool_encoder, &vnode->ref_list);
793 }
 794 /* Also pickle in the initializers of all referenced read-only variables
 795 to help folding. Constant pool variables are not shared, so we must
 796 pickle those too. */
797 for (i = 0; i < lto_varpool_encoder_size (varpool_encoder); i++)
798 {
799 struct varpool_node *vnode = lto_varpool_encoder_deref (varpool_encoder, i);
800 if (DECL_INITIAL (vnode->decl)
801 && !lto_varpool_encoder_encode_initializer_p (varpool_encoder,
802 vnode)
803 && (DECL_IN_CONSTANT_POOL (vnode->decl)
804 || TREE_READONLY (vnode->decl)))
805 {
806 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
807 add_references (encoder, varpool_encoder, &vnode->ref_list);
808 }
809 }
810
811 /* Go over all the nodes again to include callees that are not in
812 SET. */
813 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
814 {
815 node = csi_node (csi);
816 for (edge = node->callees; edge; edge = edge->next_callee)
817 {
818 struct cgraph_node *callee = edge->callee;
819 if (!cgraph_node_in_set_p (callee, set))
820 {
821 /* We should have moved all the inlines. */
822 gcc_assert (!callee->global.inlined_to);
823 add_node_to (encoder, callee, false);
824 }
825 }
826 }
827 }
828
829 /* Output the part of the cgraph in SET. */
830
831 void
832 output_cgraph (cgraph_node_set set, varpool_node_set vset)
833 {
834 struct cgraph_node *node;
835 struct lto_simple_output_block *ob;
836 cgraph_node_set_iterator csi;
837 int i, n_nodes;
838 lto_cgraph_encoder_t encoder;
839 lto_varpool_encoder_t varpool_encoder;
840 struct cgraph_asm_node *can;
841
842 ob = lto_create_simple_output_block (LTO_section_cgraph);
843
844 output_profile_summary (ob);
845
846 /* An encoder for cgraph nodes should have been created by
847 ipa_write_summaries_1. */
848 gcc_assert (ob->decl_state->cgraph_node_encoder);
849 gcc_assert (ob->decl_state->varpool_node_encoder);
850 encoder = ob->decl_state->cgraph_node_encoder;
851 varpool_encoder = ob->decl_state->varpool_node_encoder;
852
 853 /* Write out the nodes. We must first output a node and then its clones,
 854 otherwise when reading the node back there would be nothing to clone
 855 from. */
856 n_nodes = lto_cgraph_encoder_size (encoder);
857 for (i = 0; i < n_nodes; i++)
858 {
859 node = lto_cgraph_encoder_deref (encoder, i);
860 lto_output_node (ob, node, encoder, set, vset);
861 }
862
863 /* Go over the nodes in SET again to write edges. */
864 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
865 {
866 node = csi_node (csi);
867 output_outgoing_cgraph_edges (node->callees, ob, encoder);
868 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
869 }
870
871 lto_output_uleb128_stream (ob->main_stream, 0);
872
873 /* Emit toplevel asms. */
874 for (can = cgraph_asm_nodes; can; can = can->next)
875 {
876 int len = TREE_STRING_LENGTH (can->asm_str);
877 lto_output_uleb128_stream (ob->main_stream, len);
878 for (i = 0; i < len; ++i)
879 lto_output_1_stream (ob->main_stream,
880 TREE_STRING_POINTER (can->asm_str)[i]);
881 }
882
883 lto_output_uleb128_stream (ob->main_stream, 0);
884
885 lto_destroy_simple_output_block (ob);
886 output_varpool (set, vset);
887 output_refs (set, vset, encoder, varpool_encoder);
888 }
889
 890 /* Overwrite the information in NODE based on FILE_DATA, TAG, STACK_SIZE,
 891 SELF_TIME, TIME_INLINING_BENEFIT, SELF_SIZE and SIZE_INLINING_BENEFIT.
 892 This is called either to initialize NODE or to replace the values in it,
 893 for instance because the first time we saw it, the function body was not
 894 available but now it is. BP is a bitpack with all the bitflags for NODE
 895 read from the stream. */
896
897 static void
898 input_overwrite_node (struct lto_file_decl_data *file_data,
899 struct cgraph_node *node,
900 enum LTO_cgraph_tags tag,
901 struct bitpack_d *bp,
902 unsigned int stack_size,
903 unsigned int self_time,
904 unsigned int time_inlining_benefit,
905 unsigned int self_size,
906 unsigned int size_inlining_benefit)
907 {
908 node->aux = (void *) tag;
909 node->local.inline_summary.estimated_self_stack_size = stack_size;
910 node->local.inline_summary.self_time = self_time;
911 node->local.inline_summary.time_inlining_benefit = time_inlining_benefit;
912 node->local.inline_summary.self_size = self_size;
913 node->local.inline_summary.size_inlining_benefit = size_inlining_benefit;
914 node->global.time = self_time;
915 node->global.size = self_size;
916 node->global.estimated_stack_size = stack_size;
917 node->global.estimated_growth = INT_MIN;
918 node->local.lto_file_data = file_data;
919
920 node->local.local = bp_unpack_value (bp, 1);
921 node->local.externally_visible = bp_unpack_value (bp, 1);
922 node->local.finalized = bp_unpack_value (bp, 1);
923 node->local.inlinable = bp_unpack_value (bp, 1);
924 node->local.disregard_inline_limits = bp_unpack_value (bp, 1);
925 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
926 node->local.vtable_method = bp_unpack_value (bp, 1);
927 node->needed = bp_unpack_value (bp, 1);
928 node->address_taken = bp_unpack_value (bp, 1);
929 node->abstract_and_needed = bp_unpack_value (bp, 1);
930 node->reachable_from_other_partition = bp_unpack_value (bp, 1);
931 node->lowered = bp_unpack_value (bp, 1);
932 node->analyzed = tag == LTO_cgraph_analyzed_node;
933 node->in_other_partition = bp_unpack_value (bp, 1);
934 node->alias = bp_unpack_value (bp, 1);
935 node->finalized_by_frontend = bp_unpack_value (bp, 1);
936 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
937 }
938
 939 /* Output the encoded varpool nodes. SET and VSET are used to determine boundary nodes. */
940
941 static void
942 output_varpool (cgraph_node_set set, varpool_node_set vset)
943 {
944 struct lto_simple_output_block *ob = lto_create_simple_output_block (LTO_section_varpool);
945 lto_varpool_encoder_t varpool_encoder = ob->decl_state->varpool_node_encoder;
946 int len = lto_varpool_encoder_size (varpool_encoder), i;
947
948 lto_output_uleb128_stream (ob->main_stream, len);
949
 950 /* Write out the nodes. We must first output a node and then its clones,
 951 otherwise when reading the node back there would be nothing to clone
 952 from. */
953 for (i = 0; i < len; i++)
954 {
955 lto_output_varpool_node (ob, lto_varpool_encoder_deref (varpool_encoder, i),
956 set, vset);
957 }
958
959 lto_destroy_simple_output_block (ob);
960 }
961
 962 /* Read a node from input_block IB. TAG is the node's tag just read.
 963 Return the node read or overwritten. */
964
965 static struct cgraph_node *
966 input_node (struct lto_file_decl_data *file_data,
967 struct lto_input_block *ib,
968 enum LTO_cgraph_tags tag,
969 VEC(cgraph_node_ptr, heap) *nodes)
970 {
971 tree fn_decl;
972 struct cgraph_node *node;
973 struct bitpack_d *bp;
974 int stack_size = 0;
975 unsigned decl_index;
976 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
977 int self_time = 0;
978 int self_size = 0;
979 int time_inlining_benefit = 0;
980 int size_inlining_benefit = 0;
981 unsigned long same_body_count = 0;
982 int clone_ref;
983
984 clone_ref = lto_input_sleb128 (ib);
985
986 decl_index = lto_input_uleb128 (ib);
987 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
988
989 if (clone_ref != LCC_NOT_FOUND)
990 {
991 node = cgraph_clone_node (VEC_index (cgraph_node_ptr, nodes, clone_ref), fn_decl,
992 0, CGRAPH_FREQ_BASE, 0, false, NULL);
993 }
994 else
995 node = cgraph_node (fn_decl);
996
997 node->count = lto_input_sleb128 (ib);
998 bp = lto_input_bitpack (ib);
999
1000 if (tag == LTO_cgraph_analyzed_node)
1001 {
1002 stack_size = lto_input_sleb128 (ib);
1003 self_size = lto_input_sleb128 (ib);
1004 size_inlining_benefit = lto_input_sleb128 (ib);
1005 self_time = lto_input_sleb128 (ib);
1006 time_inlining_benefit = lto_input_sleb128 (ib);
1007
1008 ref = lto_input_sleb128 (ib);
1009 }
1010
1011 ref2 = lto_input_sleb128 (ib);
1012 same_body_count = lto_input_uleb128 (ib);
1013
1014 /* Make sure that we have not read this node before. Nodes that
1015 have already been read will have their tag stored in the 'aux'
1016 field. Since built-in functions can be referenced in multiple
1017 functions, they are expected to be read more than once. */
1018 if (node->aux && !DECL_IS_BUILTIN (node->decl))
1019 internal_error ("bytecode stream: found multiple instances of cgraph "
1020 "node %d", node->uid);
1021
1022 input_overwrite_node (file_data, node, tag, bp, stack_size, self_time,
1023 time_inlining_benefit, self_size,
1024 size_inlining_benefit);
1025 bitpack_delete (bp);
1026
1027 /* Store a reference for now, and fix up later to be a pointer. */
1028 node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
1029
1030 /* Store a reference for now, and fix up later to be a pointer. */
1031 node->same_comdat_group = (cgraph_node_ptr) (intptr_t) ref2;
1032
1033 while (same_body_count-- > 0)
1034 {
1035 tree alias_decl;
1036 int type;
1037 decl_index = lto_input_uleb128 (ib);
1038 alias_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1039 type = lto_input_uleb128 (ib);
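 /* TYPE mirrors the encoding in lto_output_node: 0 means a plain same-body
    alias; otherwise bit 0 is set, bit 1 is this_adjusting and bit 2 is
    virtual_offset_p. */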
1040 if (!type)
1041 {
1042 tree real_alias;
1043 decl_index = lto_input_uleb128 (ib);
1044 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1045 cgraph_same_body_alias (alias_decl, real_alias);
1046 }
1047 else
1048 {
1049 HOST_WIDE_INT fixed_offset = lto_input_uleb128 (ib);
1050 HOST_WIDE_INT virtual_value = lto_input_uleb128 (ib);
1051 tree real_alias;
1052 decl_index = lto_input_uleb128 (ib);
1053 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1054 cgraph_add_thunk (alias_decl, fn_decl, type & 2, fixed_offset,
1055 virtual_value,
1056 (type & 4) ? size_int (virtual_value) : NULL_TREE,
1057 real_alias);
1058 }
1059 }
1060 return node;
1061 }
1062
 1063 /* Read a varpool node from input_block IB.
 1064 Return the node read or overwritten. */
1065
1066 static struct varpool_node *
1067 input_varpool_node (struct lto_file_decl_data *file_data,
1068 struct lto_input_block *ib)
1069 {
1070 int decl_index;
1071 tree var_decl;
1072 struct varpool_node *node;
1073 struct bitpack_d *bp;
1074 bool aliases_p;
1075 int count;
1076
1077 decl_index = lto_input_uleb128 (ib);
1078 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1079 node = varpool_node (var_decl);
1080
1081 bp = lto_input_bitpack (ib);
1082 node->externally_visible = bp_unpack_value (bp, 1);
1083 node->force_output = bp_unpack_value (bp, 1);
1084 node->finalized = bp_unpack_value (bp, 1);
1085 node->alias = bp_unpack_value (bp, 1);
1086 node->analyzed = node->finalized;
1087 node->used_from_other_partition = bp_unpack_value (bp, 1);
1088 node->in_other_partition = bp_unpack_value (bp, 1);
1089 aliases_p = bp_unpack_value (bp, 1);
1090 if (node->finalized)
1091 varpool_mark_needed_node (node);
1092 bitpack_delete (bp);
1093 if (aliases_p)
1094 {
1095 count = lto_input_uleb128 (ib);
1096 for (; count > 0; count --)
1097 {
1098 tree decl = lto_file_decl_data_get_var_decl (file_data,
1099 lto_input_uleb128 (ib));
1100 varpool_extra_name_alias (decl, var_decl);
1101 }
1102 }
1103 return node;
1104 }
1105
 1106 /* Read an IPA reference from input_block IB and record it as a reference
 1107 from REFERING_NODE or REFERING_VARPOOL_NODE. */
1108
1109 static void
1110 input_ref (struct lto_input_block *ib,
1111 struct cgraph_node *refering_node,
1112 struct varpool_node *refering_varpool_node,
1113 VEC(cgraph_node_ptr, heap) *nodes,
1114 VEC(varpool_node_ptr, heap) *varpool_nodes)
1115 {
1116 struct cgraph_node *node = NULL;
1117 struct varpool_node *varpool_node = NULL;
1118 struct bitpack_d *bp;
1119 enum ipa_ref_type type;
1120 enum ipa_ref_use use;
1121
1122 bp = lto_input_bitpack (ib);
1123 type = (enum ipa_ref_type) bp_unpack_value (bp, 1);
1124 use = (enum ipa_ref_use) bp_unpack_value (bp, 2);
1125 bitpack_delete (bp);
1126 if (type == IPA_REF_CGRAPH)
1127 node = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1128 else
1129 varpool_node = VEC_index (varpool_node_ptr, varpool_nodes, lto_input_sleb128 (ib));
1130 ipa_record_reference (refering_node, refering_varpool_node,
1131 node, varpool_node, use, NULL);
1132 }
1133
1134 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1135 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1136 edge being read is indirect (in the sense that it has
1137 indirect_unknown_callee set). */
1138
1139 static void
1140 input_edge (struct lto_input_block *ib, VEC(cgraph_node_ptr, heap) *nodes,
1141 bool indirect)
1142 {
1143 struct cgraph_node *caller, *callee;
1144 struct cgraph_edge *edge;
1145 unsigned int stmt_id;
1146 gcov_type count;
1147 int freq;
1148 unsigned int nest;
1149 cgraph_inline_failed_t inline_failed;
1150 struct bitpack_d *bp;
1151 enum ld_plugin_symbol_resolution caller_resolution;
1152 int ecf_flags = 0;
1153
1154 caller = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1155 if (caller == NULL || caller->decl == NULL_TREE)
1156 internal_error ("bytecode stream: no caller found while reading edge");
1157
1158 if (!indirect)
1159 {
1160 callee = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1161 if (callee == NULL || callee->decl == NULL_TREE)
1162 internal_error ("bytecode stream: no callee found while reading edge");
1163 }
1164 else
1165 callee = NULL;
1166
1167 count = (gcov_type) lto_input_sleb128 (ib);
1168
1169 bp = lto_input_bitpack (ib);
1170 stmt_id = (unsigned int) bp_unpack_value (bp, HOST_BITS_PER_INT);
1171 inline_failed = (cgraph_inline_failed_t) bp_unpack_value (bp,
1172 HOST_BITS_PER_INT);
1173 freq = (int) bp_unpack_value (bp, HOST_BITS_PER_INT);
1174 nest = (unsigned) bp_unpack_value (bp, 30);
1175
1176 /* If the caller was preempted, don't create the edge.
1177 ??? Should we ever have edges from a preempted caller? */
1178 caller_resolution = lto_symtab_get_resolution (caller->decl);
1179 if (caller_resolution == LDPR_PREEMPTED_REG
1180 || caller_resolution == LDPR_PREEMPTED_IR)
1181 return;
1182
1183 if (indirect)
1184 edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq, nest);
1185 else
1186 edge = cgraph_create_edge (caller, callee, NULL, count, freq, nest);
1187
1188 edge->indirect_inlining_edge = bp_unpack_value (bp, 1);
1189 edge->lto_stmt_uid = stmt_id;
1190 edge->inline_failed = inline_failed;
1191 edge->call_stmt_cannot_inline_p = bp_unpack_value (bp, 1);
1192 edge->can_throw_external = bp_unpack_value (bp, 1);
1193 if (indirect)
1194 {
1195 if (bp_unpack_value (bp, 1))
1196 ecf_flags |= ECF_CONST;
1197 if (bp_unpack_value (bp, 1))
1198 ecf_flags |= ECF_PURE;
1199 if (bp_unpack_value (bp, 1))
1200 ecf_flags |= ECF_NORETURN;
1201 if (bp_unpack_value (bp, 1))
1202 ecf_flags |= ECF_MALLOC;
1203 if (bp_unpack_value (bp, 1))
1204 ecf_flags |= ECF_NOTHROW;
1205 if (bp_unpack_value (bp, 1))
1206 ecf_flags |= ECF_RETURNS_TWICE;
1207 edge->indirect_info->ecf_flags = ecf_flags;
1208 }
1209 bitpack_delete (bp);
1210 }
1211
1212
1213 /* Read a cgraph from IB using the info in FILE_DATA. */
1214
1215 static VEC(cgraph_node_ptr, heap) *
1216 input_cgraph_1 (struct lto_file_decl_data *file_data,
1217 struct lto_input_block *ib)
1218 {
1219 enum LTO_cgraph_tags tag;
1220 VEC(cgraph_node_ptr, heap) *nodes = NULL;
1221 struct cgraph_node *node;
1222 unsigned i;
1223 unsigned HOST_WIDE_INT len;
1224
1225 tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
1226 while (tag)
1227 {
1228 if (tag == LTO_cgraph_edge)
1229 input_edge (ib, nodes, false);
1230 else if (tag == LTO_cgraph_indirect_edge)
1231 input_edge (ib, nodes, true);
1232 else
1233 {
 1234 node = input_node (file_data, ib, tag, nodes);
1235 if (node == NULL || node->decl == NULL_TREE)
1236 internal_error ("bytecode stream: found empty cgraph node");
1237 VEC_safe_push (cgraph_node_ptr, heap, nodes, node);
1238 lto_cgraph_encoder_encode (file_data->cgraph_node_encoder, node);
1239 }
1240
1241 tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
1242 }
1243
1244 /* Input toplevel asms. */
1245 len = lto_input_uleb128 (ib);
1246 while (len)
1247 {
1248 char *str = (char *)xmalloc (len + 1);
1249 for (i = 0; i < len; ++i)
1250 str[i] = lto_input_1_unsigned (ib);
1251 cgraph_add_asm_node (build_string (len, str));
1252 free (str);
1253
1254 len = lto_input_uleb128 (ib);
1255 }
1256
1257 for (i = 0; VEC_iterate (cgraph_node_ptr, nodes, i, node); i++)
1258 {
1259 int ref = (int) (intptr_t) node->global.inlined_to;
1260
1261 /* Fixup inlined_to from reference to pointer. */
1262 if (ref != LCC_NOT_FOUND)
1263 node->global.inlined_to = VEC_index (cgraph_node_ptr, nodes, ref);
1264 else
1265 node->global.inlined_to = NULL;
1266
1267 ref = (int) (intptr_t) node->same_comdat_group;
1268
1269 /* Fixup same_comdat_group from reference to pointer. */
1270 if (ref != LCC_NOT_FOUND)
1271 node->same_comdat_group = VEC_index (cgraph_node_ptr, nodes, ref);
1272 else
1273 node->same_comdat_group = NULL;
1274 }
1275 return nodes;
1276 }
1277
1278 /* Read a varpool from IB using the info in FILE_DATA. */
1279
1280 static VEC(varpool_node_ptr, heap) *
1281 input_varpool_1 (struct lto_file_decl_data *file_data,
1282 struct lto_input_block *ib)
1283 {
1284 unsigned HOST_WIDE_INT len;
1285 VEC(varpool_node_ptr, heap) *varpool = NULL;
1286
1287 len = lto_input_uleb128 (ib);
1288 while (len)
1289 {
1290 VEC_safe_push (varpool_node_ptr, heap, varpool,
1291 input_varpool_node (file_data, ib));
1292 len--;
1293 }
1294 return varpool;
1295 }
1296
1297 /* Input ipa_refs. */
1298
1299 static void
1300 input_refs (struct lto_input_block *ib,
1301 VEC(cgraph_node_ptr, heap) *nodes,
1302 VEC(varpool_node_ptr, heap) *varpool)
1303 {
1304 int count;
1305 int idx;
1306 while (true)
1307 {
1308 struct cgraph_node *node;
1309 count = lto_input_uleb128 (ib);
1310 if (!count)
1311 break;
1312 idx = lto_input_uleb128 (ib);
1313 node = VEC_index (cgraph_node_ptr, nodes, idx);
1314 while (count)
1315 {
1316 input_ref (ib, node, NULL, nodes, varpool);
1317 count--;
1318 }
1319 }
1320 while (true)
1321 {
1322 struct varpool_node *node;
1323 count = lto_input_uleb128 (ib);
1324 if (!count)
1325 break;
1326 node = VEC_index (varpool_node_ptr, varpool, lto_input_uleb128 (ib));
1327 while (count)
1328 {
1329 input_ref (ib, NULL, node, nodes, varpool);
1330 count--;
1331 }
1332 }
1333 }
1334
1335
1336 static struct gcov_ctr_summary lto_gcov_summary;
1337
1338 /* Input profile_info from IB. */
1339 static void
1340 input_profile_summary (struct lto_input_block *ib)
1341 {
1342 unsigned int runs = lto_input_uleb128 (ib);
1343 if (runs)
1344 {
1345 if (!profile_info)
1346 {
1347 profile_info = &lto_gcov_summary;
1348 lto_gcov_summary.runs = runs;
1349 lto_gcov_summary.sum_all = lto_input_sleb128 (ib);
1350 lto_gcov_summary.run_max = lto_input_sleb128 (ib);
1351 lto_gcov_summary.sum_max = lto_input_sleb128 (ib);
1352 }
 1353 /* We could support this by scaling all counts to a common multiple
 1354 of all the different runs, but it is perhaps not worth the effort. */
1355 else if (profile_info->runs != runs
1356 || profile_info->sum_all != lto_input_sleb128 (ib)
1357 || profile_info->run_max != lto_input_sleb128 (ib)
1358 || profile_info->sum_max != lto_input_sleb128 (ib))
1359 sorry ("Combining units with different profiles is not supported.");
 1360 /* We allow some units to have a profile and others not to have one. This
 1361 just makes the unprofiled units be size optimized, which is sane. */
1362 }
1363
1364 }
1365
1366 /* Input and merge the cgraph from each of the .o files passed to
1367 lto1. */
1368
1369 void
1370 input_cgraph (void)
1371 {
1372 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1373 struct lto_file_decl_data *file_data;
1374 unsigned int j = 0;
1375 struct cgraph_node *node;
1376
1377 while ((file_data = file_data_vec[j++]))
1378 {
1379 const char *data;
1380 size_t len;
1381 struct lto_input_block *ib;
1382 VEC(cgraph_node_ptr, heap) *nodes;
1383 VEC(varpool_node_ptr, heap) *varpool;
1384
1385 ib = lto_create_simple_input_block (file_data, LTO_section_cgraph,
1386 &data, &len);
1387 input_profile_summary (ib);
1388 file_data->cgraph_node_encoder = lto_cgraph_encoder_new ();
1389 nodes = input_cgraph_1 (file_data, ib);
1390 lto_destroy_simple_input_block (file_data, LTO_section_cgraph,
1391 ib, data, len);
1392
1393 ib = lto_create_simple_input_block (file_data, LTO_section_varpool,
1394 &data, &len);
1395 varpool = input_varpool_1 (file_data, ib);
1396 lto_destroy_simple_input_block (file_data, LTO_section_varpool,
1397 ib, data, len);
1398
1399 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1400 &data, &len);
1401 input_refs (ib, nodes, varpool);
1402 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1403 ib, data, len);
1404 VEC_free (cgraph_node_ptr, heap, nodes);
1405 VEC_free (varpool_node_ptr, heap, varpool);
1406 }
1407
1408 /* Clear out the aux field that was used to store enough state to
1409 tell which nodes should be overwritten. */
1410 for (node = cgraph_nodes; node; node = node->next)
1411 {
1412 /* Some nodes may have been created by cgraph_node. This
1413 happens when the callgraph contains nested functions. If the
1414 node for the parent function was never emitted to the gimple
1415 file, cgraph_node will create a node for it when setting the
1416 context of the nested function. */
1417 if (node->local.lto_file_data)
1418 node->aux = NULL;
1419 }
1420 }