1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright 2009 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "toplev.h"
28 #include "tree.h"
29 #include "expr.h"
30 #include "flags.h"
31 #include "params.h"
32 #include "input.h"
33 #include "hashtab.h"
34 #include "langhooks.h"
35 #include "basic-block.h"
36 #include "tree-flow.h"
37 #include "cgraph.h"
38 #include "function.h"
39 #include "ggc.h"
40 #include "diagnostic.h"
41 #include "except.h"
42 #include "vec.h"
43 #include "timevar.h"
44 #include "output.h"
45 #include "pointer-set.h"
46 #include "lto-streamer.h"
47 #include "gcov-io.h"
48
49 static void output_varpool (cgraph_node_set, varpool_node_set);
50
51 /* Cgraph streaming is organized as a set of records whose type
52 is indicated by a tag. */
53 enum LTO_cgraph_tags
54 {
55 /* Must leave 0 for the stopper. */
56
57 /* Cgraph node without body available. */
58 LTO_cgraph_unavail_node = 1,
59 /* Cgraph node with function body. */
60 LTO_cgraph_analyzed_node,
61 /* Cgraph edges. */
62 LTO_cgraph_edge,
63 LTO_cgraph_indirect_edge
64 };
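
/* Reader's note (an informal sketch, not a format specification): the
   LTO_section_cgraph stream written by output_cgraph below consists of a
   profile summary, the tagged node records (a node always precedes its
   clones), the tagged edge records, a 0 stopper, the toplevel asm strings
   (length followed by the bytes), and a final 0 stopper.  */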
65
66 /* Create a new cgraph encoder. */
67
68 lto_cgraph_encoder_t
69 lto_cgraph_encoder_new (void)
70 {
71 lto_cgraph_encoder_t encoder = XCNEW (struct lto_cgraph_encoder_d);
72 encoder->map = pointer_map_create ();
73 encoder->nodes = NULL;
74 encoder->body = pointer_set_create ();
75 return encoder;
76 }
77
78
79 /* Delete ENCODER and its components. */
80
81 void
82 lto_cgraph_encoder_delete (lto_cgraph_encoder_t encoder)
83 {
84 VEC_free (cgraph_node_ptr, heap, encoder->nodes);
85 pointer_map_destroy (encoder->map);
86 pointer_set_destroy (encoder->body);
87 free (encoder);
88 }
89
90
91 /* Return the existing reference number of NODE in the cgraph encoder
92 ENCODER. Assign a new reference if this is the first time
93 NODE is encoded. */
94
95 int
96 lto_cgraph_encoder_encode (lto_cgraph_encoder_t encoder,
97 struct cgraph_node *node)
98 {
99 int ref;
100 void **slot;
101
102 slot = pointer_map_contains (encoder->map, node);
103 if (!slot)
104 {
105 ref = VEC_length (cgraph_node_ptr, encoder->nodes);
106 slot = pointer_map_insert (encoder->map, node);
107 *slot = (void *) (intptr_t) ref;
108 VEC_safe_push (cgraph_node_ptr, heap, encoder->nodes, node);
109 }
110 else
111 ref = (int) (intptr_t) *slot;
112
113 return ref;
114 }
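
/* Reader's note: references are handed out densely in encounter order,
   so the value returned above is also NODE's index in ENCODER->nodes and
   lto_cgraph_encoder_deref inverts the mapping.  */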
115
116 #define LCC_NOT_FOUND (-1)
117
118 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
119 or LCC_NOT_FOUND if it is not there. */
120
121 int
122 lto_cgraph_encoder_lookup (lto_cgraph_encoder_t encoder,
123 struct cgraph_node *node)
124 {
125 void **slot = pointer_map_contains (encoder->map, node);
126 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
127 }
128
129
130 /* Return the cgraph node corresponding to REF using ENCODER. */
131
132 struct cgraph_node *
133 lto_cgraph_encoder_deref (lto_cgraph_encoder_t encoder, int ref)
134 {
135 if (ref == LCC_NOT_FOUND)
136 return NULL;
137
138 return VEC_index (cgraph_node_ptr, encoder->nodes, ref);
139 }
140
141
142 /* Return TRUE if we should encode the body of NODE (if any). */
143
144 bool
145 lto_cgraph_encoder_encode_body_p (lto_cgraph_encoder_t encoder,
146 struct cgraph_node *node)
147 {
148 return pointer_set_contains (encoder->body, node);
149 }
150
151 /* Specify that we should encode the body of NODE (if any). */
152
153 static void
154 lto_set_cgraph_encoder_encode_body (lto_cgraph_encoder_t encoder,
155 struct cgraph_node *node)
156 {
157 pointer_set_insert (encoder->body, node);
158 }
159
160 /* Create a new varpool encoder. */
161
162 lto_varpool_encoder_t
163 lto_varpool_encoder_new (void)
164 {
165 lto_varpool_encoder_t encoder = XCNEW (struct lto_varpool_encoder_d);
166 encoder->map = pointer_map_create ();
167 encoder->initializer = pointer_set_create ();
168 encoder->nodes = NULL;
169 return encoder;
170 }
171
172
173 /* Delete ENCODER and its components. */
174
175 void
176 lto_varpool_encoder_delete (lto_varpool_encoder_t encoder)
177 {
178 VEC_free (varpool_node_ptr, heap, encoder->nodes);
179 pointer_map_destroy (encoder->map);
180 pointer_set_destroy (encoder->initializer);
181 free (encoder);
182 }
183
184
185 /* Return the existing reference number of NODE in the varpool encoder
186 ENCODER. Assign a new reference if this is the first time
187 NODE is encoded. */
188
189 int
190 lto_varpool_encoder_encode (lto_varpool_encoder_t encoder,
191 struct varpool_node *node)
192 {
193 int ref;
194 void **slot;
195
196 slot = pointer_map_contains (encoder->map, node);
197 if (!slot)
198 {
199 ref = VEC_length (varpool_node_ptr, encoder->nodes);
200 slot = pointer_map_insert (encoder->map, node);
201 *slot = (void *) (intptr_t) ref;
202 VEC_safe_push (varpool_node_ptr, heap, encoder->nodes, node);
203 }
204 else
205 ref = (int) (intptr_t) *slot;
206
207 return ref;
208 }
209
210 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
211 or LCC_NOT_FOUND if it is not there. */
212
213 int
214 lto_varpool_encoder_lookup (lto_varpool_encoder_t encoder,
215 struct varpool_node *node)
216 {
217 void **slot = pointer_map_contains (encoder->map, node);
218 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
219 }
220
221
222 /* Return the varpool node corresponding to REF using ENCODER. */
223
224 struct varpool_node *
225 lto_varpool_encoder_deref (lto_varpool_encoder_t encoder, int ref)
226 {
227 if (ref == LCC_NOT_FOUND)
228 return NULL;
229
230 return VEC_index (varpool_node_ptr, encoder->nodes, ref);
231 }
232
233
234 /* Return number of encoded nodes in ENCODER. */
235
236 static int
237 lto_varpool_encoder_size (lto_varpool_encoder_t encoder)
238 {
239 return VEC_length (varpool_node_ptr, encoder->nodes);
240 }
241
242 /* Return TRUE if we should encode initializer of NODE (if any). */
243
244 bool
245 lto_varpool_encoder_encode_initializer_p (lto_varpool_encoder_t encoder,
246 struct varpool_node *node)
247 {
248 return pointer_set_contains (encoder->initializer, node);
249 }
250
251 /* Specify that we should encode the initializer of NODE (if any). */
252
253 static void
254 lto_set_varpool_encoder_encode_initializer (lto_varpool_encoder_t encoder,
255 struct varpool_node *node)
256 {
257 pointer_set_insert (encoder->initializer, node);
258 }
259
260 /* Output the cgraph EDGE to OB using ENCODER. */
261
262 static void
263 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
264 lto_cgraph_encoder_t encoder)
265 {
266 unsigned int uid;
267 intptr_t ref;
268 struct bitpack_d *bp;
269
270 if (edge->indirect_unknown_callee)
271 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_indirect_edge);
272 else
273 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_edge);
274
275 ref = lto_cgraph_encoder_lookup (encoder, edge->caller);
276 gcc_assert (ref != LCC_NOT_FOUND);
277 lto_output_sleb128_stream (ob->main_stream, ref);
278
279 if (!edge->indirect_unknown_callee)
280 {
281 ref = lto_cgraph_encoder_lookup (encoder, edge->callee);
282 gcc_assert (ref != LCC_NOT_FOUND);
283 lto_output_sleb128_stream (ob->main_stream, ref);
284 }
285
286 lto_output_sleb128_stream (ob->main_stream, edge->count);
287
288 bp = bitpack_create ();
289 uid = flag_wpa ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt);
290 bp_pack_value (bp, uid, HOST_BITS_PER_INT);
291 bp_pack_value (bp, edge->inline_failed, HOST_BITS_PER_INT);
292 bp_pack_value (bp, edge->frequency, HOST_BITS_PER_INT);
293 bp_pack_value (bp, edge->loop_nest, 30);
294 bp_pack_value (bp, edge->indirect_inlining_edge, 1);
295 bp_pack_value (bp, edge->call_stmt_cannot_inline_p, 1);
296 bp_pack_value (bp, edge->can_throw_external, 1);
297 if (edge->indirect_unknown_callee)
298 {
299 int flags = edge->indirect_info->ecf_flags;
300 bp_pack_value (bp, (flags & ECF_CONST) != 0, 1);
301 bp_pack_value (bp, (flags & ECF_PURE) != 0, 1);
302 bp_pack_value (bp, (flags & ECF_NORETURN) != 0, 1);
303 bp_pack_value (bp, (flags & ECF_MALLOC) != 0, 1);
304 bp_pack_value (bp, (flags & ECF_NOTHROW) != 0, 1);
305 bp_pack_value (bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
306 /* Flags that should not appear on indirect calls. */
307 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
308 | ECF_MAY_BE_ALLOCA
309 | ECF_SIBCALL
310 | ECF_NOVOPS)));
311 }
312 lto_output_bitpack (ob->main_stream, bp);
313 bitpack_delete (bp);
314 }
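
/* Reader's note: the bitpack written above (stmt uid, inline_failed,
   frequency, loop_nest, the three one-bit flags and, for indirect edges,
   the ECF_* bits) must be unpacked in the same order by input_edge.  */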
315
316 /* Return true if LIST contains references from other partitions. */
317
318 bool
319 referenced_from_other_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
320 varpool_node_set vset)
321 {
322 int i;
323 struct ipa_ref *ref;
324 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
325 {
326 if (ref->refering_type == IPA_REF_CGRAPH)
327 {
328 if (!cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
329 return true;
330 }
331 else
332 {
333 if (!varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
334 vset))
335 return true;
336 }
337 }
338 return false;
339 }
340
341 /* Return true when NODE is reachable from another partition. */
342
343 bool
344 reachable_from_other_partition_p (struct cgraph_node *node, cgraph_node_set set)
345 {
346 struct cgraph_edge *e;
347 if (!node->analyzed)
348 return false;
349 if (node->global.inlined_to)
350 return false;
351 for (e = node->callers; e; e = e->next_caller)
352 if (!cgraph_node_in_set_p (e->caller, set))
353 return true;
354 return false;
355 }
356
357 /* Return true if LIST contains references from this partition. */
358
359 bool
360 referenced_from_this_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
361 varpool_node_set vset)
362 {
363 int i;
364 struct ipa_ref *ref;
365 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
366 {
367 if (ref->refering_type == IPA_REF_CGRAPH)
368 {
369 if (cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
370 return true;
371 }
372 else
373 {
374 if (varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
375 vset))
376 return true;
377 }
378 }
379 return false;
380 }
381
382 /* Return true when NODE is reachable from this partition. */
383
384 bool
385 reachable_from_this_partition_p (struct cgraph_node *node, cgraph_node_set set)
386 {
387 struct cgraph_edge *e;
388 if (!node->analyzed)
389 return false;
390 if (node->global.inlined_to)
391 return false;
392 for (e = node->callers; e; e = e->next_caller)
393 if (cgraph_node_in_set_p (e->caller, set))
394 return true;
395 return false;
396 }
397
398 /* Output the cgraph NODE to OB. ENCODER is used to find the
399 reference number of NODE->inlined_to. SET is the set of nodes we
400 are writing to the current file. If NODE is not in SET, then NODE
401 is a boundary of a cgraph_node_set and we pretend NODE just has a
402 decl and no callees. VSET is the varpool node set that, together
403 with SET, is used to decide whether NODE is referenced or reachable
404 from other partitions. */
405
406 static void
407 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
408 lto_cgraph_encoder_t encoder, cgraph_node_set set,
409 varpool_node_set vset)
410 {
411 unsigned int tag;
412 struct bitpack_d *bp;
413 bool boundary_p;
414 intptr_t ref;
415 bool in_other_partition = false;
416 struct cgraph_node *clone_of;
417
418 boundary_p = !cgraph_node_in_set_p (node, set);
419
420 if (node->analyzed && !boundary_p)
421 tag = LTO_cgraph_analyzed_node;
422 else
423 tag = LTO_cgraph_unavail_node;
424
425 lto_output_uleb128_stream (ob->main_stream, tag);
426
427 /* In WPA mode, we only output part of the call-graph. Also, we
428 fake cgraph node attributes. There are two cases we care about:
429 
430 Boundary nodes: These are nodes that are not part of SET but are
431 called from within SET. We artificially make them look like
432 externally visible nodes with no function body.
433 
434 Cherry-picked nodes: These are nodes we pulled from other
435 translation units into SET during IPA-inlining. We make them look
436 like local static nodes to prevent clashes with other local statics. */
437 if (boundary_p && node->analyzed)
438 {
439 /* Inline clones cannot be part of the boundary.
440 gcc_assert (!node->global.inlined_to);
441 
442 FIXME: At the moment they can be, when a partition contains an inline
443 clone that is a clone of an inline clone from outside the partition. We
444 can reshape the clone tree and make another node the root, but that
445 needs a bit of extra work and will be promptly done by cgraph_remove_node
446 after reading back. */
447 in_other_partition = 1;
448 }
449
450 clone_of = node->clone_of;
451 while (clone_of
452 && (ref = lto_cgraph_encoder_lookup (encoder, node->clone_of)) == LCC_NOT_FOUND)
453 if (clone_of->prev_sibling_clone)
454 clone_of = clone_of->prev_sibling_clone;
455 else
456 clone_of = clone_of->clone_of;
457 if (!clone_of)
458 lto_output_sleb128_stream (ob->main_stream, LCC_NOT_FOUND);
459 else
460 lto_output_sleb128_stream (ob->main_stream, ref);
461
462
463 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
464 lto_output_sleb128_stream (ob->main_stream, node->count);
465
466 bp = bitpack_create ();
467 bp_pack_value (bp, node->local.local, 1);
468 bp_pack_value (bp, node->local.externally_visible, 1);
469 bp_pack_value (bp, node->local.finalized, 1);
470 bp_pack_value (bp, node->local.inlinable, 1);
471 bp_pack_value (bp, node->local.versionable, 1);
472 bp_pack_value (bp, node->local.disregard_inline_limits, 1);
473 bp_pack_value (bp, node->local.redefined_extern_inline, 1);
474 bp_pack_value (bp, node->local.vtable_method, 1);
475 bp_pack_value (bp, node->needed, 1);
476 bp_pack_value (bp, node->address_taken, 1);
477 bp_pack_value (bp, node->abstract_and_needed, 1);
478 bp_pack_value (bp, tag == LTO_cgraph_analyzed_node
479 && !DECL_EXTERNAL (node->decl)
480 && (reachable_from_other_partition_p (node, set)
481 || referenced_from_other_partition_p (&node->ref_list, set, vset)), 1);
482 bp_pack_value (bp, node->lowered, 1);
483 bp_pack_value (bp, in_other_partition, 1);
484 bp_pack_value (bp, node->alias, 1);
485 bp_pack_value (bp, node->finalized_by_frontend, 1);
486 bp_pack_value (bp, node->frequency, 2);
487 lto_output_bitpack (ob->main_stream, bp);
488 bitpack_delete (bp);
489
490 if (tag == LTO_cgraph_analyzed_node)
491 {
492 lto_output_sleb128_stream (ob->main_stream,
493 node->local.inline_summary.estimated_self_stack_size);
494 lto_output_sleb128_stream (ob->main_stream,
495 node->local.inline_summary.self_size);
496 lto_output_sleb128_stream (ob->main_stream,
497 node->local.inline_summary.size_inlining_benefit);
498 lto_output_sleb128_stream (ob->main_stream,
499 node->local.inline_summary.self_time);
500 lto_output_sleb128_stream (ob->main_stream,
501 node->local.inline_summary.time_inlining_benefit);
502 if (node->global.inlined_to)
503 {
504 ref = lto_cgraph_encoder_lookup (encoder, node->global.inlined_to);
505 gcc_assert (ref != LCC_NOT_FOUND);
506 }
507 else
508 ref = LCC_NOT_FOUND;
509
510 lto_output_sleb128_stream (ob->main_stream, ref);
511 }
512
513 if (node->same_comdat_group && !boundary_p)
514 {
515 ref = lto_cgraph_encoder_lookup (encoder, node->same_comdat_group);
516 gcc_assert (ref != LCC_NOT_FOUND);
517 }
518 else
519 ref = LCC_NOT_FOUND;
520 lto_output_sleb128_stream (ob->main_stream, ref);
521
522 if (node->same_body)
523 {
524 struct cgraph_node *alias;
525 unsigned long alias_count = 1;
526 for (alias = node->same_body; alias->next; alias = alias->next)
527 alias_count++;
528 lto_output_uleb128_stream (ob->main_stream, alias_count);
529 do
530 {
531 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
532 alias->decl);
533 if (alias->thunk.thunk_p)
534 {
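    /* Reader's note: the thunk kind is folded into one uleb128;
       bit 0 set distinguishes a thunk from the plain same-body alias
       written as 0 below, bit 1 carries this_adjusting and bit 2
       carries virtual_offset_p.  input_node decodes the same bits.  */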
535 lto_output_uleb128_stream
536 (ob->main_stream,
537 1 + (alias->thunk.this_adjusting != 0) * 2
538 + (alias->thunk.virtual_offset_p != 0) * 4);
539 lto_output_uleb128_stream (ob->main_stream,
540 alias->thunk.fixed_offset);
541 lto_output_uleb128_stream (ob->main_stream,
542 alias->thunk.virtual_value);
543 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
544 alias->thunk.alias);
545 }
546 else
547 {
548 lto_output_uleb128_stream (ob->main_stream, 0);
549 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
550 alias->thunk.alias);
551 }
552 alias = alias->previous;
553 }
554 while (alias);
555 }
556 else
557 lto_output_uleb128_stream (ob->main_stream, 0);
558 }
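
/* Reader's note: the node record written above is consumed by input_node
   and input_overwrite_node; any change to the order of the streamed fields
   or of the bp_pack_value calls has to be mirrored there.  */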
559
560 /* Output the varpool NODE to OB.
561 If NODE is not in VSET, then NODE is a boundary. */
562
563 static void
564 lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
565 cgraph_node_set set, varpool_node_set vset)
566 {
567 bool boundary_p = !varpool_node_in_set_p (node, vset) && node->analyzed;
568 struct bitpack_d *bp;
569 struct varpool_node *alias;
570 int count = 0;
571
572 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
573 bp = bitpack_create ();
574 bp_pack_value (bp, node->externally_visible, 1);
575 bp_pack_value (bp, node->force_output, 1);
576 bp_pack_value (bp, node->finalized, 1);
577 bp_pack_value (bp, node->alias, 1);
578 gcc_assert (!node->alias || !node->extra_name);
579 gcc_assert (node->finalized || !node->analyzed);
580 gcc_assert (node->needed);
581 /* Constant pool initializers can be de-unified into individual ltrans units.
582 FIXME: Alternatively, at -Os we may want to avoid generating the local
583 labels for them and share them across LTRANS partitions. */
584 if (DECL_IN_CONSTANT_POOL (node->decl))
585 {
586 bp_pack_value (bp, 0, 1); /* used_from_other_partition. */
587 bp_pack_value (bp, 0, 1); /* in_other_partition. */
588 }
589 else
590 {
591 bp_pack_value (bp, node->analyzed
592 && referenced_from_other_partition_p (&node->ref_list,
593 set, vset), 1);
594 bp_pack_value (bp, boundary_p, 1); /* in_other_partition. */
595 }
596 /* Also emit any extra name aliases. */
597 for (alias = node->extra_name; alias; alias = alias->next)
598 count++;
599 bp_pack_value (bp, count != 0, 1);
600 lto_output_bitpack (ob->main_stream, bp);
601 bitpack_delete (bp);
602
603 if (count)
604 {
605 lto_output_uleb128_stream (ob->main_stream, count);
606 for (alias = node->extra_name; alias; alias = alias->next)
607 lto_output_var_decl_index (ob->decl_state, ob->main_stream, alias->decl);
608 }
609 }
610
611 /* Output the reference REF to OB, using ENCODER and VARPOOL_ENCODER
612 to look up the referred-to node. */
613
614 static void
615 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
616 lto_cgraph_encoder_t encoder,
617 lto_varpool_encoder_t varpool_encoder)
618 {
619 struct bitpack_d *bp = bitpack_create ();
620 bp_pack_value (bp, ref->refered_type, 1);
621 bp_pack_value (bp, ref->use, 2);
622 lto_output_bitpack (ob->main_stream, bp);
623 bitpack_delete (bp);
624 if (ref->refered_type == IPA_REF_CGRAPH)
625 {
626 int nref = lto_cgraph_encoder_lookup (encoder, ipa_ref_node (ref));
627 gcc_assert (nref != LCC_NOT_FOUND);
628 lto_output_sleb128_stream (ob->main_stream, nref);
629 }
630 else
631 {
632 int nref = lto_varpool_encoder_lookup (varpool_encoder,
633 ipa_ref_varpool_node (ref));
634 gcc_assert (nref != LCC_NOT_FOUND);
635 lto_output_sleb128_stream (ob->main_stream, nref);
636 }
637 }
638
639 /* Stream out profile_summary to OB. */
640
641 static void
642 output_profile_summary (struct lto_simple_output_block *ob)
643 {
644 if (profile_info)
645 {
646 /* We do not output num; it is not terribly useful. */
647 gcc_assert (profile_info->runs);
648 lto_output_uleb128_stream (ob->main_stream, profile_info->runs);
649 lto_output_sleb128_stream (ob->main_stream, profile_info->sum_all);
650 lto_output_sleb128_stream (ob->main_stream, profile_info->run_max);
651 lto_output_sleb128_stream (ob->main_stream, profile_info->sum_max);
652 }
653 else
654 lto_output_uleb128_stream (ob->main_stream, 0);
655 }
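
/* Reader's note: input_profile_summary reads this back; a leading 0 for
   the number of runs means no profile data was available.  */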
656
657 /* Add NODE into ENCODER as well as the nodes it is cloned from.
658 Do it so that what a node is cloned from appears before the clone. */
659
660 static void
661 add_node_to (lto_cgraph_encoder_t encoder, struct cgraph_node *node,
662 bool include_body)
663 {
664 if (node->clone_of)
665 add_node_to (encoder, node->clone_of, include_body);
666 else if (include_body)
667 lto_set_cgraph_encoder_encode_body (encoder, node);
668 lto_cgraph_encoder_encode (encoder, node);
669 }
670
671 /* Add all references in LIST to encoders. */
672
673 static void
674 add_references (lto_cgraph_encoder_t encoder,
675 lto_varpool_encoder_t varpool_encoder,
676 struct ipa_ref_list *list)
677 {
678 int i;
679 struct ipa_ref *ref;
680 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
681 if (ref->refered_type == IPA_REF_CGRAPH)
682 add_node_to (encoder, ipa_ref_node (ref), false);
683 else
684 {
685 struct varpool_node *vnode = ipa_ref_varpool_node (ref);
686 lto_varpool_encoder_encode (varpool_encoder, vnode);
687 }
688 }
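
/* Reader's note: referenced cgraph nodes are added without requesting
   their bodies (include_body is false), and referenced varpool nodes are
   encoded without marking their initializers.  */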
689
690 /* Output all callees or indirect outgoing edges. EDGE must be the first such
691 edge. */
692
693 static void
694 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
695 struct lto_simple_output_block *ob,
696 lto_cgraph_encoder_t encoder)
697 {
698 if (!edge)
699 return;
700
701 /* Output edges in backward direction, so the reconstructed callgraph matches
702 and it is easy to associate call sites in the IPA pass summaries. */
703 while (edge->next_callee)
704 edge = edge->next_callee;
705 for (; edge; edge = edge->prev_callee)
706 lto_output_edge (ob, edge, encoder);
707 }
708
709 /* Output the IPA references of the nodes in SET and VSET. */
710
711 static void
712 output_refs (cgraph_node_set set, varpool_node_set vset,
713 lto_cgraph_encoder_t encoder,
714 lto_varpool_encoder_t varpool_encoder)
715 {
716 cgraph_node_set_iterator csi;
717 varpool_node_set_iterator vsi;
718 struct lto_simple_output_block *ob;
719 int count;
720 struct ipa_ref *ref;
721 int i;
722
723 ob = lto_create_simple_output_block (LTO_section_refs);
724
725 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
726 {
727 struct cgraph_node *node = csi_node (csi);
728
729 count = ipa_ref_list_nreferences (&node->ref_list);
730 if (count)
731 {
732 lto_output_uleb128_stream (ob->main_stream, count);
733 lto_output_uleb128_stream (ob->main_stream,
734 lto_cgraph_encoder_lookup (encoder, node));
735 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
736 lto_output_ref (ob, ref, encoder, varpool_encoder);
737 }
738 }
739
740 lto_output_uleb128_stream (ob->main_stream, 0);
741
742 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
743 {
744 struct varpool_node *node = vsi_node (vsi);
745
746 count = ipa_ref_list_nreferences (&node->ref_list);
747 if (count)
748 {
749 lto_output_uleb128_stream (ob->main_stream, count);
750 lto_output_uleb128_stream (ob->main_stream,
751 lto_varpool_encoder_lookup (varpool_encoder,
752 node));
753 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
754 lto_output_ref (ob, ref, encoder, varpool_encoder);
755 }
756 }
757
758 lto_output_uleb128_stream (ob->main_stream, 0);
759
760 lto_destroy_simple_output_block (ob);
761 }
762
763 /* Find out all cgraph and varpool nodes we want to encode in the current unit
764 and insert them into the encoders. */
765 void
766 compute_ltrans_boundary (struct lto_out_decl_state *state,
767 cgraph_node_set set, varpool_node_set vset)
768 {
769 struct cgraph_node *node;
770 cgraph_node_set_iterator csi;
771 varpool_node_set_iterator vsi;
772 struct cgraph_edge *edge;
773 int i;
774 lto_cgraph_encoder_t encoder;
775 lto_varpool_encoder_t varpool_encoder;
776
777 encoder = state->cgraph_node_encoder = lto_cgraph_encoder_new ();
778 varpool_encoder = state->varpool_node_encoder = lto_varpool_encoder_new ();
779
780 /* Go over all the nodes in SET and assign references. */
781 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
782 {
783 node = csi_node (csi);
784 add_node_to (encoder, node, true);
785 add_references (encoder, varpool_encoder, &node->ref_list);
786 }
787 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
788 {
789 struct varpool_node *vnode = vsi_node (vsi);
790 gcc_assert (!vnode->alias);
791 lto_varpool_encoder_encode (varpool_encoder, vnode);
792 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
793 add_references (encoder, varpool_encoder, &vnode->ref_list);
794 }
795 /* Also pickle in the initializers of all referenced read-only variables
796 to help folding. Constant pool variables are not shared, so we must
797 pickle those too. */
798 for (i = 0; i < lto_varpool_encoder_size (varpool_encoder); i++)
799 {
800 struct varpool_node *vnode = lto_varpool_encoder_deref (varpool_encoder, i);
801 if (DECL_INITIAL (vnode->decl)
802 && !lto_varpool_encoder_encode_initializer_p (varpool_encoder,
803 vnode)
804 && (DECL_IN_CONSTANT_POOL (vnode->decl)
805 || TREE_READONLY (vnode->decl)))
806 {
807 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
808 add_references (encoder, varpool_encoder, &vnode->ref_list);
809 }
810 }
811
812 /* Go over all the nodes again to include callees that are not in
813 SET. */
814 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
815 {
816 node = csi_node (csi);
817 for (edge = node->callees; edge; edge = edge->next_callee)
818 {
819 struct cgraph_node *callee = edge->callee;
820 if (!cgraph_node_in_set_p (callee, set))
821 {
822 /* We should have moved all the inlines. */
823 gcc_assert (!callee->global.inlined_to);
824 add_node_to (encoder, callee, false);
825 }
826 }
827 }
828 }
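
/* Reader's note: after compute_ltrans_boundary the encoders contain the
   nodes of SET with their bodies, the varpool nodes of VSET together with
   the initializers of referenced read-only and constant-pool variables,
   and the out-of-set callees and referenced nodes as boundary entries.  */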
829
830 /* Output the part of the cgraph in SET. */
831
832 void
833 output_cgraph (cgraph_node_set set, varpool_node_set vset)
834 {
835 struct cgraph_node *node;
836 struct lto_simple_output_block *ob;
837 cgraph_node_set_iterator csi;
838 int i, n_nodes;
839 lto_cgraph_encoder_t encoder;
840 lto_varpool_encoder_t varpool_encoder;
841 struct cgraph_asm_node *can;
842
843 ob = lto_create_simple_output_block (LTO_section_cgraph);
844
845 output_profile_summary (ob);
846
847 /* An encoder for cgraph nodes should have been created by
848 ipa_write_summaries_1. */
849 gcc_assert (ob->decl_state->cgraph_node_encoder);
850 gcc_assert (ob->decl_state->varpool_node_encoder);
851 encoder = ob->decl_state->cgraph_node_encoder;
852 varpool_encoder = ob->decl_state->varpool_node_encoder;
853
854 /* Write out the nodes. We must first output a node and then its clones,
855 otherwise when a clone is read back there would be nothing to clone it
856 from. */
857 n_nodes = lto_cgraph_encoder_size (encoder);
858 for (i = 0; i < n_nodes; i++)
859 {
860 node = lto_cgraph_encoder_deref (encoder, i);
861 lto_output_node (ob, node, encoder, set, vset);
862 }
863
864 /* Go over the nodes in SET again to write edges. */
865 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
866 {
867 node = csi_node (csi);
868 output_outgoing_cgraph_edges (node->callees, ob, encoder);
869 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
870 }
871
872 lto_output_uleb128_stream (ob->main_stream, 0);
873
874 /* Emit toplevel asms. */
875 for (can = cgraph_asm_nodes; can; can = can->next)
876 {
877 int len = TREE_STRING_LENGTH (can->asm_str);
878 lto_output_uleb128_stream (ob->main_stream, len);
879 for (i = 0; i < len; ++i)
880 lto_output_1_stream (ob->main_stream,
881 TREE_STRING_POINTER (can->asm_str)[i]);
882 }
883
884 lto_output_uleb128_stream (ob->main_stream, 0);
885
886 lto_destroy_simple_output_block (ob);
887 output_varpool (set, vset);
888 output_refs (set, vset, encoder, varpool_encoder);
889 }
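
/* Reader's note: the three sections emitted above (cgraph, varpool, refs)
   are read back in the same order by input_cgraph below.  */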
890
891 /* Overwrite the information in NODE based on FILE_DATA, TAG, STACK_SIZE,
892 SELF_TIME, TIME_INLINING_BENEFIT, SELF_SIZE and SIZE_INLINING_BENEFIT.
893 This is called either to initialize NODE or to replace the values in it,
894 for instance because the first time we saw it, the function body was not
895 available but now it is. BP is a bitpack with all the bitflags for NODE
896 read from the stream. */
897
898 static void
899 input_overwrite_node (struct lto_file_decl_data *file_data,
900 struct cgraph_node *node,
901 enum LTO_cgraph_tags tag,
902 struct bitpack_d *bp,
903 unsigned int stack_size,
904 unsigned int self_time,
905 unsigned int time_inlining_benefit,
906 unsigned int self_size,
907 unsigned int size_inlining_benefit)
908 {
909 node->aux = (void *) tag;
910 node->local.inline_summary.estimated_self_stack_size = stack_size;
911 node->local.inline_summary.self_time = self_time;
912 node->local.inline_summary.time_inlining_benefit = time_inlining_benefit;
913 node->local.inline_summary.self_size = self_size;
914 node->local.inline_summary.size_inlining_benefit = size_inlining_benefit;
915 node->global.time = self_time;
916 node->global.size = self_size;
917 node->global.estimated_stack_size = stack_size;
918 node->global.estimated_growth = INT_MIN;
919 node->local.lto_file_data = file_data;
920
921 node->local.local = bp_unpack_value (bp, 1);
922 node->local.externally_visible = bp_unpack_value (bp, 1);
923 node->local.finalized = bp_unpack_value (bp, 1);
924 node->local.inlinable = bp_unpack_value (bp, 1);
925 node->local.versionable = bp_unpack_value (bp, 1);
926 node->local.disregard_inline_limits = bp_unpack_value (bp, 1);
927 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
928 node->local.vtable_method = bp_unpack_value (bp, 1);
929 node->needed = bp_unpack_value (bp, 1);
930 node->address_taken = bp_unpack_value (bp, 1);
931 node->abstract_and_needed = bp_unpack_value (bp, 1);
932 node->reachable_from_other_partition = bp_unpack_value (bp, 1);
933 node->lowered = bp_unpack_value (bp, 1);
934 node->analyzed = tag == LTO_cgraph_analyzed_node;
935 node->in_other_partition = bp_unpack_value (bp, 1);
936 node->alias = bp_unpack_value (bp, 1);
937 node->finalized_by_frontend = bp_unpack_value (bp, 1);
938 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
939 }
940
941 /* Output the part of the varpool in VSET, using SET and VSET to mark boundaries. */
942
943 static void
944 output_varpool (cgraph_node_set set, varpool_node_set vset)
945 {
946 struct lto_simple_output_block *ob = lto_create_simple_output_block (LTO_section_varpool);
947 lto_varpool_encoder_t varpool_encoder = ob->decl_state->varpool_node_encoder;
948 int len = lto_varpool_encoder_size (varpool_encoder), i;
949
950 lto_output_uleb128_stream (ob->main_stream, len);
951
952 /* Write out the nodes. They were encoded in compute_ltrans_boundary and
953 are output here in the order in which they were assigned references in
954 the varpool encoder. */
955 for (i = 0; i < len; i++)
956 {
957 lto_output_varpool_node (ob, lto_varpool_encoder_deref (varpool_encoder, i),
958 set, vset);
959 }
960
961 lto_destroy_simple_output_block (ob);
962 }
963
964 /* Read a node from input_block IB. TAG is the node's tag just read.
965 Return the node read or overwritten. */
966
967 static struct cgraph_node *
968 input_node (struct lto_file_decl_data *file_data,
969 struct lto_input_block *ib,
970 enum LTO_cgraph_tags tag,
971 VEC(cgraph_node_ptr, heap) *nodes)
972 {
973 tree fn_decl;
974 struct cgraph_node *node;
975 struct bitpack_d *bp;
976 int stack_size = 0;
977 unsigned decl_index;
978 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
979 int self_time = 0;
980 int self_size = 0;
981 int time_inlining_benefit = 0;
982 int size_inlining_benefit = 0;
983 unsigned long same_body_count = 0;
984 int clone_ref;
985
986 clone_ref = lto_input_sleb128 (ib);
987
988 decl_index = lto_input_uleb128 (ib);
989 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
990
991 if (clone_ref != LCC_NOT_FOUND)
992 {
993 node = cgraph_clone_node (VEC_index (cgraph_node_ptr, nodes, clone_ref), fn_decl,
994 0, CGRAPH_FREQ_BASE, 0, false, NULL);
995 }
996 else
997 node = cgraph_node (fn_decl);
998
999 node->count = lto_input_sleb128 (ib);
1000 bp = lto_input_bitpack (ib);
1001
1002 if (tag == LTO_cgraph_analyzed_node)
1003 {
1004 stack_size = lto_input_sleb128 (ib);
1005 self_size = lto_input_sleb128 (ib);
1006 size_inlining_benefit = lto_input_sleb128 (ib);
1007 self_time = lto_input_sleb128 (ib);
1008 time_inlining_benefit = lto_input_sleb128 (ib);
1009
1010 ref = lto_input_sleb128 (ib);
1011 }
1012
1013 ref2 = lto_input_sleb128 (ib);
1014 same_body_count = lto_input_uleb128 (ib);
1015
1016 /* Make sure that we have not read this node before. Nodes that
1017 have already been read will have their tag stored in the 'aux'
1018 field. Since built-in functions can be referenced in multiple
1019 functions, they are expected to be read more than once. */
1020 if (node->aux && !DECL_IS_BUILTIN (node->decl))
1021 internal_error ("bytecode stream: found multiple instances of cgraph "
1022 "node %d", node->uid);
1023
1024 input_overwrite_node (file_data, node, tag, bp, stack_size, self_time,
1025 time_inlining_benefit, self_size,
1026 size_inlining_benefit);
1027 bitpack_delete (bp);
1028
1029 /* Store a reference for now, and fix up later to be a pointer. */
1030 node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
1031
1032 /* Store a reference for now, and fix up later to be a pointer. */
1033 node->same_comdat_group = (cgraph_node_ptr) (intptr_t) ref2;
1034
1035 while (same_body_count-- > 0)
1036 {
1037 tree alias_decl;
1038 int type;
1039 decl_index = lto_input_uleb128 (ib);
1040 alias_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1041 type = lto_input_uleb128 (ib);
1042 if (!type)
1043 {
1044 tree real_alias;
1045 decl_index = lto_input_uleb128 (ib);
1046 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1047 cgraph_same_body_alias (alias_decl, real_alias);
1048 }
1049 else
1050 {
1051 HOST_WIDE_INT fixed_offset = lto_input_uleb128 (ib);
1052 HOST_WIDE_INT virtual_value = lto_input_uleb128 (ib);
1053 tree real_alias;
1054 decl_index = lto_input_uleb128 (ib);
1055 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1056 cgraph_add_thunk (alias_decl, fn_decl, type & 2, fixed_offset,
1057 virtual_value,
1058 (type & 4) ? size_int (virtual_value) : NULL_TREE,
1059 real_alias);
1060 }
1061 }
1062 return node;
1063 }
1064
1065 /* Read a varpool node from input_block IB using the decls in FILE_DATA.
1066 Return the node read or overwritten. */
1067
1068 static struct varpool_node *
1069 input_varpool_node (struct lto_file_decl_data *file_data,
1070 struct lto_input_block *ib)
1071 {
1072 int decl_index;
1073 tree var_decl;
1074 struct varpool_node *node;
1075 struct bitpack_d *bp;
1076 bool aliases_p;
1077 int count;
1078
1079 decl_index = lto_input_uleb128 (ib);
1080 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1081 node = varpool_node (var_decl);
1082
1083 bp = lto_input_bitpack (ib);
1084 node->externally_visible = bp_unpack_value (bp, 1);
1085 node->force_output = bp_unpack_value (bp, 1);
1086 node->finalized = bp_unpack_value (bp, 1);
1087 node->alias = bp_unpack_value (bp, 1);
1088 node->analyzed = node->finalized;
1089 node->used_from_other_partition = bp_unpack_value (bp, 1);
1090 node->in_other_partition = bp_unpack_value (bp, 1);
1091 aliases_p = bp_unpack_value (bp, 1);
1092 if (node->finalized)
1093 varpool_mark_needed_node (node);
1094 bitpack_delete (bp);
1095 if (aliases_p)
1096 {
1097 count = lto_input_uleb128 (ib);
1098 for (; count > 0; count --)
1099 {
1100 tree decl = lto_file_decl_data_get_var_decl (file_data,
1101 lto_input_uleb128 (ib));
1102 varpool_extra_name_alias (decl, var_decl);
1103 }
1104 }
1105 return node;
1106 }
1107
1108 /* Read an IPA reference from input_block IB and register it for
1109 REFERING_NODE or REFERING_VARPOOL_NODE. */
1110
1111 static void
1112 input_ref (struct lto_input_block *ib,
1113 struct cgraph_node *refering_node,
1114 struct varpool_node *refering_varpool_node,
1115 VEC(cgraph_node_ptr, heap) *nodes,
1116 VEC(varpool_node_ptr, heap) *varpool_nodes)
1117 {
1118 struct cgraph_node *node = NULL;
1119 struct varpool_node *varpool_node = NULL;
1120 struct bitpack_d *bp;
1121 enum ipa_ref_type type;
1122 enum ipa_ref_use use;
1123
1124 bp = lto_input_bitpack (ib);
1125 type = (enum ipa_ref_type) bp_unpack_value (bp, 1);
1126 use = (enum ipa_ref_use) bp_unpack_value (bp, 2);
1127 bitpack_delete (bp);
1128 if (type == IPA_REF_CGRAPH)
1129 node = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1130 else
1131 varpool_node = VEC_index (varpool_node_ptr, varpool_nodes, lto_input_sleb128 (ib));
1132 ipa_record_reference (refering_node, refering_varpool_node,
1133 node, varpool_node, use, NULL);
1134 }
1135
1136 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1137 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1138 edge being read is indirect (in the sense that it has
1139 indirect_unknown_callee set). */
1140
1141 static void
1142 input_edge (struct lto_input_block *ib, VEC(cgraph_node_ptr, heap) *nodes,
1143 bool indirect)
1144 {
1145 struct cgraph_node *caller, *callee;
1146 struct cgraph_edge *edge;
1147 unsigned int stmt_id;
1148 gcov_type count;
1149 int freq;
1150 unsigned int nest;
1151 cgraph_inline_failed_t inline_failed;
1152 struct bitpack_d *bp;
1153 enum ld_plugin_symbol_resolution caller_resolution;
1154 int ecf_flags = 0;
1155
1156 caller = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1157 if (caller == NULL || caller->decl == NULL_TREE)
1158 internal_error ("bytecode stream: no caller found while reading edge");
1159
1160 if (!indirect)
1161 {
1162 callee = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1163 if (callee == NULL || callee->decl == NULL_TREE)
1164 internal_error ("bytecode stream: no callee found while reading edge");
1165 }
1166 else
1167 callee = NULL;
1168
1169 count = (gcov_type) lto_input_sleb128 (ib);
1170
1171 bp = lto_input_bitpack (ib);
1172 stmt_id = (unsigned int) bp_unpack_value (bp, HOST_BITS_PER_INT);
1173 inline_failed = (cgraph_inline_failed_t) bp_unpack_value (bp,
1174 HOST_BITS_PER_INT);
1175 freq = (int) bp_unpack_value (bp, HOST_BITS_PER_INT);
1176 nest = (unsigned) bp_unpack_value (bp, 30);
1177
1178 /* If the caller was preempted, don't create the edge.
1179 ??? Should we ever have edges from a preempted caller? */
1180 caller_resolution = lto_symtab_get_resolution (caller->decl);
1181 if (caller_resolution == LDPR_PREEMPTED_REG
1182 || caller_resolution == LDPR_PREEMPTED_IR)
1183 return;
1184
1185 if (indirect)
1186 edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq, nest);
1187 else
1188 edge = cgraph_create_edge (caller, callee, NULL, count, freq, nest);
1189
1190 edge->indirect_inlining_edge = bp_unpack_value (bp, 1);
1191 edge->lto_stmt_uid = stmt_id;
1192 edge->inline_failed = inline_failed;
1193 edge->call_stmt_cannot_inline_p = bp_unpack_value (bp, 1);
1194 edge->can_throw_external = bp_unpack_value (bp, 1);
1195 if (indirect)
1196 {
1197 if (bp_unpack_value (bp, 1))
1198 ecf_flags |= ECF_CONST;
1199 if (bp_unpack_value (bp, 1))
1200 ecf_flags |= ECF_PURE;
1201 if (bp_unpack_value (bp, 1))
1202 ecf_flags |= ECF_NORETURN;
1203 if (bp_unpack_value (bp, 1))
1204 ecf_flags |= ECF_MALLOC;
1205 if (bp_unpack_value (bp, 1))
1206 ecf_flags |= ECF_NOTHROW;
1207 if (bp_unpack_value (bp, 1))
1208 ecf_flags |= ECF_RETURNS_TWICE;
1209 edge->indirect_info->ecf_flags = ecf_flags;
1210 }
1211 bitpack_delete (bp);
1212 }
1213
1214
1215 /* Read a cgraph from IB using the info in FILE_DATA. */
1216
1217 static VEC(cgraph_node_ptr, heap) *
1218 input_cgraph_1 (struct lto_file_decl_data *file_data,
1219 struct lto_input_block *ib)
1220 {
1221 enum LTO_cgraph_tags tag;
1222 VEC(cgraph_node_ptr, heap) *nodes = NULL;
1223 struct cgraph_node *node;
1224 unsigned i;
1225 unsigned HOST_WIDE_INT len;
1226
1227 tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
1228 while (tag)
1229 {
1230 if (tag == LTO_cgraph_edge)
1231 input_edge (ib, nodes, false);
1232 else if (tag == LTO_cgraph_indirect_edge)
1233 input_edge (ib, nodes, true);
1234 else
1235 {
1236 node = input_node (file_data, ib, tag,nodes);
1237 if (node == NULL || node->decl == NULL_TREE)
1238 internal_error ("bytecode stream: found empty cgraph node");
1239 VEC_safe_push (cgraph_node_ptr, heap, nodes, node);
1240 lto_cgraph_encoder_encode (file_data->cgraph_node_encoder, node);
1241 }
1242
1243 tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
1244 }
1245
1246 /* Input toplevel asms. */
1247 len = lto_input_uleb128 (ib);
1248 while (len)
1249 {
1250 char *str = (char *)xmalloc (len + 1);
1251 for (i = 0; i < len; ++i)
1252 str[i] = lto_input_1_unsigned (ib);
1253 cgraph_add_asm_node (build_string (len, str));
1254 free (str);
1255
1256 len = lto_input_uleb128 (ib);
1257 }
1258
1259 for (i = 0; VEC_iterate (cgraph_node_ptr, nodes, i, node); i++)
1260 {
1261 int ref = (int) (intptr_t) node->global.inlined_to;
1262
1263 /* Fixup inlined_to from reference to pointer. */
1264 if (ref != LCC_NOT_FOUND)
1265 node->global.inlined_to = VEC_index (cgraph_node_ptr, nodes, ref);
1266 else
1267 node->global.inlined_to = NULL;
1268
1269 ref = (int) (intptr_t) node->same_comdat_group;
1270
1271 /* Fixup same_comdat_group from reference to pointer. */
1272 if (ref != LCC_NOT_FOUND)
1273 node->same_comdat_group = VEC_index (cgraph_node_ptr, nodes, ref);
1274 else
1275 node->same_comdat_group = NULL;
1276 }
1277 return nodes;
1278 }
1279
1280 /* Read a varpool from IB using the info in FILE_DATA. */
1281
1282 static VEC(varpool_node_ptr, heap) *
1283 input_varpool_1 (struct lto_file_decl_data *file_data,
1284 struct lto_input_block *ib)
1285 {
1286 unsigned HOST_WIDE_INT len;
1287 VEC(varpool_node_ptr, heap) *varpool = NULL;
1288
1289 len = lto_input_uleb128 (ib);
1290 while (len)
1291 {
1292 VEC_safe_push (varpool_node_ptr, heap, varpool,
1293 input_varpool_node (file_data, ib));
1294 len--;
1295 }
1296 return varpool;
1297 }
1298
1299 /* Input ipa_refs. */
1300
1301 static void
1302 input_refs (struct lto_input_block *ib,
1303 VEC(cgraph_node_ptr, heap) *nodes,
1304 VEC(varpool_node_ptr, heap) *varpool)
1305 {
1306 int count;
1307 int idx;
1308 while (true)
1309 {
1310 struct cgraph_node *node;
1311 count = lto_input_uleb128 (ib);
1312 if (!count)
1313 break;
1314 idx = lto_input_uleb128 (ib);
1315 node = VEC_index (cgraph_node_ptr, nodes, idx);
1316 while (count)
1317 {
1318 input_ref (ib, node, NULL, nodes, varpool);
1319 count--;
1320 }
1321 }
1322 while (true)
1323 {
1324 struct varpool_node *node;
1325 count = lto_input_uleb128 (ib);
1326 if (!count)
1327 break;
1328 node = VEC_index (varpool_node_ptr, varpool, lto_input_uleb128 (ib));
1329 while (count)
1330 {
1331 input_ref (ib, NULL, node, nodes, varpool);
1332 count--;
1333 }
1334 }
1335 }
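
/* Reader's note: input_refs mirrors output_refs; the per-node reference
   lists for cgraph nodes come first and those for varpool nodes second,
   each sequence terminated by a zero count.  */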
1336
1337
1338 static struct gcov_ctr_summary lto_gcov_summary;
1339
1340 /* Input profile_info from IB. */
1341 static void
1342 input_profile_summary (struct lto_input_block *ib)
1343 {
1344 unsigned int runs = lto_input_uleb128 (ib);
1345 if (runs)
1346 {
1347 if (!profile_info)
1348 {
1349 profile_info = &lto_gcov_summary;
1350 lto_gcov_summary.runs = runs;
1351 lto_gcov_summary.sum_all = lto_input_sleb128 (ib);
1352 lto_gcov_summary.run_max = lto_input_sleb128 (ib);
1353 lto_gcov_summary.sum_max = lto_input_sleb128 (ib);
1354 }
1355 /* We can support this by scaling all counts to the nearest common multiple
1356 of all the different runs, but it is perhaps not worth the effort. */
1357 else if (profile_info->runs != runs
1358 || profile_info->sum_all != lto_input_sleb128 (ib)
1359 || profile_info->run_max != lto_input_sleb128 (ib)
1360 || profile_info->sum_max != lto_input_sleb128 (ib))
1361 sorry ("Combining units with different profiles is not supported.");
1362 /* We allow some units to have a profile and others not to have one. This
1363 will just make the unprofiled units be optimized for size, which is sane. */
1364 }
1365
1366 }
1367
1368 /* Input and merge the cgraph from each of the .o files passed to
1369 lto1. */
1370
1371 void
1372 input_cgraph (void)
1373 {
1374 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1375 struct lto_file_decl_data *file_data;
1376 unsigned int j = 0;
1377 struct cgraph_node *node;
1378
1379 while ((file_data = file_data_vec[j++]))
1380 {
1381 const char *data;
1382 size_t len;
1383 struct lto_input_block *ib;
1384 VEC(cgraph_node_ptr, heap) *nodes;
1385 VEC(varpool_node_ptr, heap) *varpool;
1386
1387 ib = lto_create_simple_input_block (file_data, LTO_section_cgraph,
1388 &data, &len);
1389 input_profile_summary (ib);
1390 file_data->cgraph_node_encoder = lto_cgraph_encoder_new ();
1391 nodes = input_cgraph_1 (file_data, ib);
1392 lto_destroy_simple_input_block (file_data, LTO_section_cgraph,
1393 ib, data, len);
1394
1395 ib = lto_create_simple_input_block (file_data, LTO_section_varpool,
1396 &data, &len);
1397 varpool = input_varpool_1 (file_data, ib);
1398 lto_destroy_simple_input_block (file_data, LTO_section_varpool,
1399 ib, data, len);
1400
1401 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1402 &data, &len);
1403 input_refs (ib, nodes, varpool);
1404 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1405 ib, data, len);
1406 VEC_free (cgraph_node_ptr, heap, nodes);
1407 VEC_free (varpool_node_ptr, heap, varpool);
1408 }
1409
1410 /* Clear out the aux field that was used to store enough state to
1411 tell which nodes should be overwritten. */
1412 for (node = cgraph_nodes; node; node = node->next)
1413 {
1414 /* Some nodes may have been created by cgraph_node. This
1415 happens when the callgraph contains nested functions. If the
1416 node for the parent function was never emitted to the gimple
1417 file, cgraph_node will create a node for it when setting the
1418 context of the nested function. */
1419 if (node->local.lto_file_data)
1420 node->aux = NULL;
1421 }
1422 }