lto-cgraph.c (input_overwrite_node): Do not set DECL_EXTERNAL when processing clone.
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright 2009, 2010 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "toplev.h"
28 #include "tree.h"
29 #include "expr.h"
30 #include "flags.h"
31 #include "params.h"
32 #include "input.h"
33 #include "hashtab.h"
34 #include "langhooks.h"
35 #include "basic-block.h"
36 #include "tree-flow.h"
37 #include "cgraph.h"
38 #include "function.h"
39 #include "ggc.h"
40 #include "diagnostic-core.h"
41 #include "except.h"
42 #include "vec.h"
43 #include "timevar.h"
44 #include "output.h"
45 #include "pointer-set.h"
46 #include "lto-streamer.h"
47 #include "gcov-io.h"
48
49 static void output_varpool (cgraph_node_set, varpool_node_set);
50 static void output_cgraph_opt_summary (void);
51 static void input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes);
52
53
54 /* Cgraph streaming is organized as a set of records whose type
55 is indicated by a tag. */
56 enum LTO_cgraph_tags
57 {
58 /* Must leave 0 for the stopper. */
59
60 /* Cgraph node without body available. */
61 LTO_cgraph_unavail_node = 1,
62 /* Cgraph node with function body. */
63 LTO_cgraph_analyzed_node,
64 /* Cgraph edges. */
65 LTO_cgraph_edge,
66 LTO_cgraph_indirect_edge
67 };
68
69 /* Create a new cgraph encoder. */
70
71 lto_cgraph_encoder_t
72 lto_cgraph_encoder_new (void)
73 {
74 lto_cgraph_encoder_t encoder = XCNEW (struct lto_cgraph_encoder_d);
75 encoder->map = pointer_map_create ();
76 encoder->nodes = NULL;
77 encoder->body = pointer_set_create ();
78 return encoder;
79 }
80
81
82 /* Delete ENCODER and its components. */
83
84 void
85 lto_cgraph_encoder_delete (lto_cgraph_encoder_t encoder)
86 {
87 VEC_free (cgraph_node_ptr, heap, encoder->nodes);
88 pointer_map_destroy (encoder->map);
89 pointer_set_destroy (encoder->body);
90 free (encoder);
91 }
92
93
94 /* Return the existing reference number of NODE in the cgraph encoder
95 ENCODER. Assign a new reference if this is the first time
96 NODE is encoded. */
97
98 int
99 lto_cgraph_encoder_encode (lto_cgraph_encoder_t encoder,
100 struct cgraph_node *node)
101 {
102 int ref;
103 void **slot;
104
105 slot = pointer_map_contains (encoder->map, node);
106 if (!slot)
107 {
108 ref = VEC_length (cgraph_node_ptr, encoder->nodes);
109 slot = pointer_map_insert (encoder->map, node);
110 *slot = (void *) (intptr_t) ref;
111 VEC_safe_push (cgraph_node_ptr, heap, encoder->nodes, node);
112 }
113 else
114 ref = (int) (intptr_t) *slot;
115
116 return ref;
117 }
118
119 #define LCC_NOT_FOUND (-1)
120
121 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
122 or LCC_NOT_FOUND if it is not there. */
123
124 int
125 lto_cgraph_encoder_lookup (lto_cgraph_encoder_t encoder,
126 struct cgraph_node *node)
127 {
128 void **slot = pointer_map_contains (encoder->map, node);
129 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
130 }
131
132
133 /* Return the cgraph node corresponding to REF using ENCODER. */
134
135 struct cgraph_node *
136 lto_cgraph_encoder_deref (lto_cgraph_encoder_t encoder, int ref)
137 {
138 if (ref == LCC_NOT_FOUND)
139 return NULL;
140
141 return VEC_index (cgraph_node_ptr, encoder->nodes, ref);
142 }
143
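The three routines above form a small interning API: lto_cgraph_encoder_encode assigns a dense index on first use, lto_cgraph_encoder_lookup retrieves it, and lto_cgraph_encoder_deref maps an index back to its node. A minimal round-trip sketch, assuming NODE is a struct cgraph_node * obtained elsewhere (the variable is illustrative only):

  lto_cgraph_encoder_t encoder = lto_cgraph_encoder_new ();
  int ref = lto_cgraph_encoder_encode (encoder, node);  /* First use: index 0.  */
  gcc_assert (lto_cgraph_encoder_encode (encoder, node) == ref);  /* Stable on re-encode.  */
  gcc_assert (lto_cgraph_encoder_lookup (encoder, node) == ref);
  gcc_assert (lto_cgraph_encoder_deref (encoder, ref) == node);
  lto_cgraph_encoder_delete (encoder);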
144
145 /* Return TRUE if we should encode the body of NODE (if any). */
146
147 bool
148 lto_cgraph_encoder_encode_body_p (lto_cgraph_encoder_t encoder,
149 struct cgraph_node *node)
150 {
151 return pointer_set_contains (encoder->body, node);
152 }
153
154 /* Specify that we should encode the body of NODE. */
155
156 static void
157 lto_set_cgraph_encoder_encode_body (lto_cgraph_encoder_t encoder,
158 struct cgraph_node *node)
159 {
160 pointer_set_insert (encoder->body, node);
161 }
162
163 /* Create a new varpool encoder. */
164
165 lto_varpool_encoder_t
166 lto_varpool_encoder_new (void)
167 {
168 lto_varpool_encoder_t encoder = XCNEW (struct lto_varpool_encoder_d);
169 encoder->map = pointer_map_create ();
170 encoder->initializer = pointer_set_create ();
171 encoder->nodes = NULL;
172 return encoder;
173 }
174
175
176 /* Delete ENCODER and its components. */
177
178 void
179 lto_varpool_encoder_delete (lto_varpool_encoder_t encoder)
180 {
181 VEC_free (varpool_node_ptr, heap, encoder->nodes);
182 pointer_map_destroy (encoder->map);
183 pointer_set_destroy (encoder->initializer);
184 free (encoder);
185 }
186
187
188 /* Return the existing reference number of NODE in the varpool encoder
189 ENCODER. Assign a new reference if this is the first time
190 NODE is encoded. */
191
192 int
193 lto_varpool_encoder_encode (lto_varpool_encoder_t encoder,
194 struct varpool_node *node)
195 {
196 int ref;
197 void **slot;
198
199 slot = pointer_map_contains (encoder->map, node);
200 if (!slot)
201 {
202 ref = VEC_length (varpool_node_ptr, encoder->nodes);
203 slot = pointer_map_insert (encoder->map, node);
204 *slot = (void *) (intptr_t) ref;
205 VEC_safe_push (varpool_node_ptr, heap, encoder->nodes, node);
206 }
207 else
208 ref = (int) (intptr_t) *slot;
209
210 return ref;
211 }
212
213 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
214 or LCC_NOT_FOUND if it is not there. */
215
216 int
217 lto_varpool_encoder_lookup (lto_varpool_encoder_t encoder,
218 struct varpool_node *node)
219 {
220 void **slot = pointer_map_contains (encoder->map, node);
221 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
222 }
223
224
225 /* Return the varpool node corresponding to REF using ENCODER. */
226
227 struct varpool_node *
228 lto_varpool_encoder_deref (lto_varpool_encoder_t encoder, int ref)
229 {
230 if (ref == LCC_NOT_FOUND)
231 return NULL;
232
233 return VEC_index (varpool_node_ptr, encoder->nodes, ref);
234 }
235
236
237 /* Return TRUE if we should encode initializer of NODE (if any). */
238
239 bool
240 lto_varpool_encoder_encode_initializer_p (lto_varpool_encoder_t encoder,
241 struct varpool_node *node)
242 {
243 return pointer_set_contains (encoder->initializer, node);
244 }
245
246 /* Specify that we should encode the initializer of NODE. */
247
248 static void
249 lto_set_varpool_encoder_encode_initializer (lto_varpool_encoder_t encoder,
250 struct varpool_node *node)
251 {
252 pointer_set_insert (encoder->initializer, node);
253 }
254
255 /* Output the cgraph EDGE to OB using ENCODER. */
256
257 static void
258 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
259 lto_cgraph_encoder_t encoder)
260 {
261 unsigned int uid;
262 intptr_t ref;
263 struct bitpack_d bp;
264
265 if (edge->indirect_unknown_callee)
266 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_indirect_edge);
267 else
268 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_edge);
269
270 ref = lto_cgraph_encoder_lookup (encoder, edge->caller);
271 gcc_assert (ref != LCC_NOT_FOUND);
272 lto_output_sleb128_stream (ob->main_stream, ref);
273
274 if (!edge->indirect_unknown_callee)
275 {
276 ref = lto_cgraph_encoder_lookup (encoder, edge->callee);
277 gcc_assert (ref != LCC_NOT_FOUND);
278 lto_output_sleb128_stream (ob->main_stream, ref);
279 }
280
281 lto_output_sleb128_stream (ob->main_stream, edge->count);
282
283 bp = bitpack_create (ob->main_stream);
284 uid = (!gimple_has_body_p (edge->caller->decl)
285 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt));
286 bp_pack_value (&bp, uid, HOST_BITS_PER_INT);
287 bp_pack_value (&bp, edge->inline_failed, HOST_BITS_PER_INT);
288 bp_pack_value (&bp, edge->frequency, HOST_BITS_PER_INT);
289 bp_pack_value (&bp, edge->loop_nest, 30);
290 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
291 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
292 bp_pack_value (&bp, edge->can_throw_external, 1);
293 if (edge->indirect_unknown_callee)
294 {
295 int flags = edge->indirect_info->ecf_flags;
296 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
297 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
298 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
299 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
300 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
301 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
302 /* Flags that should not appear on indirect calls. */
303 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
304 | ECF_MAY_BE_ALLOCA
305 | ECF_SIBCALL
306 | ECF_NOVOPS)));
307 }
308 lto_output_bitpack (&bp);
309 }
310
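As a reading aid, the record lto_output_edge emits has the layout below; input_edge further down must consume the fields in exactly the same order:

  /* uleb128  tag: LTO_cgraph_edge or LTO_cgraph_indirect_edge
     sleb128  caller reference in ENCODER
     sleb128  callee reference (direct edges only)
     sleb128  edge->count
     bitpack  uid, inline_failed, frequency, loop_nest,
              indirect_inlining_edge, call_stmt_cannot_inline_p,
              can_throw_external, plus six ECF_* bits for indirect edges.  */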
311 /* Return true if LIST contains references from other partitions. */
312
313 bool
314 referenced_from_other_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
315 varpool_node_set vset)
316 {
317 int i;
318 struct ipa_ref *ref;
319 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
320 {
321 if (ref->refering_type == IPA_REF_CGRAPH)
322 {
323 if (ipa_ref_refering_node (ref)->in_other_partition
324 || !cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
325 return true;
326 }
327 else
328 {
329 if (ipa_ref_refering_varpool_node (ref)->in_other_partition
330 || !varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
331 vset))
332 return true;
333 }
334 }
335 return false;
336 }
337
338 /* Return true when NODE is reachable from another partition. */
339
340 bool
341 reachable_from_other_partition_p (struct cgraph_node *node, cgraph_node_set set)
342 {
343 struct cgraph_edge *e;
344 if (!node->analyzed)
345 return false;
346 if (node->global.inlined_to)
347 return false;
348 for (e = node->callers; e; e = e->next_caller)
349 if (e->caller->in_other_partition
350 || !cgraph_node_in_set_p (e->caller, set))
351 return true;
352 return false;
353 }
354
355 /* Return true if LIST contains references from this partition (SET and VSET). */
356
357 bool
358 referenced_from_this_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
359 varpool_node_set vset)
360 {
361 int i;
362 struct ipa_ref *ref;
363 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
364 {
365 if (ref->refering_type == IPA_REF_CGRAPH)
366 {
367 if (cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
368 return true;
369 }
370 else
371 {
372 if (varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
373 vset))
374 return true;
375 }
376 }
377 return false;
378 }
379
380 /* Return true when NODE is reachable from this partition (SET). */
381
382 bool
383 reachable_from_this_partition_p (struct cgraph_node *node, cgraph_node_set set)
384 {
385 struct cgraph_edge *e;
386 if (!node->analyzed)
387 return false;
388 if (node->global.inlined_to)
389 return false;
390 for (e = node->callers; e; e = e->next_caller)
391 if (cgraph_node_in_set_p (e->caller, set))
392 return true;
393 return false;
394 }
395
396 /* Output the cgraph NODE to OB. ENCODER is used to find the
397 reference number of NODE->inlined_to. SET is the set of nodes we
398 are writing to the current file. If NODE is not in SET, then NODE
399 is a boundary of a cgraph_node_set and we pretend NODE just has a
400 decl and no callees. VSET is the varpool node set of the current
401 partition; it is used to decide whether NODE is referenced from
402 other partitions. */
403
404 static void
405 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
406 lto_cgraph_encoder_t encoder, cgraph_node_set set,
407 varpool_node_set vset)
408 {
409 unsigned int tag;
410 struct bitpack_d bp;
411 bool boundary_p;
412 intptr_t ref;
413 bool in_other_partition = false;
414 struct cgraph_node *clone_of;
415
416 boundary_p = !cgraph_node_in_set_p (node, set);
417
418 if (node->analyzed && !boundary_p)
419 tag = LTO_cgraph_analyzed_node;
420 else
421 tag = LTO_cgraph_unavail_node;
422
423 lto_output_uleb128_stream (ob->main_stream, tag);
424
425 /* In WPA mode, we only output part of the call-graph. Also, we
426 fake cgraph node attributes. There are two cases that we care about.
427
428 Boundary nodes: These are nodes that are not part of SET but are
429 called from within SET. We artificially make them look like
430 externally visible nodes with no function body.
431
432 Cherry-picked nodes: These are nodes we pulled from other
433 translation units into SET during IPA-inlining. We make them
434 local static nodes to prevent clashes with other local statics. */
435 if (boundary_p && node->analyzed)
436 {
437 /* Inline clones can not be part of the boundary.
438 gcc_assert (!node->global.inlined_to);
439
440 FIXME: At the moment they can be, when the partition contains an inline
441 clone that is a clone of an inline clone from outside the partition. We can
442 reshape the clone tree and make another node the root, but it
443 needs a bit of extra work and will be promptly done by cgraph_remove_node
444 after reading back. */
445 in_other_partition = 1;
446 }
447
448 clone_of = node->clone_of;
449 while (clone_of
450 && (ref = lto_cgraph_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
451 if (clone_of->prev_sibling_clone)
452 clone_of = clone_of->prev_sibling_clone;
453 else
454 clone_of = clone_of->clone_of;
455
456 if (tag == LTO_cgraph_analyzed_node)
457 gcc_assert (clone_of || !node->clone_of);
458 if (!clone_of)
459 lto_output_sleb128_stream (ob->main_stream, LCC_NOT_FOUND);
460 else
461 lto_output_sleb128_stream (ob->main_stream, ref);
462
463
464 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
465 lto_output_sleb128_stream (ob->main_stream, node->count);
466
467 if (tag == LTO_cgraph_analyzed_node)
468 {
469 lto_output_sleb128_stream (ob->main_stream,
470 node->local.inline_summary.estimated_self_stack_size);
471 lto_output_sleb128_stream (ob->main_stream,
472 node->local.inline_summary.self_size);
473 lto_output_sleb128_stream (ob->main_stream,
474 node->local.inline_summary.size_inlining_benefit);
475 lto_output_sleb128_stream (ob->main_stream,
476 node->local.inline_summary.self_time);
477 lto_output_sleb128_stream (ob->main_stream,
478 node->local.inline_summary.time_inlining_benefit);
479 if (node->global.inlined_to)
480 {
481 ref = lto_cgraph_encoder_lookup (encoder, node->global.inlined_to);
482 gcc_assert (ref != LCC_NOT_FOUND);
483 }
484 else
485 ref = LCC_NOT_FOUND;
486
487 lto_output_sleb128_stream (ob->main_stream, ref);
488 }
489
490 if (node->same_comdat_group && !boundary_p)
491 {
492 ref = lto_cgraph_encoder_lookup (encoder, node->same_comdat_group);
493 gcc_assert (ref != LCC_NOT_FOUND);
494 }
495 else
496 ref = LCC_NOT_FOUND;
497 lto_output_sleb128_stream (ob->main_stream, ref);
498
499 bp = bitpack_create (ob->main_stream);
500 bp_pack_value (&bp, node->local.local, 1);
501 bp_pack_value (&bp, node->local.externally_visible, 1);
502 bp_pack_value (&bp, node->local.finalized, 1);
503 bp_pack_value (&bp, node->local.inlinable, 1);
504 bp_pack_value (&bp, node->local.versionable, 1);
505 bp_pack_value (&bp, node->local.disregard_inline_limits, 1);
506 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
507 bp_pack_value (&bp, node->local.vtable_method, 1);
508 bp_pack_value (&bp, node->needed, 1);
509 bp_pack_value (&bp, node->address_taken, 1);
510 bp_pack_value (&bp, node->abstract_and_needed, 1);
511 bp_pack_value (&bp, tag == LTO_cgraph_analyzed_node
512 && !DECL_EXTERNAL (node->decl)
513 && !DECL_COMDAT (node->decl)
514 && (reachable_from_other_partition_p (node, set)
515 || referenced_from_other_partition_p (&node->ref_list, set, vset)), 1);
516 bp_pack_value (&bp, node->lowered, 1);
517 bp_pack_value (&bp, in_other_partition, 1);
518 bp_pack_value (&bp, node->alias, 1);
519 bp_pack_value (&bp, node->finalized_by_frontend, 1);
520 bp_pack_value (&bp, node->frequency, 2);
521 lto_output_bitpack (&bp);
522
523 if (node->same_body)
524 {
525 struct cgraph_node *alias;
526 unsigned long alias_count = 1;
527 for (alias = node->same_body; alias->next; alias = alias->next)
528 alias_count++;
529 lto_output_uleb128_stream (ob->main_stream, alias_count);
530 do
531 {
532 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
533 alias->decl);
534 if (alias->thunk.thunk_p)
535 {
536 lto_output_uleb128_stream
537 (ob->main_stream,
538 1 + (alias->thunk.this_adjusting != 0) * 2
539 + (alias->thunk.virtual_offset_p != 0) * 4);
540 lto_output_uleb128_stream (ob->main_stream,
541 alias->thunk.fixed_offset);
542 lto_output_uleb128_stream (ob->main_stream,
543 alias->thunk.virtual_value);
544 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
545 alias->thunk.alias);
546 }
547 else
548 {
549 lto_output_uleb128_stream (ob->main_stream, 0);
550 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
551 alias->thunk.alias);
552 }
553 alias = alias->previous;
554 }
555 while (alias);
556 }
557 else
558 lto_output_uleb128_stream (ob->main_stream, 0);
559 }
560
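A small cross-reference, added as a reading aid: the flag bitpack written by lto_output_node is consumed bit-for-bit by input_overwrite_node below, so both functions must keep the same field order. For example, the first two packed bits correspond to the reader's

  node->local.local = bp_unpack_value (bp, 1);
  node->local.externally_visible = bp_unpack_value (bp, 1);

and adding a flag on one side without the other silently shifts every following field.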
561 /* Output the varpool NODE to OB.
562 If NODE is not in VSET, then NODE is a boundary. */
563
564 static void
565 lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
566 lto_varpool_encoder_t varpool_encoder,
567 cgraph_node_set set, varpool_node_set vset)
568 {
569 bool boundary_p = !varpool_node_in_set_p (node, vset) && node->analyzed;
570 struct bitpack_d bp;
571 struct varpool_node *alias;
572 int count = 0;
573 int ref;
574
575 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
576 bp = bitpack_create (ob->main_stream);
577 bp_pack_value (&bp, node->externally_visible, 1);
578 bp_pack_value (&bp, node->force_output, 1);
579 bp_pack_value (&bp, node->finalized, 1);
580 bp_pack_value (&bp, node->alias, 1);
581 bp_pack_value (&bp, node->const_value_known, 1);
582 gcc_assert (!node->alias || !node->extra_name);
583 gcc_assert (node->finalized || !node->analyzed);
584 gcc_assert (node->needed);
585 /* Constant pool initializers can be de-unified into individual ltrans units.
586 FIXME: Alternatively, at -Os we may want to avoid generating the local
587 labels for them and share them across LTRANS partitions. */
588 if (DECL_IN_CONSTANT_POOL (node->decl)
589 && !DECL_COMDAT (node->decl))
590 {
591 bp_pack_value (&bp, 0, 1); /* used_from_other_partition. */
592 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
593 }
594 else
595 {
596 bp_pack_value (&bp, node->analyzed
597 && referenced_from_other_partition_p (&node->ref_list,
598 set, vset), 1);
599 bp_pack_value (&bp, boundary_p, 1); /* in_other_partition. */
600 }
601 /* Also emit any extra name aliases. */
602 for (alias = node->extra_name; alias; alias = alias->next)
603 count++;
604 bp_pack_value (&bp, count != 0, 1);
605 lto_output_bitpack (&bp);
606 if (node->same_comdat_group && !boundary_p)
607 {
608 ref = lto_varpool_encoder_lookup (varpool_encoder, node->same_comdat_group);
609 gcc_assert (ref != LCC_NOT_FOUND);
610 }
611 else
612 ref = LCC_NOT_FOUND;
613 lto_output_sleb128_stream (ob->main_stream, ref);
614
615 if (count)
616 {
617 lto_output_uleb128_stream (ob->main_stream, count);
618 for (alias = node->extra_name; alias; alias = alias->next)
619 lto_output_var_decl_index (ob->decl_state, ob->main_stream, alias->decl);
620 }
621 }
622
623 /* Output the reference REF to OB. ENCODER and VARPOOL_ENCODER are used
624 to look up the reference numbers of the node REF points to. */
625
626 static void
627 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
628 lto_cgraph_encoder_t encoder,
629 lto_varpool_encoder_t varpool_encoder)
630 {
631 struct bitpack_d bp;
632 bp = bitpack_create (ob->main_stream);
633 bp_pack_value (&bp, ref->refered_type, 1);
634 bp_pack_value (&bp, ref->use, 2);
635 lto_output_bitpack (&bp);
636 if (ref->refered_type == IPA_REF_CGRAPH)
637 {
638 int nref = lto_cgraph_encoder_lookup (encoder, ipa_ref_node (ref));
639 gcc_assert (nref != LCC_NOT_FOUND);
640 lto_output_sleb128_stream (ob->main_stream, nref);
641 }
642 else
643 {
644 int nref = lto_varpool_encoder_lookup (varpool_encoder,
645 ipa_ref_varpool_node (ref));
646 gcc_assert (nref != LCC_NOT_FOUND);
647 lto_output_sleb128_stream (ob->main_stream, nref);
648 }
649 }
650
651 /* Stream out profile_summary to OB. */
652
653 static void
654 output_profile_summary (struct lto_simple_output_block *ob)
655 {
656 if (profile_info)
657 {
658 /* We do not output num, it is not terribly useful. */
659 gcc_assert (profile_info->runs);
660 lto_output_uleb128_stream (ob->main_stream, profile_info->runs);
661 lto_output_sleb128_stream (ob->main_stream, profile_info->sum_all);
662 lto_output_sleb128_stream (ob->main_stream, profile_info->run_max);
663 lto_output_sleb128_stream (ob->main_stream, profile_info->sum_max);
664 }
665 else
666 lto_output_uleb128_stream (ob->main_stream, 0);
667 }
668
669 /* Add NODE into ENCODER as well as the nodes it is cloned from.
670 Do it in a way so that a node always appears before its clones. */
671
672 static void
673 add_node_to (lto_cgraph_encoder_t encoder, struct cgraph_node *node,
674 bool include_body)
675 {
676 if (node->clone_of)
677 add_node_to (encoder, node->clone_of, include_body);
678 else if (include_body)
679 lto_set_cgraph_encoder_encode_body (encoder, node);
680 lto_cgraph_encoder_encode (encoder, node);
681 }
682
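A short worked example of the recursion above (node names are hypothetical): if CLONE2 is a clone of CLONE1, which is in turn a clone of ORIG, then

  add_node_to (encoder, clone2, true);

encodes ORIG (index 0), then CLONE1 (index 1), then CLONE2 (index 2), and requests the body only for ORIG, since only the non-clone branch calls lto_set_cgraph_encoder_encode_body.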
683 /* Add all references in LIST to encoders. */
684
685 static void
686 add_references (lto_cgraph_encoder_t encoder,
687 lto_varpool_encoder_t varpool_encoder,
688 struct ipa_ref_list *list)
689 {
690 int i;
691 struct ipa_ref *ref;
692 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
693 if (ref->refered_type == IPA_REF_CGRAPH)
694 add_node_to (encoder, ipa_ref_node (ref), false);
695 else
696 {
697 struct varpool_node *vnode = ipa_ref_varpool_node (ref);
698 lto_varpool_encoder_encode (varpool_encoder, vnode);
699 }
700 }
701
702 /* Output all callees or indirect outgoing edges. EDGE must be the first such
703 edge. */
704
705 static void
706 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
707 struct lto_simple_output_block *ob,
708 lto_cgraph_encoder_t encoder)
709 {
710 if (!edge)
711 return;
712
713 /* Output edges in backward direction, so the reconstructed callgraph matches
714 and it is easy to associate call sites with the IPA pass summaries. */
715 while (edge->next_callee)
716 edge = edge->next_callee;
717 for (; edge; edge = edge->prev_callee)
718 lto_output_edge (ob, edge, encoder);
719 }
720
721 /* Output the IPA references of the nodes in SET and VSET. */
722
723 static void
724 output_refs (cgraph_node_set set, varpool_node_set vset,
725 lto_cgraph_encoder_t encoder,
726 lto_varpool_encoder_t varpool_encoder)
727 {
728 cgraph_node_set_iterator csi;
729 varpool_node_set_iterator vsi;
730 struct lto_simple_output_block *ob;
731 int count;
732 struct ipa_ref *ref;
733 int i;
734
735 ob = lto_create_simple_output_block (LTO_section_refs);
736
737 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
738 {
739 struct cgraph_node *node = csi_node (csi);
740
741 count = ipa_ref_list_nreferences (&node->ref_list);
742 if (count)
743 {
744 lto_output_uleb128_stream (ob->main_stream, count);
745 lto_output_uleb128_stream (ob->main_stream,
746 lto_cgraph_encoder_lookup (encoder, node));
747 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
748 lto_output_ref (ob, ref, encoder, varpool_encoder);
749 }
750 }
751
752 lto_output_uleb128_stream (ob->main_stream, 0);
753
754 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
755 {
756 struct varpool_node *node = vsi_node (vsi);
757
758 count = ipa_ref_list_nreferences (&node->ref_list);
759 if (count)
760 {
761 lto_output_uleb128_stream (ob->main_stream, count);
762 lto_output_uleb128_stream (ob->main_stream,
763 lto_varpool_encoder_lookup (varpool_encoder,
764 node));
765 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
766 lto_output_ref (ob, ref, encoder, varpool_encoder);
767 }
768 }
769
770 lto_output_uleb128_stream (ob->main_stream, 0);
771
772 lto_destroy_simple_output_block (ob);
773 }
774
775 /* Find out all cgraph and varpool nodes we want to encode in the current unit
776 and insert them into the encoders. */
777 void
778 compute_ltrans_boundary (struct lto_out_decl_state *state,
779 cgraph_node_set set, varpool_node_set vset)
780 {
781 struct cgraph_node *node;
782 cgraph_node_set_iterator csi;
783 varpool_node_set_iterator vsi;
784 struct cgraph_edge *edge;
785 int i;
786 lto_cgraph_encoder_t encoder;
787 lto_varpool_encoder_t varpool_encoder;
788
789 encoder = state->cgraph_node_encoder = lto_cgraph_encoder_new ();
790 varpool_encoder = state->varpool_node_encoder = lto_varpool_encoder_new ();
791
792 /* Go over all the nodes in SET and assign references. */
793 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
794 {
795 node = csi_node (csi);
796 add_node_to (encoder, node, true);
797 add_references (encoder, varpool_encoder, &node->ref_list);
798 }
799 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
800 {
801 struct varpool_node *vnode = vsi_node (vsi);
802 gcc_assert (!vnode->alias);
803 lto_varpool_encoder_encode (varpool_encoder, vnode);
804 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
805 add_references (encoder, varpool_encoder, &vnode->ref_list);
806 }
807 /* Also pickle in the initializer of all referenced readonly variables
808 to help folding. Constant pool variables are not shared, so we must
809 pickle those too. */
810 for (i = 0; i < lto_varpool_encoder_size (varpool_encoder); i++)
811 {
812 struct varpool_node *vnode = lto_varpool_encoder_deref (varpool_encoder, i);
813 if (DECL_INITIAL (vnode->decl)
814 && !lto_varpool_encoder_encode_initializer_p (varpool_encoder,
815 vnode)
816 && (DECL_IN_CONSTANT_POOL (vnode->decl)
817 || TREE_READONLY (vnode->decl)))
818 {
819 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
820 add_references (encoder, varpool_encoder, &vnode->ref_list);
821 }
822 }
823
824 /* Go over all the nodes again to include callees that are not in
825 SET. */
826 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
827 {
828 node = csi_node (csi);
829 for (edge = node->callees; edge; edge = edge->next_callee)
830 {
831 struct cgraph_node *callee = edge->callee;
832 if (!cgraph_node_in_set_p (callee, set))
833 {
834 /* We should have moved all the inlines. */
835 gcc_assert (!callee->global.inlined_to);
836 add_node_to (encoder, callee, false);
837 }
838 }
839 }
840 }
841
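A minimal sketch of the intended calling sequence for one partition, under the assumption that STATE, SET and VSET are the out-decl state and node sets of that partition and that STATE is the decl state the output block will later pick up (in the real compiler the encoders are expected to have been set up via ipa_write_summaries_1, per the asserts in output_cgraph below):

  compute_ltrans_boundary (state, set, vset);  /* Populate the cgraph and varpool encoders.  */
  output_cgraph (set, vset);                   /* Stream nodes, edges, varpool and refs.  */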
842 /* Output the part of the cgraph in SET. */
843
844 void
845 output_cgraph (cgraph_node_set set, varpool_node_set vset)
846 {
847 struct cgraph_node *node;
848 struct lto_simple_output_block *ob;
849 cgraph_node_set_iterator csi;
850 int i, n_nodes;
851 lto_cgraph_encoder_t encoder;
852 lto_varpool_encoder_t varpool_encoder;
853 struct cgraph_asm_node *can;
854 static bool asm_nodes_output = false;
855
856 if (flag_wpa)
857 output_cgraph_opt_summary ();
858
859 ob = lto_create_simple_output_block (LTO_section_cgraph);
860
861 output_profile_summary (ob);
862
863 /* An encoder for cgraph nodes should have been created by
864 ipa_write_summaries_1. */
865 gcc_assert (ob->decl_state->cgraph_node_encoder);
866 gcc_assert (ob->decl_state->varpool_node_encoder);
867 encoder = ob->decl_state->cgraph_node_encoder;
868 varpool_encoder = ob->decl_state->varpool_node_encoder;
869
870 /* Write out the nodes. We must first output a node and then its clones,
871 otherwise, when reading back a clone, there would be nothing to clone
872 it from. */
873 n_nodes = lto_cgraph_encoder_size (encoder);
874 for (i = 0; i < n_nodes; i++)
875 {
876 node = lto_cgraph_encoder_deref (encoder, i);
877 lto_output_node (ob, node, encoder, set, vset);
878 }
879
880 /* Go over the nodes in SET again to write edges. */
881 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
882 {
883 node = csi_node (csi);
884 output_outgoing_cgraph_edges (node->callees, ob, encoder);
885 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
886 }
887
888 lto_output_uleb128_stream (ob->main_stream, 0);
889
890 /* Emit toplevel asms.
891 When doing WPA we must output every asm just once. Since we do not partition asm
892 nodes at all, output them all to the first output. This is kind of a hack, but it
893 should work well. */
894 if (!asm_nodes_output)
895 {
896 asm_nodes_output = true;
897 for (can = cgraph_asm_nodes; can; can = can->next)
898 {
899 int len = TREE_STRING_LENGTH (can->asm_str);
900 lto_output_uleb128_stream (ob->main_stream, len);
901 for (i = 0; i < len; ++i)
902 lto_output_1_stream (ob->main_stream,
903 TREE_STRING_POINTER (can->asm_str)[i]);
904 }
905 }
906
907 lto_output_uleb128_stream (ob->main_stream, 0);
908
909 lto_destroy_simple_output_block (ob);
910 output_varpool (set, vset);
911 output_refs (set, vset, encoder, varpool_encoder);
912 }
913
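For orientation, the LTO_section_cgraph stream produced above (and read back by input_profile_summary and input_cgraph_1 below) is laid out roughly as follows:

  /* profile summary: uleb128 runs, then sum_all, run_max, sum_max when runs != 0
     one record per encoded cgraph node (lto_output_node)
     edge records for the nodes in SET (lto_output_edge)
     uleb128 0                          -- stopper after nodes and edges
     toplevel asms as (uleb128 length, bytes), written by the first partition only
     uleb128 0                          -- stopper after asms
     varpool nodes and IPA references follow in the separate
     LTO_section_varpool and LTO_section_refs sections.  */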
914 /* Overwrite the information in NODE based on FILE_DATA, TAG, STACK_SIZE,
915 SELF_TIME, TIME_INLINING_BENEFIT, SELF_SIZE and SIZE_INLINING_BENEFIT.
916 This is called either to initialize NODE or to replace the values in it,
917 for instance because the first time we saw it, the function body was not
918 available but now it is. BP is a bitpack with all the bitflags for NODE
919 read from the stream. */
920
921 static void
922 input_overwrite_node (struct lto_file_decl_data *file_data,
923 struct cgraph_node *node,
924 enum LTO_cgraph_tags tag,
925 struct bitpack_d *bp,
926 unsigned int stack_size,
927 unsigned int self_time,
928 unsigned int time_inlining_benefit,
929 unsigned int self_size,
930 unsigned int size_inlining_benefit)
931 {
932 node->aux = (void *) tag;
933 node->local.inline_summary.estimated_self_stack_size = stack_size;
934 node->local.inline_summary.self_time = self_time;
935 node->local.inline_summary.time_inlining_benefit = time_inlining_benefit;
936 node->local.inline_summary.self_size = self_size;
937 node->local.inline_summary.size_inlining_benefit = size_inlining_benefit;
938 node->global.time = self_time;
939 node->global.size = self_size;
940 node->global.estimated_stack_size = stack_size;
941 node->global.estimated_growth = INT_MIN;
942 node->local.lto_file_data = file_data;
943
944 node->local.local = bp_unpack_value (bp, 1);
945 node->local.externally_visible = bp_unpack_value (bp, 1);
946 node->local.finalized = bp_unpack_value (bp, 1);
947 node->local.inlinable = bp_unpack_value (bp, 1);
948 node->local.versionable = bp_unpack_value (bp, 1);
949 node->local.disregard_inline_limits = bp_unpack_value (bp, 1);
950 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
951 node->local.vtable_method = bp_unpack_value (bp, 1);
952 node->needed = bp_unpack_value (bp, 1);
953 node->address_taken = bp_unpack_value (bp, 1);
954 node->abstract_and_needed = bp_unpack_value (bp, 1);
955 node->reachable_from_other_partition = bp_unpack_value (bp, 1);
956 node->lowered = bp_unpack_value (bp, 1);
957 node->analyzed = tag == LTO_cgraph_analyzed_node;
958 node->in_other_partition = bp_unpack_value (bp, 1);
959 if (node->in_other_partition
960 /* Avoid updating the decl when we are seeing just an inline clone.
961 When inlining a function that has functions already inlined into it,
962 we produce clones of inline clones.
963
964 WPA partitioning might put each clone into a different unit and
965 we might end up streaming an inline clone from another partition
966 to support the clone we are interested in. */
967 && (!node->clone_of
968 || node->clone_of->decl != node->decl))
969 {
970 DECL_EXTERNAL (node->decl) = 1;
971 TREE_STATIC (node->decl) = 0;
972 }
973 node->alias = bp_unpack_value (bp, 1);
974 node->finalized_by_frontend = bp_unpack_value (bp, 1);
975 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
976 }
977
978 /* Output the varpool nodes for the partition described by SET and VSET. */
979
980 static void
981 output_varpool (cgraph_node_set set, varpool_node_set vset)
982 {
983 struct lto_simple_output_block *ob = lto_create_simple_output_block (LTO_section_varpool);
984 lto_varpool_encoder_t varpool_encoder = ob->decl_state->varpool_node_encoder;
985 int len = lto_varpool_encoder_size (varpool_encoder), i;
986
987 lto_output_uleb128_stream (ob->main_stream, len);
988
989 /* Write out the nodes. We must first output a node and then its clones,
990 otherwise, when reading back a clone, there would be nothing to clone
991 it from. */
992 for (i = 0; i < len; i++)
993 {
994 lto_output_varpool_node (ob, lto_varpool_encoder_deref (varpool_encoder, i),
995 varpool_encoder,
996 set, vset);
997 }
998
999 lto_destroy_simple_output_block (ob);
1000 }
1001
1002 /* Read a node from input_block IB. TAG is the node's tag just read.
1003 Return the node read or overwritten. */
1004
1005 static struct cgraph_node *
1006 input_node (struct lto_file_decl_data *file_data,
1007 struct lto_input_block *ib,
1008 enum LTO_cgraph_tags tag,
1009 VEC(cgraph_node_ptr, heap) *nodes)
1010 {
1011 tree fn_decl;
1012 struct cgraph_node *node;
1013 struct bitpack_d bp;
1014 int stack_size = 0;
1015 unsigned decl_index;
1016 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
1017 int self_time = 0;
1018 int self_size = 0;
1019 int time_inlining_benefit = 0;
1020 int size_inlining_benefit = 0;
1021 unsigned long same_body_count = 0;
1022 int clone_ref;
1023
1024 clone_ref = lto_input_sleb128 (ib);
1025
1026 decl_index = lto_input_uleb128 (ib);
1027 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1028
1029 if (clone_ref != LCC_NOT_FOUND)
1030 {
1031 node = cgraph_clone_node (VEC_index (cgraph_node_ptr, nodes, clone_ref), fn_decl,
1032 0, CGRAPH_FREQ_BASE, 0, false, NULL);
1033 }
1034 else
1035 node = cgraph_node (fn_decl);
1036
1037 node->count = lto_input_sleb128 (ib);
1038
1039 if (tag == LTO_cgraph_analyzed_node)
1040 {
1041 stack_size = lto_input_sleb128 (ib);
1042 self_size = lto_input_sleb128 (ib);
1043 size_inlining_benefit = lto_input_sleb128 (ib);
1044 self_time = lto_input_sleb128 (ib);
1045 time_inlining_benefit = lto_input_sleb128 (ib);
1046
1047 ref = lto_input_sleb128 (ib);
1048 }
1049
1050 ref2 = lto_input_sleb128 (ib);
1051
1052 /* Make sure that we have not read this node before. Nodes that
1053 have already been read will have their tag stored in the 'aux'
1054 field. Since built-in functions can be referenced in multiple
1055 functions, they are expected to be read more than once. */
1056 if (node->aux && !DECL_IS_BUILTIN (node->decl))
1057 internal_error ("bytecode stream: found multiple instances of cgraph "
1058 "node %d", node->uid);
1059
1060 bp = lto_input_bitpack (ib);
1061 input_overwrite_node (file_data, node, tag, &bp, stack_size, self_time,
1062 time_inlining_benefit, self_size,
1063 size_inlining_benefit);
1064
1065 /* Store a reference for now, and fix up later to be a pointer. */
1066 node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
1067
1068 /* Store a reference for now, and fix up later to be a pointer. */
1069 node->same_comdat_group = (cgraph_node_ptr) (intptr_t) ref2;
1070
1071 same_body_count = lto_input_uleb128 (ib);
1072 while (same_body_count-- > 0)
1073 {
1074 tree alias_decl;
1075 int type;
1076 decl_index = lto_input_uleb128 (ib);
1077 alias_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1078 type = lto_input_uleb128 (ib);
1079 if (!type)
1080 {
1081 tree real_alias;
1082 decl_index = lto_input_uleb128 (ib);
1083 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1084 cgraph_same_body_alias (alias_decl, real_alias);
1085 }
1086 else
1087 {
1088 HOST_WIDE_INT fixed_offset = lto_input_uleb128 (ib);
1089 HOST_WIDE_INT virtual_value = lto_input_uleb128 (ib);
1090 tree real_alias;
1091 decl_index = lto_input_uleb128 (ib);
1092 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1093 cgraph_add_thunk (alias_decl, fn_decl, type & 2, fixed_offset,
1094 virtual_value,
1095 (type & 4) ? size_int (virtual_value) : NULL_TREE,
1096 real_alias);
1097 }
1098 }
1099 return node;
1100 }
1101
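A decoding note for the same-body records handled in the loop above: lto_output_node writes the type value as 0 for a plain same-body alias and as 1 + 2*this_adjusting + 4*virtual_offset_p for a thunk, so the bits tested here mean:

  /* type == 0   plain alias: only the target decl index follows.
     type & 1    thunk: fixed_offset, virtual_value and the target decl follow.
     type & 2    the thunk adjusts the this pointer (thunk.this_adjusting).
     type & 4    virtual_value is a vtable offset (thunk.virtual_offset_p).  */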
1102 /* Read a varpool node from input_block IB using FILE_DATA.
1103 Return the node read or overwritten. */
1104
1105 static struct varpool_node *
1106 input_varpool_node (struct lto_file_decl_data *file_data,
1107 struct lto_input_block *ib)
1108 {
1109 int decl_index;
1110 tree var_decl;
1111 struct varpool_node *node;
1112 struct bitpack_d bp;
1113 bool aliases_p;
1114 int count;
1115 int ref = LCC_NOT_FOUND;
1116
1117 decl_index = lto_input_uleb128 (ib);
1118 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1119 node = varpool_node (var_decl);
1120 node->lto_file_data = file_data;
1121
1122 bp = lto_input_bitpack (ib);
1123 node->externally_visible = bp_unpack_value (&bp, 1);
1124 node->force_output = bp_unpack_value (&bp, 1);
1125 node->finalized = bp_unpack_value (&bp, 1);
1126 node->alias = bp_unpack_value (&bp, 1);
1127 node->const_value_known = bp_unpack_value (&bp, 1);
1128 node->analyzed = node->finalized;
1129 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1130 node->in_other_partition = bp_unpack_value (&bp, 1);
1131 if (node->in_other_partition)
1132 {
1133 DECL_EXTERNAL (node->decl) = 1;
1134 TREE_STATIC (node->decl) = 0;
1135 }
1136 aliases_p = bp_unpack_value (&bp, 1);
1137 if (node->finalized)
1138 varpool_mark_needed_node (node);
1139 ref = lto_input_sleb128 (ib);
1140 /* Store a reference for now, and fix up later to be a pointer. */
1141 node->same_comdat_group = (struct varpool_node *) (intptr_t) ref;
1142 if (aliases_p)
1143 {
1144 count = lto_input_uleb128 (ib);
1145 for (; count > 0; count --)
1146 {
1147 tree decl = lto_file_decl_data_get_var_decl (file_data,
1148 lto_input_uleb128 (ib));
1149 varpool_extra_name_alias (decl, var_decl);
1150 }
1151 }
1152 return node;
1153 }
1154
1155 /* Read an ipa_ref from input_block IB and record it for REFERING_NODE
1156 or REFERING_VARPOOL_NODE, resolving indexes via NODES and VARPOOL_NODES. */
1157
1158 static void
1159 input_ref (struct lto_input_block *ib,
1160 struct cgraph_node *refering_node,
1161 struct varpool_node *refering_varpool_node,
1162 VEC(cgraph_node_ptr, heap) *nodes,
1163 VEC(varpool_node_ptr, heap) *varpool_nodes)
1164 {
1165 struct cgraph_node *node = NULL;
1166 struct varpool_node *varpool_node = NULL;
1167 struct bitpack_d bp;
1168 enum ipa_ref_type type;
1169 enum ipa_ref_use use;
1170
1171 bp = lto_input_bitpack (ib);
1172 type = (enum ipa_ref_type) bp_unpack_value (&bp, 1);
1173 use = (enum ipa_ref_use) bp_unpack_value (&bp, 2);
1174 if (type == IPA_REF_CGRAPH)
1175 node = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1176 else
1177 varpool_node = VEC_index (varpool_node_ptr, varpool_nodes, lto_input_sleb128 (ib));
1178 ipa_record_reference (refering_node, refering_varpool_node,
1179 node, varpool_node, use, NULL);
1180 }
1181
1182 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1183 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1184 edge being read is indirect (in the sense that it has
1185 indirect_unknown_callee set). */
1186
1187 static void
1188 input_edge (struct lto_input_block *ib, VEC(cgraph_node_ptr, heap) *nodes,
1189 bool indirect)
1190 {
1191 struct cgraph_node *caller, *callee;
1192 struct cgraph_edge *edge;
1193 unsigned int stmt_id;
1194 gcov_type count;
1195 int freq;
1196 unsigned int nest;
1197 cgraph_inline_failed_t inline_failed;
1198 struct bitpack_d bp;
1199 int ecf_flags = 0;
1200
1201 caller = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1202 if (caller == NULL || caller->decl == NULL_TREE)
1203 internal_error ("bytecode stream: no caller found while reading edge");
1204
1205 if (!indirect)
1206 {
1207 callee = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1208 if (callee == NULL || callee->decl == NULL_TREE)
1209 internal_error ("bytecode stream: no callee found while reading edge");
1210 }
1211 else
1212 callee = NULL;
1213
1214 count = (gcov_type) lto_input_sleb128 (ib);
1215
1216 bp = lto_input_bitpack (ib);
1217 stmt_id = (unsigned int) bp_unpack_value (&bp, HOST_BITS_PER_INT);
1218 inline_failed = (cgraph_inline_failed_t) bp_unpack_value (&bp,
1219 HOST_BITS_PER_INT);
1220 freq = (int) bp_unpack_value (&bp, HOST_BITS_PER_INT);
1221 nest = (unsigned) bp_unpack_value (&bp, 30);
1222
1223 if (indirect)
1224 edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq, nest);
1225 else
1226 edge = cgraph_create_edge (caller, callee, NULL, count, freq, nest);
1227
1228 edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
1229 edge->lto_stmt_uid = stmt_id;
1230 edge->inline_failed = inline_failed;
1231 edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
1232 edge->can_throw_external = bp_unpack_value (&bp, 1);
1233 if (indirect)
1234 {
1235 if (bp_unpack_value (&bp, 1))
1236 ecf_flags |= ECF_CONST;
1237 if (bp_unpack_value (&bp, 1))
1238 ecf_flags |= ECF_PURE;
1239 if (bp_unpack_value (&bp, 1))
1240 ecf_flags |= ECF_NORETURN;
1241 if (bp_unpack_value (&bp, 1))
1242 ecf_flags |= ECF_MALLOC;
1243 if (bp_unpack_value (&bp, 1))
1244 ecf_flags |= ECF_NOTHROW;
1245 if (bp_unpack_value (&bp, 1))
1246 ecf_flags |= ECF_RETURNS_TWICE;
1247 edge->indirect_info->ecf_flags = ecf_flags;
1248 }
1249 }
1250
1251
1252 /* Read a cgraph from IB using the info in FILE_DATA. */
1253
1254 static VEC(cgraph_node_ptr, heap) *
1255 input_cgraph_1 (struct lto_file_decl_data *file_data,
1256 struct lto_input_block *ib)
1257 {
1258 enum LTO_cgraph_tags tag;
1259 VEC(cgraph_node_ptr, heap) *nodes = NULL;
1260 struct cgraph_node *node;
1261 unsigned i;
1262 unsigned HOST_WIDE_INT len;
1263
1264 tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
1265 while (tag)
1266 {
1267 if (tag == LTO_cgraph_edge)
1268 input_edge (ib, nodes, false);
1269 else if (tag == LTO_cgraph_indirect_edge)
1270 input_edge (ib, nodes, true);
1271 else
1272 {
1273 node = input_node (file_data, ib, tag, nodes);
1274 if (node == NULL || node->decl == NULL_TREE)
1275 internal_error ("bytecode stream: found empty cgraph node");
1276 VEC_safe_push (cgraph_node_ptr, heap, nodes, node);
1277 lto_cgraph_encoder_encode (file_data->cgraph_node_encoder, node);
1278 }
1279
1280 tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
1281 }
1282
1283 /* Input toplevel asms. */
1284 len = lto_input_uleb128 (ib);
1285 while (len)
1286 {
1287 char *str = (char *)xmalloc (len + 1);
1288 for (i = 0; i < len; ++i)
1289 str[i] = lto_input_1_unsigned (ib);
1290 cgraph_add_asm_node (build_string (len, str));
1291 free (str);
1292
1293 len = lto_input_uleb128 (ib);
1294 }
1295
1296 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1297 {
1298 int ref = (int) (intptr_t) node->global.inlined_to;
1299
1300 /* Fixup inlined_to from reference to pointer. */
1301 if (ref != LCC_NOT_FOUND)
1302 node->global.inlined_to = VEC_index (cgraph_node_ptr, nodes, ref);
1303 else
1304 node->global.inlined_to = NULL;
1305
1306 ref = (int) (intptr_t) node->same_comdat_group;
1307
1308 /* Fixup same_comdat_group from reference to pointer. */
1309 if (ref != LCC_NOT_FOUND)
1310 node->same_comdat_group = VEC_index (cgraph_node_ptr, nodes, ref);
1311 else
1312 node->same_comdat_group = NULL;
1313 }
1314 return nodes;
1315 }
1316
1317 /* Read a varpool from IB using the info in FILE_DATA. */
1318
1319 static VEC(varpool_node_ptr, heap) *
1320 input_varpool_1 (struct lto_file_decl_data *file_data,
1321 struct lto_input_block *ib)
1322 {
1323 unsigned HOST_WIDE_INT len;
1324 VEC(varpool_node_ptr, heap) *varpool = NULL;
1325 int i;
1326 struct varpool_node *node;
1327
1328 len = lto_input_uleb128 (ib);
1329 while (len)
1330 {
1331 VEC_safe_push (varpool_node_ptr, heap, varpool,
1332 input_varpool_node (file_data, ib));
1333 len--;
1334 }
1335 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1336 {
1337 int ref = (int) (intptr_t) node->same_comdat_group;
1338
1339 /* Fixup same_comdat_group from reference to pointer. */
1340 if (ref != LCC_NOT_FOUND)
1341 node->same_comdat_group = VEC_index (varpool_node_ptr, varpool, ref);
1342 else
1343 node->same_comdat_group = NULL;
1344 }
1345 return varpool;
1346 }
1347
1348 /* Input ipa_refs. */
1349
1350 static void
1351 input_refs (struct lto_input_block *ib,
1352 VEC(cgraph_node_ptr, heap) *nodes,
1353 VEC(varpool_node_ptr, heap) *varpool)
1354 {
1355 int count;
1356 int idx;
1357 while (true)
1358 {
1359 struct cgraph_node *node;
1360 count = lto_input_uleb128 (ib);
1361 if (!count)
1362 break;
1363 idx = lto_input_uleb128 (ib);
1364 node = VEC_index (cgraph_node_ptr, nodes, idx);
1365 while (count)
1366 {
1367 input_ref (ib, node, NULL, nodes, varpool);
1368 count--;
1369 }
1370 }
1371 while (true)
1372 {
1373 struct varpool_node *node;
1374 count = lto_input_uleb128 (ib);
1375 if (!count)
1376 break;
1377 node = VEC_index (varpool_node_ptr, varpool, lto_input_uleb128 (ib));
1378 while (count)
1379 {
1380 input_ref (ib, NULL, node, nodes, varpool);
1381 count--;
1382 }
1383 }
1384 }
1385
1386
1387 static struct gcov_ctr_summary lto_gcov_summary;
1388
1389 /* Input profile_info from IB. */
1390 static void
1391 input_profile_summary (struct lto_input_block *ib)
1392 {
1393 unsigned int runs = lto_input_uleb128 (ib);
1394 if (runs)
1395 {
1396 if (!profile_info)
1397 {
1398 profile_info = &lto_gcov_summary;
1399 lto_gcov_summary.runs = runs;
1400 lto_gcov_summary.sum_all = lto_input_sleb128 (ib);
1401 lto_gcov_summary.run_max = lto_input_sleb128 (ib);
1402 lto_gcov_summary.sum_max = lto_input_sleb128 (ib);
1403 }
1404 /* We can support this by scaling all counts to the nearest common multiple
1405 of all different runs, but it is perhaps not worth the effort. */
1406 else if (profile_info->runs != runs
1407 || profile_info->sum_all != lto_input_sleb128 (ib)
1408 || profile_info->run_max != lto_input_sleb128 (ib)
1409 || profile_info->sum_max != lto_input_sleb128 (ib))
1410 sorry ("Combining units with different profiles is not supported.");
1411 /* We allow some units to have a profile and others to not have one. This will
1412 just make the unprofiled units be size optimized, which is sane. */
1413 }
1414
1415 }
1416
1417 /* Input and merge the cgraph from each of the .o files passed to
1418 lto1. */
1419
1420 void
1421 input_cgraph (void)
1422 {
1423 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1424 struct lto_file_decl_data *file_data;
1425 unsigned int j = 0;
1426 struct cgraph_node *node;
1427
1428 while ((file_data = file_data_vec[j++]))
1429 {
1430 const char *data;
1431 size_t len;
1432 struct lto_input_block *ib;
1433 VEC(cgraph_node_ptr, heap) *nodes;
1434 VEC(varpool_node_ptr, heap) *varpool;
1435
1436 ib = lto_create_simple_input_block (file_data, LTO_section_cgraph,
1437 &data, &len);
1438 input_profile_summary (ib);
1439 file_data->cgraph_node_encoder = lto_cgraph_encoder_new ();
1440 nodes = input_cgraph_1 (file_data, ib);
1441 lto_destroy_simple_input_block (file_data, LTO_section_cgraph,
1442 ib, data, len);
1443
1444 ib = lto_create_simple_input_block (file_data, LTO_section_varpool,
1445 &data, &len);
1446 varpool = input_varpool_1 (file_data, ib);
1447 lto_destroy_simple_input_block (file_data, LTO_section_varpool,
1448 ib, data, len);
1449
1450 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1451 &data, &len);
1452 input_refs (ib, nodes, varpool);
1453 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1454 ib, data, len);
1455 if (flag_ltrans)
1456 input_cgraph_opt_summary (nodes);
1457 VEC_free (cgraph_node_ptr, heap, nodes);
1458 VEC_free (varpool_node_ptr, heap, varpool);
1459 }
1460
1461 /* Clear out the aux field that was used to store enough state to
1462 tell which nodes should be overwritten. */
1463 for (node = cgraph_nodes; node; node = node->next)
1464 {
1465 /* Some nodes may have been created by cgraph_node. This
1466 happens when the callgraph contains nested functions. If the
1467 node for the parent function was never emitted to the gimple
1468 file, cgraph_node will create a node for it when setting the
1469 context of the nested function. */
1470 if (node->local.lto_file_data)
1471 node->aux = NULL;
1472 }
1473 }
1474
1475 /* True when we need optimization summary for NODE. */
1476
1477 static int
1478 output_cgraph_opt_summary_p (struct cgraph_node *node)
1479 {
1480 if (!node->clone_of)
1481 return false;
1482 return (node->clone.tree_map
1483 || node->clone.args_to_skip
1484 || node->clone.combined_args_to_skip);
1485 }
1486
1487 /* Output optimization summary for NODE to OB. */
1488
1489 static void
1490 output_node_opt_summary (struct output_block *ob,
1491 struct cgraph_node *node)
1492 {
1493 unsigned int index;
1494 bitmap_iterator bi;
1495 struct ipa_replace_map *map;
1496 struct bitpack_d bp;
1497 int i;
1498
1499 lto_output_uleb128_stream (ob->main_stream,
1500 bitmap_count_bits (node->clone.args_to_skip));
1501 EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
1502 lto_output_uleb128_stream (ob->main_stream, index);
1503 lto_output_uleb128_stream (ob->main_stream,
1504 bitmap_count_bits (node->clone.combined_args_to_skip));
1505 EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
1506 lto_output_uleb128_stream (ob->main_stream, index);
1507 lto_output_uleb128_stream (ob->main_stream,
1508 VEC_length (ipa_replace_map_p, node->clone.tree_map));
1509 FOR_EACH_VEC_ELT (ipa_replace_map_p, node->clone.tree_map, i, map)
1510 {
1511 int parm_num;
1512 tree parm;
1513
1514 for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm;
1515 parm = DECL_CHAIN (parm), parm_num++)
1516 if (map->old_tree == parm)
1517 break;
1518 /* At the moment we assume all old trees are PARM_DECLs, because we have no
1519 mechanism to store function local declarations into summaries. */
1520 gcc_assert (parm);
1521 lto_output_uleb128_stream (ob->main_stream, parm_num);
1522 lto_output_tree (ob, map->new_tree, true);
1523 bp = bitpack_create (ob->main_stream);
1524 bp_pack_value (&bp, map->replace_p, 1);
1525 bp_pack_value (&bp, map->ref_p, 1);
1526 lto_output_bitpack (&bp);
1527 }
1528 }
1529
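A short worked example of the parameter encoding above (declarations hypothetical): for a clone of int f (int a, int b) whose tree_map entry replaces b, the writer finds b at position 1 in DECL_ARGUMENTS and streams

  /* uleb128  1               -- parm_num: b is the second parameter
     tree     map->new_tree   -- via lto_output_tree
     bitpack  replace_p, ref_p (one bit each)  */

input_node_opt_summary below reads these back into a fresh ipa_replace_map, keeping only the parameter number and leaving map->old_tree NULL.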
1530 /* Output optimization summaries stored in callgraph.
1531 At the moment it is the clone info structure. */
1532
1533 static void
1534 output_cgraph_opt_summary (void)
1535 {
1536 struct cgraph_node *node;
1537 int i, n_nodes;
1538 lto_cgraph_encoder_t encoder;
1539 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1540 unsigned count = 0;
1541
1542 ob->cgraph_node = NULL;
1543 encoder = ob->decl_state->cgraph_node_encoder;
1544 n_nodes = lto_cgraph_encoder_size (encoder);
1545 for (i = 0; i < n_nodes; i++)
1546 if (output_cgraph_opt_summary_p (lto_cgraph_encoder_deref (encoder, i)))
1547 count++;
1548 lto_output_uleb128_stream (ob->main_stream, count);
1549 for (i = 0; i < n_nodes; i++)
1550 {
1551 node = lto_cgraph_encoder_deref (encoder, i);
1552 if (output_cgraph_opt_summary_p (node))
1553 {
1554 lto_output_uleb128_stream (ob->main_stream, i);
1555 output_node_opt_summary (ob, node);
1556 }
1557 }
1558 produce_asm (ob, NULL);
1559 destroy_output_block (ob);
1560 }
1561
1562 /* Input optimization summary of NODE. */
1563
1564 static void
1565 input_node_opt_summary (struct cgraph_node *node,
1566 struct lto_input_block *ib_main,
1567 struct data_in *data_in)
1568 {
1569 int i;
1570 int count;
1571 int bit;
1572 struct bitpack_d bp;
1573
1574 count = lto_input_uleb128 (ib_main);
1575 if (count)
1576 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
1577 for (i = 0; i < count; i++)
1578 {
1579 bit = lto_input_uleb128 (ib_main);
1580 bitmap_set_bit (node->clone.args_to_skip, bit);
1581 }
1582 count = lto_input_uleb128 (ib_main);
1583 if (count)
1584 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
1585 for (i = 0; i < count; i++)
1586 {
1587 bit = lto_input_uleb128 (ib_main);
1588 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
1589 }
1590 count = lto_input_uleb128 (ib_main);
1591 for (i = 0; i < count; i++)
1592 {
1593 int parm_num;
1594 tree parm;
1595 struct ipa_replace_map *map = ggc_alloc_ipa_replace_map ();
1596
1597 VEC_safe_push (ipa_replace_map_p, gc, node->clone.tree_map, map);
1598 for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm_num;
1599 parm = DECL_CHAIN (parm))
1600 parm_num --;
1601 map->parm_num = lto_input_uleb128 (ib_main);
1602 map->old_tree = NULL;
1603 map->new_tree = lto_input_tree (ib_main, data_in);
1604 bp = lto_input_bitpack (ib_main);
1605 map->replace_p = bp_unpack_value (&bp, 1);
1606 map->ref_p = bp_unpack_value (&bp, 1);
1607 }
1608 }
1609
1610 /* Read section in file FILE_DATA of length LEN with data DATA. */
1611
1612 static void
1613 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
1614 const char *data, size_t len, VEC (cgraph_node_ptr,
1615 heap) * nodes)
1616 {
1617 const struct lto_function_header *header =
1618 (const struct lto_function_header *) data;
1619 const int32_t cfg_offset = sizeof (struct lto_function_header);
1620 const int32_t main_offset = cfg_offset + header->cfg_size;
1621 const int32_t string_offset = main_offset + header->main_size;
1622 struct data_in *data_in;
1623 struct lto_input_block ib_main;
1624 unsigned int i;
1625 unsigned int count;
1626
1627 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
1628 header->main_size);
1629
1630 data_in =
1631 lto_data_in_create (file_data, (const char *) data + string_offset,
1632 header->string_size, NULL);
1633 count = lto_input_uleb128 (&ib_main);
1634
1635 for (i = 0; i < count; i++)
1636 {
1637 int ref = lto_input_uleb128 (&ib_main);
1638 input_node_opt_summary (VEC_index (cgraph_node_ptr, nodes, ref),
1639 &ib_main, data_in);
1640 }
1641 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
1642 len);
1643 lto_data_in_delete (data_in);
1644 }
1645
1646 /* Input optimization summary of cgraph. */
1647
1648 static void
1649 input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes)
1650 {
1651 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1652 struct lto_file_decl_data *file_data;
1653 unsigned int j = 0;
1654
1655 while ((file_data = file_data_vec[j++]))
1656 {
1657 size_t len;
1658 const char *data =
1659 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
1660 &len);
1661
1662 if (data)
1663 input_cgraph_opt_section (file_data, data, len, nodes);
1664 }
1665 }