cgraph.h (struct cgraph_node): Remove finalized_by_frontend.
[gcc.git] / gcc / lto-cgraph.c
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright 2009, 2010, 2011 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "expr.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "hashtab.h"
33 #include "langhooks.h"
34 #include "basic-block.h"
35 #include "tree-flow.h"
36 #include "cgraph.h"
37 #include "function.h"
38 #include "ggc.h"
39 #include "diagnostic-core.h"
40 #include "except.h"
41 #include "vec.h"
42 #include "timevar.h"
43 #include "output.h"
44 #include "pointer-set.h"
45 #include "lto-streamer.h"
46 #include "gcov-io.h"
47
48 static void output_varpool (cgraph_node_set, varpool_node_set);
49 static void output_cgraph_opt_summary (cgraph_node_set set);
50 static void input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes);
51
52
53 /* Cgraph streaming is organized as a set of records whose type
54    is indicated by a tag. */
55 enum LTO_cgraph_tags
56 {
57 /* Must leave 0 for the stopper. */
58
59 /* Cgraph node without body available. */
60 LTO_cgraph_unavail_node = 1,
61 /* Cgraph node with function body. */
62 LTO_cgraph_analyzed_node,
63 /* Cgraph edges. */
64 LTO_cgraph_edge,
65 LTO_cgraph_indirect_edge
66 };
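/* An illustrative sketch (not a formal grammar) of how these tags are laid
   out in the LTO_section_cgraph stream, as written by output_cgraph and read
   back by input_cgraph_1:

       <profile summary>                            (output_profile_summary)
       LTO_cgraph_{analyzed,unavail}_node record    (one per encoded node)
       ...
       LTO_cgraph_edge / LTO_cgraph_indirect_edge   (one per outgoing edge)
       ...
       0                                            (tag terminator)
       <uleb128 length> <asm string bytes>          (toplevel asms; emitted
       ...                                           by the first output only)
       0                                            (length terminator)  */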
67
68 /* Create a new cgraph encoder. */
69
70 lto_cgraph_encoder_t
71 lto_cgraph_encoder_new (void)
72 {
73 lto_cgraph_encoder_t encoder = XCNEW (struct lto_cgraph_encoder_d);
74 encoder->map = pointer_map_create ();
75 encoder->nodes = NULL;
76 encoder->body = pointer_set_create ();
77 return encoder;
78 }
79
80
81 /* Delete ENCODER and its components. */
82
83 void
84 lto_cgraph_encoder_delete (lto_cgraph_encoder_t encoder)
85 {
86 VEC_free (cgraph_node_ptr, heap, encoder->nodes);
87 pointer_map_destroy (encoder->map);
88 pointer_set_destroy (encoder->body);
89 free (encoder);
90 }
91
92
93 /* Return the existing reference number of NODE in the cgraph encoder
94    ENCODER. Assign a new reference if this is the first time NODE
95    is encoded. */
96
97 int
98 lto_cgraph_encoder_encode (lto_cgraph_encoder_t encoder,
99 struct cgraph_node *node)
100 {
101 int ref;
102 void **slot;
103
104 slot = pointer_map_contains (encoder->map, node);
105 if (!slot)
106 {
107 ref = VEC_length (cgraph_node_ptr, encoder->nodes);
108 slot = pointer_map_insert (encoder->map, node);
109 *slot = (void *) (intptr_t) ref;
110 VEC_safe_push (cgraph_node_ptr, heap, encoder->nodes, node);
111 }
112 else
113 ref = (int) (intptr_t) *slot;
114
115 return ref;
116 }
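/* A minimal usage sketch of the encoder API above; illustrative only, not
   part of the streaming code. Encoding is idempotent: the first call assigns
   the next free index, later calls return the same one. SOME_NODE stands for
   any struct cgraph_node * and is purely hypothetical.

       lto_cgraph_encoder_t enc = lto_cgraph_encoder_new ();
       int r1 = lto_cgraph_encoder_encode (enc, some_node);
       int r2 = lto_cgraph_encoder_encode (enc, some_node);
       gcc_assert (r1 == r2);
       gcc_assert (lto_cgraph_encoder_lookup (enc, some_node) == r1);
       gcc_assert (lto_cgraph_encoder_deref (enc, r1) == some_node);
       lto_cgraph_encoder_delete (enc);  */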
117
118 #define LCC_NOT_FOUND (-1)
119
120 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
121 or LCC_NOT_FOUND if it is not there. */
122
123 int
124 lto_cgraph_encoder_lookup (lto_cgraph_encoder_t encoder,
125 struct cgraph_node *node)
126 {
127 void **slot = pointer_map_contains (encoder->map, node);
128 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
129 }
130
131
132 /* Return the cgraph node corresponding to REF using ENCODER. */
133
134 struct cgraph_node *
135 lto_cgraph_encoder_deref (lto_cgraph_encoder_t encoder, int ref)
136 {
137 if (ref == LCC_NOT_FOUND)
138 return NULL;
139
140 return VEC_index (cgraph_node_ptr, encoder->nodes, ref);
141 }
142
143
144 /* Return TRUE if we should encode the body of NODE (if any). */
145
146 bool
147 lto_cgraph_encoder_encode_body_p (lto_cgraph_encoder_t encoder,
148 struct cgraph_node *node)
149 {
150 return pointer_set_contains (encoder->body, node);
151 }
152
153 /* Specify that we should encode the body of NODE (if any). */
154
155 static void
156 lto_set_cgraph_encoder_encode_body (lto_cgraph_encoder_t encoder,
157 struct cgraph_node *node)
158 {
159 pointer_set_insert (encoder->body, node);
160 }
161
162 /* Create a new varpool encoder. */
163
164 lto_varpool_encoder_t
165 lto_varpool_encoder_new (void)
166 {
167 lto_varpool_encoder_t encoder = XCNEW (struct lto_varpool_encoder_d);
168 encoder->map = pointer_map_create ();
169 encoder->initializer = pointer_set_create ();
170 encoder->nodes = NULL;
171 return encoder;
172 }
173
174
175 /* Delete ENCODER and its components. */
176
177 void
178 lto_varpool_encoder_delete (lto_varpool_encoder_t encoder)
179 {
180 VEC_free (varpool_node_ptr, heap, encoder->nodes);
181 pointer_map_destroy (encoder->map);
182 pointer_set_destroy (encoder->initializer);
183 free (encoder);
184 }
185
186
187 /* Return the existing reference number of NODE in the varpool encoder
188    ENCODER. Assign a new reference if this is the first time NODE
189    is encoded. */
190
191 int
192 lto_varpool_encoder_encode (lto_varpool_encoder_t encoder,
193 struct varpool_node *node)
194 {
195 int ref;
196 void **slot;
197
198 slot = pointer_map_contains (encoder->map, node);
199 if (!slot)
200 {
201 ref = VEC_length (varpool_node_ptr, encoder->nodes);
202 slot = pointer_map_insert (encoder->map, node);
203 *slot = (void *) (intptr_t) ref;
204 VEC_safe_push (varpool_node_ptr, heap, encoder->nodes, node);
205 }
206 else
207 ref = (int) (intptr_t) *slot;
208
209 return ref;
210 }
211
212 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
213 or LCC_NOT_FOUND if it is not there. */
214
215 int
216 lto_varpool_encoder_lookup (lto_varpool_encoder_t encoder,
217 struct varpool_node *node)
218 {
219 void **slot = pointer_map_contains (encoder->map, node);
220 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
221 }
222
223
224 /* Return the varpool node corresponding to REF using ENCODER. */
225
226 struct varpool_node *
227 lto_varpool_encoder_deref (lto_varpool_encoder_t encoder, int ref)
228 {
229 if (ref == LCC_NOT_FOUND)
230 return NULL;
231
232 return VEC_index (varpool_node_ptr, encoder->nodes, ref);
233 }
234
235
236 /* Return TRUE if we should encode initializer of NODE (if any). */
237
238 bool
239 lto_varpool_encoder_encode_initializer_p (lto_varpool_encoder_t encoder,
240 struct varpool_node *node)
241 {
242 return pointer_set_contains (encoder->initializer, node);
243 }
244
245 /* Specify that we should encode the initializer of NODE (if any). */
246
247 static void
248 lto_set_varpool_encoder_encode_initializer (lto_varpool_encoder_t encoder,
249 struct varpool_node *node)
250 {
251 pointer_set_insert (encoder->initializer, node);
252 }
253
254 /* Output the cgraph EDGE to OB using ENCODER. */
255
256 static void
257 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
258 lto_cgraph_encoder_t encoder)
259 {
260 unsigned int uid;
261 intptr_t ref;
262 struct bitpack_d bp;
263
264 if (edge->indirect_unknown_callee)
265 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_indirect_edge);
266 else
267 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_edge);
268
269 ref = lto_cgraph_encoder_lookup (encoder, edge->caller);
270 gcc_assert (ref != LCC_NOT_FOUND);
271 lto_output_sleb128_stream (ob->main_stream, ref);
272
273 if (!edge->indirect_unknown_callee)
274 {
275 ref = lto_cgraph_encoder_lookup (encoder, edge->callee);
276 gcc_assert (ref != LCC_NOT_FOUND);
277 lto_output_sleb128_stream (ob->main_stream, ref);
278 }
279
280 lto_output_sleb128_stream (ob->main_stream, edge->count);
281
282 bp = bitpack_create (ob->main_stream);
283 uid = (!gimple_has_body_p (edge->caller->decl)
284 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt));
285 bp_pack_value (&bp, uid, HOST_BITS_PER_INT);
286 bp_pack_value (&bp, edge->inline_failed, HOST_BITS_PER_INT);
287 bp_pack_value (&bp, edge->frequency, HOST_BITS_PER_INT);
288 bp_pack_value (&bp, edge->call_stmt_size, HOST_BITS_PER_INT);
289 bp_pack_value (&bp, edge->call_stmt_time, HOST_BITS_PER_INT);
290 bp_pack_value (&bp, edge->loop_nest, 30);
291 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
292 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
293 bp_pack_value (&bp, edge->can_throw_external, 1);
294 if (edge->indirect_unknown_callee)
295 {
296 int flags = edge->indirect_info->ecf_flags;
297 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
298 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
299 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
300 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
301 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
302 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
303 /* Flags that should not appear on indirect calls. */
304 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
305 | ECF_MAY_BE_ALLOCA
306 | ECF_SIBCALL
307 | ECF_LEAF
308 | ECF_NOVOPS)));
309 }
310 lto_output_bitpack (&bp);
311 }
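/* The bitpack written above must be unpacked in exactly the same order and
   with the same widths by input_edge below. A minimal illustration of that
   invariant with two hypothetical fields A (8 bits) and B (1 bit):

       writer:                              reader:
       bp = bitpack_create (stream);        bp = lto_input_bitpack (ib);
       bp_pack_value (&bp, a, 8);           a = bp_unpack_value (&bp, 8);
       bp_pack_value (&bp, b, 1);           b = bp_unpack_value (&bp, 1);
       lto_output_bitpack (&bp);

   Reordering or resizing a field on one side without the other silently
   corrupts every field that follows it in the pack.  */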
312
313 /* Return true if LIST contains references from other partitions. */
314
315 bool
316 referenced_from_other_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
317 varpool_node_set vset)
318 {
319 int i;
320 struct ipa_ref *ref;
321 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
322 {
323 if (ref->refering_type == IPA_REF_CGRAPH)
324 {
325 if (ipa_ref_refering_node (ref)->in_other_partition
326 || !cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
327 return true;
328 }
329 else
330 {
331 if (ipa_ref_refering_varpool_node (ref)->in_other_partition
332 || !varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
333 vset))
334 return true;
335 }
336 }
337 return false;
338 }
339
340 /* Return true when NODE is reachable from another partition. */
341
342 bool
343 reachable_from_other_partition_p (struct cgraph_node *node, cgraph_node_set set)
344 {
345 struct cgraph_edge *e;
346 if (!node->analyzed)
347 return false;
348 if (node->global.inlined_to)
349 return false;
350 for (e = node->callers; e; e = e->next_caller)
351 if (e->caller->in_other_partition
352 || !cgraph_node_in_set_p (e->caller, set))
353 return true;
354 return false;
355 }
356
357 /* Return true if LIST contains references from this partition. */
358
359 bool
360 referenced_from_this_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
361 varpool_node_set vset)
362 {
363 int i;
364 struct ipa_ref *ref;
365 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
366 {
367 if (ref->refering_type == IPA_REF_CGRAPH)
368 {
369 if (cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
370 return true;
371 }
372 else
373 {
374 if (varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
375 vset))
376 return true;
377 }
378 }
379 return false;
380 }
381
382 /* Return true when NODE is reachable from this partition. */
383
384 bool
385 reachable_from_this_partition_p (struct cgraph_node *node, cgraph_node_set set)
386 {
387 struct cgraph_edge *e;
388 for (e = node->callers; e; e = e->next_caller)
389 if (cgraph_node_in_set_p (e->caller, set))
390 return true;
391 return false;
392 }
393
394 /* Output the cgraph NODE to OB. ENCODER is used to find the
395    reference number of NODE->inlined_to. SET is the set of nodes we
396    are writing to the current file. If NODE is not in SET, then NODE
397    is a boundary of a cgraph_node_set and we pretend NODE just has a
398    decl and no callees. VSET is the set of varpool nodes being written,
399    used to decide whether NODE is referenced from other partitions. */
401
402 static void
403 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
404 lto_cgraph_encoder_t encoder, cgraph_node_set set,
405 varpool_node_set vset)
406 {
407 unsigned int tag;
408 struct bitpack_d bp;
409 bool boundary_p;
410 intptr_t ref;
411 bool in_other_partition = false;
412 struct cgraph_node *clone_of;
413
414 boundary_p = !cgraph_node_in_set_p (node, set);
415
416 if (node->analyzed && !boundary_p)
417 tag = LTO_cgraph_analyzed_node;
418 else
419 tag = LTO_cgraph_unavail_node;
420
421 lto_output_uleb128_stream (ob->main_stream, tag);
422
423 /* In WPA mode, we only output part of the call-graph. Also, we
424    fake cgraph node attributes. There are two cases we care about:
425
426 Boundary nodes: There are nodes that are not part of SET but are
427 called from within SET. We artificially make them look like
428 externally visible nodes with no function body.
429
430 Cherry-picked nodes: These are nodes we pulled from other
431    translation units into SET during IPA-inlining. We make them
432    local static nodes to prevent clashes with other local statics. */
433 if (boundary_p && node->analyzed)
434 {
435 /* Inline clones can not be part of boundary.
436 gcc_assert (!node->global.inlined_to);
437
438          FIXME: At the moment they can be, when the partition contains an inline
439          clone that is a clone of an inline clone from outside the partition. We
440          can reshape the clone tree and make another node the root, but it needs
441          a bit of extra work and will be promptly done by cgraph_remove_node
442          after reading back. */
443 in_other_partition = 1;
444 }
445
446 clone_of = node->clone_of;
447 while (clone_of
448 && (ref = lto_cgraph_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
449 if (clone_of->prev_sibling_clone)
450 clone_of = clone_of->prev_sibling_clone;
451 else
452 clone_of = clone_of->clone_of;
453
454   if (tag == LTO_cgraph_analyzed_node)
455 gcc_assert (clone_of || !node->clone_of);
456 if (!clone_of)
457 lto_output_sleb128_stream (ob->main_stream, LCC_NOT_FOUND);
458 else
459 lto_output_sleb128_stream (ob->main_stream, ref);
460
461
462 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
463 lto_output_sleb128_stream (ob->main_stream, node->count);
464 lto_output_sleb128_stream (ob->main_stream, node->count_materialization_scale);
465
466 if (tag == LTO_cgraph_analyzed_node)
467 {
468 if (node->global.inlined_to)
469 {
470 ref = lto_cgraph_encoder_lookup (encoder, node->global.inlined_to);
471 gcc_assert (ref != LCC_NOT_FOUND);
472 }
473 else
474 ref = LCC_NOT_FOUND;
475
476 lto_output_sleb128_stream (ob->main_stream, ref);
477 }
478
479 if (node->same_comdat_group && !boundary_p)
480 {
481 ref = lto_cgraph_encoder_lookup (encoder, node->same_comdat_group);
482 gcc_assert (ref != LCC_NOT_FOUND);
483 }
484 else
485 ref = LCC_NOT_FOUND;
486 lto_output_sleb128_stream (ob->main_stream, ref);
487
488 bp = bitpack_create (ob->main_stream);
489 bp_pack_value (&bp, node->local.local, 1);
490 bp_pack_value (&bp, node->local.externally_visible, 1);
491 bp_pack_value (&bp, node->local.finalized, 1);
492 bp_pack_value (&bp, node->local.can_change_signature, 1);
493 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
494 bp_pack_value (&bp, node->needed, 1);
495 bp_pack_value (&bp, node->address_taken, 1);
496 bp_pack_value (&bp, node->abstract_and_needed, 1);
497 bp_pack_value (&bp, tag == LTO_cgraph_analyzed_node
498 && !DECL_EXTERNAL (node->decl)
499 && !DECL_COMDAT (node->decl)
500 && (reachable_from_other_partition_p (node, set)
501 || referenced_from_other_partition_p (&node->ref_list, set, vset)), 1);
502 bp_pack_value (&bp, node->lowered, 1);
503 bp_pack_value (&bp, in_other_partition, 1);
504 bp_pack_value (&bp, node->alias, 1);
505 bp_pack_value (&bp, node->frequency, 2);
506 bp_pack_value (&bp, node->only_called_at_startup, 1);
507 bp_pack_value (&bp, node->only_called_at_exit, 1);
508 lto_output_bitpack (&bp);
509 lto_output_uleb128_stream (ob->main_stream, node->resolution);
510
511 if (node->same_body)
512 {
513 struct cgraph_node *alias;
514 unsigned long alias_count = 1;
515 for (alias = node->same_body; alias->next; alias = alias->next)
516 alias_count++;
517 lto_output_uleb128_stream (ob->main_stream, alias_count);
518 do
519 {
520 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
521 alias->decl);
522 if (alias->thunk.thunk_p)
523 {
524 lto_output_uleb128_stream
525 (ob->main_stream,
526 1 + (alias->thunk.this_adjusting != 0) * 2
527 + (alias->thunk.virtual_offset_p != 0) * 4);
528 lto_output_uleb128_stream (ob->main_stream,
529 alias->thunk.fixed_offset);
530 lto_output_uleb128_stream (ob->main_stream,
531 alias->thunk.virtual_value);
532 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
533 alias->thunk.alias);
534 }
535 else
536 {
537 lto_output_uleb128_stream (ob->main_stream, 0);
538 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
539 alias->thunk.alias);
540 }
541 gcc_assert (cgraph_get_node (alias->thunk.alias) == node);
542 lto_output_uleb128_stream (ob->main_stream, alias->resolution);
543 alias = alias->previous;
544 }
545 while (alias);
546 }
547 else
548 lto_output_uleb128_stream (ob->main_stream, 0);
549 }
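/* A worked example of the same-body alias/thunk tag emitted above: the value
   is 1 + this_adjusting * 2 + virtual_offset_p * 4, so a this-adjusting thunk
   without a virtual offset is streamed as 1 + 2 + 0 = 3, while a plain alias
   is streamed as 0. input_node recovers the flags as (type & 2) and
   (type & 4) and treats type == 0 as a non-thunk alias.  */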
550
551 /* Output the varpool NODE to OB.
552    If NODE is not in VSET, then NODE is a boundary. */
553
554 static void
555 lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
556 lto_varpool_encoder_t varpool_encoder,
557 cgraph_node_set set, varpool_node_set vset)
558 {
559 bool boundary_p = !varpool_node_in_set_p (node, vset) && node->analyzed;
560 struct bitpack_d bp;
561 struct varpool_node *alias;
562 int count = 0;
563 int ref;
564
565 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
566 bp = bitpack_create (ob->main_stream);
567 bp_pack_value (&bp, node->externally_visible, 1);
568 bp_pack_value (&bp, node->force_output, 1);
569 bp_pack_value (&bp, node->finalized, 1);
570 bp_pack_value (&bp, node->alias, 1);
571 gcc_assert (!node->alias || !node->extra_name);
572 gcc_assert (node->finalized || !node->analyzed);
573 gcc_assert (node->needed);
574 /* Constant pool initializers can be de-unified into individual ltrans units.
575      FIXME: Alternatively, at -Os we may want to avoid generating the local
576      labels for them and share them across LTRANS partitions. */
577 if (DECL_IN_CONSTANT_POOL (node->decl)
578 && !DECL_COMDAT (node->decl))
579 {
580       bp_pack_value (&bp, 0, 1); /* used_from_other_partition. */
581 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
582 }
583 else
584 {
585 bp_pack_value (&bp, node->analyzed
586 && referenced_from_other_partition_p (&node->ref_list,
587 set, vset), 1);
588 bp_pack_value (&bp, boundary_p, 1); /* in_other_partition. */
589 }
590 /* Also emit any extra name aliases. */
591 for (alias = node->extra_name; alias; alias = alias->next)
592 count++;
593 bp_pack_value (&bp, count != 0, 1);
594 lto_output_bitpack (&bp);
595 if (node->same_comdat_group && !boundary_p)
596 {
597 ref = lto_varpool_encoder_lookup (varpool_encoder, node->same_comdat_group);
598 gcc_assert (ref != LCC_NOT_FOUND);
599 }
600 else
601 ref = LCC_NOT_FOUND;
602 lto_output_sleb128_stream (ob->main_stream, ref);
603 lto_output_uleb128_stream (ob->main_stream, node->resolution);
604
605 if (count)
606 {
607 lto_output_uleb128_stream (ob->main_stream, count);
608 for (alias = node->extra_name; alias; alias = alias->next)
609 {
610 lto_output_var_decl_index (ob->decl_state, ob->main_stream, alias->decl);
611 lto_output_uleb128_stream (ob->main_stream, alias->resolution);
612 }
613 }
614 }
615
616 /* Output the reference REF to OB, using ENCODER to look up cgraph nodes
617    and VARPOOL_ENCODER to look up varpool nodes. */
618
619 static void
620 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
621 lto_cgraph_encoder_t encoder,
622 lto_varpool_encoder_t varpool_encoder)
623 {
624 struct bitpack_d bp;
625 bp = bitpack_create (ob->main_stream);
626 bp_pack_value (&bp, ref->refered_type, 1);
627 bp_pack_value (&bp, ref->use, 2);
628 lto_output_bitpack (&bp);
629 if (ref->refered_type == IPA_REF_CGRAPH)
630 {
631 int nref = lto_cgraph_encoder_lookup (encoder, ipa_ref_node (ref));
632 gcc_assert (nref != LCC_NOT_FOUND);
633 lto_output_sleb128_stream (ob->main_stream, nref);
634 }
635 else
636 {
637 int nref = lto_varpool_encoder_lookup (varpool_encoder,
638 ipa_ref_varpool_node (ref));
639 gcc_assert (nref != LCC_NOT_FOUND);
640 lto_output_sleb128_stream (ob->main_stream, nref);
641 }
642 }
643
644 /* Stream out profile_summary to OB. */
645
646 static void
647 output_profile_summary (struct lto_simple_output_block *ob)
648 {
649 if (profile_info)
650 {
651       /* We do not output num, sum_all and run_max, as they are not used by
652          GCC profile feedback and they are difficult to merge from multiple
653          units. */
654 gcc_assert (profile_info->runs);
655 lto_output_uleb128_stream (ob->main_stream, profile_info->runs);
656 lto_output_uleb128_stream (ob->main_stream, profile_info->sum_max);
657 }
658 else
659 lto_output_uleb128_stream (ob->main_stream, 0);
660 }
661
662 /* Add NODE into ENCODER as well as the nodes it is cloned from.
663    Do it in a way so that the nodes being cloned from appear first. */
664
665 static void
666 add_node_to (lto_cgraph_encoder_t encoder, struct cgraph_node *node,
667 bool include_body)
668 {
669 if (node->clone_of)
670 add_node_to (encoder, node->clone_of, include_body);
671 else if (include_body)
672 lto_set_cgraph_encoder_encode_body (encoder, node);
673 lto_cgraph_encoder_encode (encoder, node);
674 }
675
676 /* Add all references in LIST to encoders. */
677
678 static void
679 add_references (lto_cgraph_encoder_t encoder,
680 lto_varpool_encoder_t varpool_encoder,
681 struct ipa_ref_list *list)
682 {
683 int i;
684 struct ipa_ref *ref;
685 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
686 if (ref->refered_type == IPA_REF_CGRAPH)
687 add_node_to (encoder, ipa_ref_node (ref), false);
688 else
689 {
690 struct varpool_node *vnode = ipa_ref_varpool_node (ref);
691 lto_varpool_encoder_encode (varpool_encoder, vnode);
692 }
693 }
694
695 /* Output all callees or indirect outgoing edges. EDGE must be the first such
696 edge. */
697
698 static void
699 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
700 struct lto_simple_output_block *ob,
701 lto_cgraph_encoder_t encoder)
702 {
703 if (!edge)
704 return;
705
706   /* Output edges in backward direction, so the reconstructed callgraph
707      matches and it is easy to associate call sites with the IPA pass
708      summaries. */
708 while (edge->next_callee)
709 edge = edge->next_callee;
710 for (; edge; edge = edge->prev_callee)
711 lto_output_edge (ob, edge, encoder);
712 }
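/* A small illustration of the backward emission above, assuming that
   cgraph_create_edge prepends new edges to the caller's callee list: if
   NODE's callees are E1 -> E2 -> E3, they are written as E3, E2, E1, and
   re-creating them in that order while reading prepends each one, which
   reconstructs the original E1 -> E2 -> E3 ordering so call sites line up
   with the IPA pass summaries.  */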
713
714 /* Output the IPA references of the nodes in SET and VSET. */
715
716 static void
717 output_refs (cgraph_node_set set, varpool_node_set vset,
718 lto_cgraph_encoder_t encoder,
719 lto_varpool_encoder_t varpool_encoder)
720 {
721 cgraph_node_set_iterator csi;
722 varpool_node_set_iterator vsi;
723 struct lto_simple_output_block *ob;
724 int count;
725 struct ipa_ref *ref;
726 int i;
727
728 ob = lto_create_simple_output_block (LTO_section_refs);
729
730 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
731 {
732 struct cgraph_node *node = csi_node (csi);
733
734 count = ipa_ref_list_nreferences (&node->ref_list);
735 if (count)
736 {
737 lto_output_uleb128_stream (ob->main_stream, count);
738 lto_output_uleb128_stream (ob->main_stream,
739 lto_cgraph_encoder_lookup (encoder, node));
740 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
741 lto_output_ref (ob, ref, encoder, varpool_encoder);
742 }
743 }
744
745 lto_output_uleb128_stream (ob->main_stream, 0);
746
747 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
748 {
749 struct varpool_node *node = vsi_node (vsi);
750
751 count = ipa_ref_list_nreferences (&node->ref_list);
752 if (count)
753 {
754 lto_output_uleb128_stream (ob->main_stream, count);
755 lto_output_uleb128_stream (ob->main_stream,
756 lto_varpool_encoder_lookup (varpool_encoder,
757 node));
758 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
759 lto_output_ref (ob, ref, encoder, varpool_encoder);
760 }
761 }
762
763 lto_output_uleb128_stream (ob->main_stream, 0);
764
765 lto_destroy_simple_output_block (ob);
766 }
767
768 /* Find out all cgraph and varpool nodes we want to encode in the current
769    unit and insert them into the encoders. */
770 void
771 compute_ltrans_boundary (struct lto_out_decl_state *state,
772 cgraph_node_set set, varpool_node_set vset)
773 {
774 struct cgraph_node *node;
775 cgraph_node_set_iterator csi;
776 varpool_node_set_iterator vsi;
777 struct cgraph_edge *edge;
778 int i;
779 lto_cgraph_encoder_t encoder;
780 lto_varpool_encoder_t varpool_encoder;
781
782 encoder = state->cgraph_node_encoder = lto_cgraph_encoder_new ();
783 varpool_encoder = state->varpool_node_encoder = lto_varpool_encoder_new ();
784
785 /* Go over all the nodes in SET and assign references. */
786 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
787 {
788 node = csi_node (csi);
789 add_node_to (encoder, node, true);
790 add_references (encoder, varpool_encoder, &node->ref_list);
791 }
792 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
793 {
794 struct varpool_node *vnode = vsi_node (vsi);
795 gcc_assert (!vnode->alias);
796 lto_varpool_encoder_encode (varpool_encoder, vnode);
797 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
798 add_references (encoder, varpool_encoder, &vnode->ref_list);
799 }
800   /* Also pickle in the initializers of all referenced read-only variables
801      to help folding. Constant pool variables are not shared, so we must
802      pickle those too. */
803 for (i = 0; i < lto_varpool_encoder_size (varpool_encoder); i++)
804 {
805 struct varpool_node *vnode = lto_varpool_encoder_deref (varpool_encoder, i);
806 if (DECL_INITIAL (vnode->decl)
807 && !lto_varpool_encoder_encode_initializer_p (varpool_encoder,
808 vnode)
809 && const_value_known_p (vnode->decl))
810 {
811 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
812 add_references (encoder, varpool_encoder, &vnode->ref_list);
813 }
814 }
815
816 /* Go over all the nodes again to include callees that are not in
817 SET. */
818 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
819 {
820 node = csi_node (csi);
821 for (edge = node->callees; edge; edge = edge->next_callee)
822 {
823 struct cgraph_node *callee = edge->callee;
824 if (!cgraph_node_in_set_p (callee, set))
825 {
826 /* We should have moved all the inlines. */
827 gcc_assert (!callee->global.inlined_to);
828 add_node_to (encoder, callee, false);
829 }
830 }
831 }
832 }
833
834 /* Output the part of the cgraph in SET. */
835
836 void
837 output_cgraph (cgraph_node_set set, varpool_node_set vset)
838 {
839 struct cgraph_node *node;
840 struct lto_simple_output_block *ob;
841 cgraph_node_set_iterator csi;
842 int i, n_nodes;
843 lto_cgraph_encoder_t encoder;
844 lto_varpool_encoder_t varpool_encoder;
845 struct cgraph_asm_node *can;
846 static bool asm_nodes_output = false;
847
848 if (flag_wpa)
849 output_cgraph_opt_summary (set);
850
851 ob = lto_create_simple_output_block (LTO_section_cgraph);
852
853 output_profile_summary (ob);
854
855 /* An encoder for cgraph nodes should have been created by
856 ipa_write_summaries_1. */
857 gcc_assert (ob->decl_state->cgraph_node_encoder);
858 gcc_assert (ob->decl_state->varpool_node_encoder);
859 encoder = ob->decl_state->cgraph_node_encoder;
860 varpool_encoder = ob->decl_state->varpool_node_encoder;
861
862   /* Write out the nodes. We must first output a node and then its clones,
863      otherwise, when reading the node back, there would be nothing to clone
864      from. */
865 n_nodes = lto_cgraph_encoder_size (encoder);
866 for (i = 0; i < n_nodes; i++)
867 {
868 node = lto_cgraph_encoder_deref (encoder, i);
869 lto_output_node (ob, node, encoder, set, vset);
870 }
871
872 /* Go over the nodes in SET again to write edges. */
873 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
874 {
875 node = csi_node (csi);
876 output_outgoing_cgraph_edges (node->callees, ob, encoder);
877 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
878 }
879
880 lto_output_uleb128_stream (ob->main_stream, 0);
881
882   /* Emit toplevel asms.
883      When doing WPA we must output every asm just once. Since we do not
884      partition asm nodes at all, output them to the first output. This is a
885      kind of hack, but it should work well. */
886 if (!asm_nodes_output)
887 {
888 asm_nodes_output = true;
889 for (can = cgraph_asm_nodes; can; can = can->next)
890 {
891 int len = TREE_STRING_LENGTH (can->asm_str);
892 lto_output_uleb128_stream (ob->main_stream, len);
893 for (i = 0; i < len; ++i)
894 lto_output_1_stream (ob->main_stream,
895 TREE_STRING_POINTER (can->asm_str)[i]);
896 }
897 }
898
899 lto_output_uleb128_stream (ob->main_stream, 0);
900
901 lto_destroy_simple_output_block (ob);
902 output_varpool (set, vset);
903 output_refs (set, vset, encoder, varpool_encoder);
904 }
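/* A summary sketch of the per-file section layout kept in step by the writer
   above and the reader in input_cgraph; the exact record formats are defined
   by the lto_output_* routines:

       LTO_section_cgraph   - profile summary, nodes, edges, toplevel asms
       LTO_section_varpool  - varpool nodes                 (output_varpool)
       LTO_section_refs     - ipa_ref lists of both kinds   (output_refs)  */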
905
906 /* Overwrite the information in NODE based on FILE_DATA, TAG, BP and
907    RESOLUTION. This is called either to initialize NODE or to replace
908    the values in it, for instance because the first time we saw it,
909    the function body was not available but now it is. BP is a bitpack
910    with all the bitflags for NODE read from the stream. */
912
913 static void
914 input_overwrite_node (struct lto_file_decl_data *file_data,
915 struct cgraph_node *node,
916 enum LTO_cgraph_tags tag,
917 struct bitpack_d *bp,
918 enum ld_plugin_symbol_resolution resolution)
919 {
920 node->aux = (void *) tag;
921 node->local.lto_file_data = file_data;
922
923 node->local.local = bp_unpack_value (bp, 1);
924 node->local.externally_visible = bp_unpack_value (bp, 1);
925 node->local.finalized = bp_unpack_value (bp, 1);
926 node->local.can_change_signature = bp_unpack_value (bp, 1);
927 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
928 node->needed = bp_unpack_value (bp, 1);
929 node->address_taken = bp_unpack_value (bp, 1);
930 node->abstract_and_needed = bp_unpack_value (bp, 1);
931 node->reachable_from_other_partition = bp_unpack_value (bp, 1);
932 node->lowered = bp_unpack_value (bp, 1);
933 node->analyzed = tag == LTO_cgraph_analyzed_node;
934 node->in_other_partition = bp_unpack_value (bp, 1);
935 if (node->in_other_partition
936       /* Avoid updating the decl when we are seeing just an inline clone.
937          When inlining a function that has functions already inlined into it,
938          we produce clones of inline clones.
939
940          WPA partitioning might put each clone into a different unit and
941          we might end up streaming an inline clone from another partition
942          to support the clone we are interested in. */
943 && (!node->clone_of
944 || node->clone_of->decl != node->decl))
945 {
946 DECL_EXTERNAL (node->decl) = 1;
947 TREE_STATIC (node->decl) = 0;
948 }
949 node->alias = bp_unpack_value (bp, 1);
950 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
951 node->only_called_at_startup = bp_unpack_value (bp, 1);
952 node->only_called_at_exit = bp_unpack_value (bp, 1);
953 node->resolution = resolution;
954 }
955
956 /* Output the part of the varpool in SET and VSET. */
957
958 static void
959 output_varpool (cgraph_node_set set, varpool_node_set vset)
960 {
961 struct lto_simple_output_block *ob = lto_create_simple_output_block (LTO_section_varpool);
962 lto_varpool_encoder_t varpool_encoder = ob->decl_state->varpool_node_encoder;
963 int len = lto_varpool_encoder_size (varpool_encoder), i;
964
965 lto_output_uleb128_stream (ob->main_stream, len);
966
967   /* Write out the nodes in the order given by the varpool encoder. */
970 for (i = 0; i < len; i++)
971 {
972 lto_output_varpool_node (ob, lto_varpool_encoder_deref (varpool_encoder, i),
973 varpool_encoder,
974 set, vset);
975 }
976
977 lto_destroy_simple_output_block (ob);
978 }
979
980 /* Read a node from input block IB. TAG is the node's tag just read.
981    Return the node read or overwritten. */
982
983 static struct cgraph_node *
984 input_node (struct lto_file_decl_data *file_data,
985 struct lto_input_block *ib,
986 enum LTO_cgraph_tags tag,
987 VEC(cgraph_node_ptr, heap) *nodes)
988 {
989 tree fn_decl;
990 struct cgraph_node *node;
991 struct bitpack_d bp;
992 unsigned decl_index;
993 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
994 unsigned long same_body_count = 0;
995 int clone_ref;
996 enum ld_plugin_symbol_resolution resolution;
997
998 clone_ref = lto_input_sleb128 (ib);
999
1000 decl_index = lto_input_uleb128 (ib);
1001 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1002
1003 if (clone_ref != LCC_NOT_FOUND)
1004 {
1005 node = cgraph_clone_node (VEC_index (cgraph_node_ptr, nodes, clone_ref), fn_decl,
1006 0, CGRAPH_FREQ_BASE, 0, false, NULL);
1007 }
1008 else
1009 node = cgraph_get_create_node (fn_decl);
1010
1011 node->count = lto_input_sleb128 (ib);
1012 node->count_materialization_scale = lto_input_sleb128 (ib);
1013
1014 if (tag == LTO_cgraph_analyzed_node)
1015 ref = lto_input_sleb128 (ib);
1016
1017 ref2 = lto_input_sleb128 (ib);
1018
1019 /* Make sure that we have not read this node before. Nodes that
1020 have already been read will have their tag stored in the 'aux'
1021 field. Since built-in functions can be referenced in multiple
1022 functions, they are expected to be read more than once. */
1023 if (node->aux && !DECL_IS_BUILTIN (node->decl))
1024 internal_error ("bytecode stream: found multiple instances of cgraph "
1025 "node %d", node->uid);
1026
1027 bp = lto_input_bitpack (ib);
1028 resolution = (enum ld_plugin_symbol_resolution)lto_input_uleb128 (ib);
1029 input_overwrite_node (file_data, node, tag, &bp, resolution);
1030
1031 /* Store a reference for now, and fix up later to be a pointer. */
1032 node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
1033
1034 /* Store a reference for now, and fix up later to be a pointer. */
1035 node->same_comdat_group = (cgraph_node_ptr) (intptr_t) ref2;
1036
1037 same_body_count = lto_input_uleb128 (ib);
1038 while (same_body_count-- > 0)
1039 {
1040 tree alias_decl;
1041 int type;
1042 struct cgraph_node *alias;
1043 decl_index = lto_input_uleb128 (ib);
1044 alias_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1045 type = lto_input_uleb128 (ib);
1046 if (!type)
1047 {
1048 tree real_alias;
1049 decl_index = lto_input_uleb128 (ib);
1050 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1051 alias = cgraph_same_body_alias (node, alias_decl, real_alias);
1052 }
1053 else
1054 {
1055 HOST_WIDE_INT fixed_offset = lto_input_uleb128 (ib);
1056 HOST_WIDE_INT virtual_value = lto_input_uleb128 (ib);
1057 tree real_alias;
1058 decl_index = lto_input_uleb128 (ib);
1059 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1060 alias = cgraph_add_thunk (node, alias_decl, fn_decl, type & 2, fixed_offset,
1061 virtual_value,
1062 (type & 4) ? size_int (virtual_value) : NULL_TREE,
1063 real_alias);
1064 }
1065 gcc_assert (alias);
1066 alias->resolution = (enum ld_plugin_symbol_resolution)lto_input_uleb128 (ib);
1067 }
1068 return node;
1069 }
1070
1071 /* Read a varpool node from input block IB.
1072    Return the node read or overwritten. */
1073
1074 static struct varpool_node *
1075 input_varpool_node (struct lto_file_decl_data *file_data,
1076 struct lto_input_block *ib)
1077 {
1078 int decl_index;
1079 tree var_decl;
1080 struct varpool_node *node;
1081 struct bitpack_d bp;
1082 bool aliases_p;
1083 int count;
1084 int ref = LCC_NOT_FOUND;
1085
1086 decl_index = lto_input_uleb128 (ib);
1087 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1088 node = varpool_node (var_decl);
1089 node->lto_file_data = file_data;
1090
1091 bp = lto_input_bitpack (ib);
1092 node->externally_visible = bp_unpack_value (&bp, 1);
1093 node->force_output = bp_unpack_value (&bp, 1);
1094 node->finalized = bp_unpack_value (&bp, 1);
1095 node->alias = bp_unpack_value (&bp, 1);
1096 node->analyzed = node->finalized;
1097 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1098 node->in_other_partition = bp_unpack_value (&bp, 1);
1099 if (node->in_other_partition)
1100 {
1101 DECL_EXTERNAL (node->decl) = 1;
1102 TREE_STATIC (node->decl) = 0;
1103 }
1104 aliases_p = bp_unpack_value (&bp, 1);
1105 if (node->finalized)
1106 varpool_mark_needed_node (node);
1107 ref = lto_input_sleb128 (ib);
1108 /* Store a reference for now, and fix up later to be a pointer. */
1109 node->same_comdat_group = (struct varpool_node *) (intptr_t) ref;
1110 node->resolution = (enum ld_plugin_symbol_resolution)lto_input_uleb128 (ib);
1111 if (aliases_p)
1112 {
1113 count = lto_input_uleb128 (ib);
1114 for (; count > 0; count --)
1115 {
1116 tree decl = lto_file_decl_data_get_var_decl (file_data,
1117 lto_input_uleb128 (ib));
1118 struct varpool_node *alias;
1119 alias = varpool_extra_name_alias (decl, var_decl);
1120 alias->resolution = (enum ld_plugin_symbol_resolution)lto_input_uleb128 (ib);
1121 }
1122 }
1123 return node;
1124 }
1125
1126 /* Read an ipa_ref from input block IB and record it as a reference from
1127    REFERING_NODE or REFERING_VARPOOL_NODE, resolving its target through
1128    NODES or VARPOOL_NODES. */
1128
1129 static void
1130 input_ref (struct lto_input_block *ib,
1131 struct cgraph_node *refering_node,
1132 struct varpool_node *refering_varpool_node,
1133 VEC(cgraph_node_ptr, heap) *nodes,
1134 VEC(varpool_node_ptr, heap) *varpool_nodes)
1135 {
1136 struct cgraph_node *node = NULL;
1137 struct varpool_node *varpool_node = NULL;
1138 struct bitpack_d bp;
1139 enum ipa_ref_type type;
1140 enum ipa_ref_use use;
1141
1142 bp = lto_input_bitpack (ib);
1143 type = (enum ipa_ref_type) bp_unpack_value (&bp, 1);
1144 use = (enum ipa_ref_use) bp_unpack_value (&bp, 2);
1145 if (type == IPA_REF_CGRAPH)
1146 node = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1147 else
1148 varpool_node = VEC_index (varpool_node_ptr, varpool_nodes, lto_input_sleb128 (ib));
1149 ipa_record_reference (refering_node, refering_varpool_node,
1150 node, varpool_node, use, NULL);
1151 }
1152
1153 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1154 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1155 edge being read is indirect (in the sense that it has
1156 indirect_unknown_callee set). */
1157
1158 static void
1159 input_edge (struct lto_input_block *ib, VEC(cgraph_node_ptr, heap) *nodes,
1160 bool indirect)
1161 {
1162 struct cgraph_node *caller, *callee;
1163 struct cgraph_edge *edge;
1164 unsigned int stmt_id;
1165 gcov_type count;
1166 int freq;
1167 unsigned int nest;
1168 cgraph_inline_failed_t inline_failed;
1169 struct bitpack_d bp;
1170 int ecf_flags = 0;
1171 int call_stmt_time, call_stmt_size;
1172
1173 caller = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1174 if (caller == NULL || caller->decl == NULL_TREE)
1175 internal_error ("bytecode stream: no caller found while reading edge");
1176
1177 if (!indirect)
1178 {
1179 callee = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1180 if (callee == NULL || callee->decl == NULL_TREE)
1181 internal_error ("bytecode stream: no callee found while reading edge");
1182 }
1183 else
1184 callee = NULL;
1185
1186 count = (gcov_type) lto_input_sleb128 (ib);
1187
1188 bp = lto_input_bitpack (ib);
1189 stmt_id = (unsigned int) bp_unpack_value (&bp, HOST_BITS_PER_INT);
1190 inline_failed = (cgraph_inline_failed_t) bp_unpack_value (&bp,
1191 HOST_BITS_PER_INT);
1192 freq = (int) bp_unpack_value (&bp, HOST_BITS_PER_INT);
1193 call_stmt_size = (int) bp_unpack_value (&bp, HOST_BITS_PER_INT);
1194 call_stmt_time = (int) bp_unpack_value (&bp, HOST_BITS_PER_INT);
1195 nest = (unsigned) bp_unpack_value (&bp, 30);
1196
1197 if (indirect)
1198 edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq, nest);
1199 else
1200 edge = cgraph_create_edge (caller, callee, NULL, count, freq, nest);
1201
1202 edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
1203 edge->lto_stmt_uid = stmt_id;
1204 edge->inline_failed = inline_failed;
1205 edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
1206 edge->can_throw_external = bp_unpack_value (&bp, 1);
1207 edge->call_stmt_size = call_stmt_size;
1208 edge->call_stmt_time = call_stmt_time;
1209 if (indirect)
1210 {
1211 if (bp_unpack_value (&bp, 1))
1212 ecf_flags |= ECF_CONST;
1213 if (bp_unpack_value (&bp, 1))
1214 ecf_flags |= ECF_PURE;
1215 if (bp_unpack_value (&bp, 1))
1216 ecf_flags |= ECF_NORETURN;
1217 if (bp_unpack_value (&bp, 1))
1218 ecf_flags |= ECF_MALLOC;
1219 if (bp_unpack_value (&bp, 1))
1220 ecf_flags |= ECF_NOTHROW;
1221 if (bp_unpack_value (&bp, 1))
1222 ecf_flags |= ECF_RETURNS_TWICE;
1223 edge->indirect_info->ecf_flags = ecf_flags;
1224 }
1225 }
1226
1227
1228 /* Read a cgraph from IB using the info in FILE_DATA. */
1229
1230 static VEC(cgraph_node_ptr, heap) *
1231 input_cgraph_1 (struct lto_file_decl_data *file_data,
1232 struct lto_input_block *ib)
1233 {
1234 enum LTO_cgraph_tags tag;
1235 VEC(cgraph_node_ptr, heap) *nodes = NULL;
1236 struct cgraph_node *node;
1237 unsigned i;
1238 unsigned HOST_WIDE_INT len;
1239
1240 tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
1241 while (tag)
1242 {
1243 if (tag == LTO_cgraph_edge)
1244 input_edge (ib, nodes, false);
1245 else if (tag == LTO_cgraph_indirect_edge)
1246 input_edge (ib, nodes, true);
1247 else
1248 {
1249 node = input_node (file_data, ib, tag,nodes);
1250 if (node == NULL || node->decl == NULL_TREE)
1251 internal_error ("bytecode stream: found empty cgraph node");
1252 VEC_safe_push (cgraph_node_ptr, heap, nodes, node);
1253 lto_cgraph_encoder_encode (file_data->cgraph_node_encoder, node);
1254 }
1255
1256 tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
1257 }
1258
1259 /* Input toplevel asms. */
1260 len = lto_input_uleb128 (ib);
1261 while (len)
1262 {
1263 char *str = (char *)xmalloc (len + 1);
1264 for (i = 0; i < len; ++i)
1265 str[i] = lto_input_1_unsigned (ib);
1266 cgraph_add_asm_node (build_string (len, str));
1267 free (str);
1268
1269 len = lto_input_uleb128 (ib);
1270 }
1271   /* AUX pointers should all be non-zero for nodes read from the stream. */
1272 #ifdef ENABLE_CHECKING
1273 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1274 gcc_assert (node->aux);
1275 #endif
1276 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1277 {
1278 int ref = (int) (intptr_t) node->global.inlined_to;
1279
1280       /* We share declarations of builtins, so we may read the same node twice. */
1281 if (!node->aux)
1282 continue;
1283 node->aux = NULL;
1284
1285 /* Fixup inlined_to from reference to pointer. */
1286 if (ref != LCC_NOT_FOUND)
1287 node->global.inlined_to = VEC_index (cgraph_node_ptr, nodes, ref);
1288 else
1289 node->global.inlined_to = NULL;
1290
1291 ref = (int) (intptr_t) node->same_comdat_group;
1292
1293 /* Fixup same_comdat_group from reference to pointer. */
1294 if (ref != LCC_NOT_FOUND)
1295 node->same_comdat_group = VEC_index (cgraph_node_ptr, nodes, ref);
1296 else
1297 node->same_comdat_group = NULL;
1298 }
1299 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1300 node->aux = (void *)1;
1301 return nodes;
1302 }
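/* A minimal sketch of the "reference now, pointer later" trick used above for
   inlined_to and same_comdat_group: while nodes are being read, the pointer
   field temporarily holds an encoder index, and once the whole NODES vector
   is available the index is swapped for the real pointer.

       node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
       ...
       ref = (int) (intptr_t) node->global.inlined_to;
       node->global.inlined_to
         = (ref != LCC_NOT_FOUND
            ? VEC_index (cgraph_node_ptr, nodes, ref) : NULL);  */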
1303
1304 /* Read a varpool from IB using the info in FILE_DATA. */
1305
1306 static VEC(varpool_node_ptr, heap) *
1307 input_varpool_1 (struct lto_file_decl_data *file_data,
1308 struct lto_input_block *ib)
1309 {
1310 unsigned HOST_WIDE_INT len;
1311 VEC(varpool_node_ptr, heap) *varpool = NULL;
1312 int i;
1313 struct varpool_node *node;
1314
1315 len = lto_input_uleb128 (ib);
1316 while (len)
1317 {
1318 VEC_safe_push (varpool_node_ptr, heap, varpool,
1319 input_varpool_node (file_data, ib));
1320 len--;
1321 }
1322 #ifdef ENABLE_CHECKING
1323 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1324 gcc_assert (!node->aux);
1325 #endif
1326 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1327 {
1328 int ref = (int) (intptr_t) node->same_comdat_group;
1329       /* We share declarations of builtins, so we may read the same node twice. */
1330 if (node->aux)
1331 continue;
1332 node->aux = (void *)1;
1333
1334 /* Fixup same_comdat_group from reference to pointer. */
1335 if (ref != LCC_NOT_FOUND)
1336 node->same_comdat_group = VEC_index (varpool_node_ptr, varpool, ref);
1337 else
1338 node->same_comdat_group = NULL;
1339 }
1340 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1341 node->aux = NULL;
1342 return varpool;
1343 }
1344
1345 /* Input ipa_refs. */
1346
1347 static void
1348 input_refs (struct lto_input_block *ib,
1349 VEC(cgraph_node_ptr, heap) *nodes,
1350 VEC(varpool_node_ptr, heap) *varpool)
1351 {
1352 int count;
1353 int idx;
1354 while (true)
1355 {
1356 struct cgraph_node *node;
1357 count = lto_input_uleb128 (ib);
1358 if (!count)
1359 break;
1360 idx = lto_input_uleb128 (ib);
1361 node = VEC_index (cgraph_node_ptr, nodes, idx);
1362 while (count)
1363 {
1364 input_ref (ib, node, NULL, nodes, varpool);
1365 count--;
1366 }
1367 }
1368 while (true)
1369 {
1370 struct varpool_node *node;
1371 count = lto_input_uleb128 (ib);
1372 if (!count)
1373 break;
1374 node = VEC_index (varpool_node_ptr, varpool, lto_input_uleb128 (ib));
1375 while (count)
1376 {
1377 input_ref (ib, NULL, node, nodes, varpool);
1378 count--;
1379 }
1380 }
1381 }
1382
1383
1384 static struct gcov_ctr_summary lto_gcov_summary;
1385
1386 /* Input profile_info from IB. */
1387 static void
1388 input_profile_summary (struct lto_input_block *ib,
1389 struct lto_file_decl_data *file_data)
1390 {
1391 unsigned int runs = lto_input_uleb128 (ib);
1392 if (runs)
1393 {
1394 file_data->profile_info.runs = runs;
1395 file_data->profile_info.sum_max = lto_input_uleb128 (ib);
1396 }
1397
1398 }
1399
1400 /* Rescale profile summaries to the same number of runs in the whole unit. */
1401
1402 static void
1403 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1404 {
1405 struct lto_file_decl_data *file_data;
1406 unsigned int j;
1407 gcov_unsigned_t max_runs = 0;
1408 struct cgraph_node *node;
1409 struct cgraph_edge *edge;
1410
1411   /* Find the unit with the maximal number of runs. If we ever get serious
1412      about roundoff errors, we might also consider computing the least
1413      common multiple. */
1414 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1415 if (max_runs < file_data->profile_info.runs)
1416 max_runs = file_data->profile_info.runs;
1417
1418 if (!max_runs)
1419 return;
1420
1421   /* Simple overflow check. We probably don't need to support that many train
1422      runs. Such a large value probably implies data corruption anyway. */
1423 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1424 {
1425 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1426 INT_MAX / REG_BR_PROB_BASE);
1427 return;
1428 }
1429
1430 profile_info = &lto_gcov_summary;
1431 lto_gcov_summary.runs = max_runs;
1432 lto_gcov_summary.sum_max = 0;
1433
1434 /* Rescale all units to the maximal number of runs.
1435 sum_max can not be easily merged, as we have no idea what files come from
1436 the same run. We do not use the info anyway, so leave it 0. */
1437 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1438 if (file_data->profile_info.runs)
1439 {
1440 int scale = ((REG_BR_PROB_BASE * max_runs
1441 + file_data->profile_info.runs / 2)
1442 / file_data->profile_info.runs);
1443 lto_gcov_summary.sum_max = MAX (lto_gcov_summary.sum_max,
1444 (file_data->profile_info.sum_max
1445 * scale
1446 + REG_BR_PROB_BASE / 2)
1447 / REG_BR_PROB_BASE);
1448 }
1449
1450 /* Watch roundoff errors. */
1451 if (lto_gcov_summary.sum_max < max_runs)
1452 lto_gcov_summary.sum_max = max_runs;
1453
1454   /* If merging already happened at WPA time, we are done. */
1455 if (flag_ltrans)
1456 return;
1457
1458 /* Now compute count_materialization_scale of each node.
1459 During LTRANS we already have values of count_materialization_scale
1460 computed, so just update them. */
1461 for (node = cgraph_nodes; node; node = node->next)
1462 if (node->local.lto_file_data
1463 && node->local.lto_file_data->profile_info.runs)
1464 {
1465 int scale;
1466
1467 scale =
1468 ((node->count_materialization_scale * max_runs
1469 + node->local.lto_file_data->profile_info.runs / 2)
1470 / node->local.lto_file_data->profile_info.runs);
1471 node->count_materialization_scale = scale;
1472 if (scale < 0)
1473 fatal_error ("Profile information in %s corrupted",
1474 file_data->file_name);
1475
1476 if (scale == REG_BR_PROB_BASE)
1477 continue;
1478 for (edge = node->callees; edge; edge = edge->next_callee)
1479 edge->count = ((edge->count * scale + REG_BR_PROB_BASE / 2)
1480 / REG_BR_PROB_BASE);
1481 node->count = ((node->count * scale + REG_BR_PROB_BASE / 2)
1482 / REG_BR_PROB_BASE);
1483 }
1484 }
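/* A worked example of the rescaling above, assuming REG_BR_PROB_BASE is
   10000: with two units that saw 2 and 6 training runs, max_runs is 6 and
   the 2-run unit gets

       scale = (10000 * 6 + 2 / 2) / 2 = 30000,

   so its counts are multiplied by scale / REG_BR_PROB_BASE = 3, putting both
   units on the same 6-run scale before summaries and edge counts are
   merged.  */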
1485
1486 /* Input and merge the cgraph from each of the .o files passed to
1487 lto1. */
1488
1489 void
1490 input_cgraph (void)
1491 {
1492 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1493 struct lto_file_decl_data *file_data;
1494 unsigned int j = 0;
1495 struct cgraph_node *node;
1496
1497 while ((file_data = file_data_vec[j++]))
1498 {
1499 const char *data;
1500 size_t len;
1501 struct lto_input_block *ib;
1502 VEC(cgraph_node_ptr, heap) *nodes;
1503 VEC(varpool_node_ptr, heap) *varpool;
1504
1505 ib = lto_create_simple_input_block (file_data, LTO_section_cgraph,
1506 &data, &len);
1507 if (!ib)
1508 fatal_error ("cannot find LTO cgraph in %s", file_data->file_name);
1509 input_profile_summary (ib, file_data);
1510 file_data->cgraph_node_encoder = lto_cgraph_encoder_new ();
1511 nodes = input_cgraph_1 (file_data, ib);
1512 lto_destroy_simple_input_block (file_data, LTO_section_cgraph,
1513 ib, data, len);
1514
1515 ib = lto_create_simple_input_block (file_data, LTO_section_varpool,
1516 &data, &len);
1517 if (!ib)
1518 fatal_error ("cannot find LTO varpool in %s", file_data->file_name);
1519 varpool = input_varpool_1 (file_data, ib);
1520 lto_destroy_simple_input_block (file_data, LTO_section_varpool,
1521 ib, data, len);
1522
1523 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1524 &data, &len);
1525 if (!ib)
1526 fatal_error("cannot find LTO section refs in %s", file_data->file_name);
1527 input_refs (ib, nodes, varpool);
1528 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1529 ib, data, len);
1530 if (flag_ltrans)
1531 input_cgraph_opt_summary (nodes);
1532 VEC_free (cgraph_node_ptr, heap, nodes);
1533 VEC_free (varpool_node_ptr, heap, varpool);
1534 }
1535
1536 merge_profile_summaries (file_data_vec);
1537
1538 /* Clear out the aux field that was used to store enough state to
1539 tell which nodes should be overwritten. */
1540 for (node = cgraph_nodes; node; node = node->next)
1541 {
1542 /* Some nodes may have been created by cgraph_node. This
1543 happens when the callgraph contains nested functions. If the
1544 node for the parent function was never emitted to the gimple
1545 file, cgraph_node will create a node for it when setting the
1546 context of the nested function. */
1547 if (node->local.lto_file_data)
1548 node->aux = NULL;
1549 }
1550 }
1551
1552 /* Return true when we need an optimization summary for NODE. */
1553
1554 static int
1555 output_cgraph_opt_summary_p (struct cgraph_node *node, cgraph_node_set set)
1556 {
1557 struct cgraph_edge *e;
1558
1559 if (cgraph_node_in_set_p (node, set))
1560 {
1561 for (e = node->callees; e; e = e->next_callee)
1562 if (e->indirect_info
1563 && e->indirect_info->thunk_delta != 0)
1564 return true;
1565
1566 for (e = node->indirect_calls; e; e = e->next_callee)
1567 if (e->indirect_info->thunk_delta != 0)
1568 return true;
1569 }
1570
1571 return (node->clone_of
1572 && (node->clone.tree_map
1573 || node->clone.args_to_skip
1574 || node->clone.combined_args_to_skip));
1575 }
1576
1577 /* Output optimization summary for EDGE to OB. */
1578 static void
1579 output_edge_opt_summary (struct output_block *ob,
1580 struct cgraph_edge *edge)
1581 {
1582 if (edge->indirect_info)
1583 lto_output_sleb128_stream (ob->main_stream,
1584 edge->indirect_info->thunk_delta);
1585 else
1586 lto_output_sleb128_stream (ob->main_stream, 0);
1587 }
1588
1589 /* Output optimization summary for NODE to OB. */
1590
1591 static void
1592 output_node_opt_summary (struct output_block *ob,
1593 struct cgraph_node *node,
1594 cgraph_node_set set)
1595 {
1596 unsigned int index;
1597 bitmap_iterator bi;
1598 struct ipa_replace_map *map;
1599 struct bitpack_d bp;
1600 int i;
1601 struct cgraph_edge *e;
1602
1603 lto_output_uleb128_stream (ob->main_stream,
1604 bitmap_count_bits (node->clone.args_to_skip));
1605 EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
1606 lto_output_uleb128_stream (ob->main_stream, index);
1607 lto_output_uleb128_stream (ob->main_stream,
1608 bitmap_count_bits (node->clone.combined_args_to_skip));
1609 EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
1610 lto_output_uleb128_stream (ob->main_stream, index);
1611 lto_output_uleb128_stream (ob->main_stream,
1612 VEC_length (ipa_replace_map_p, node->clone.tree_map));
1613 FOR_EACH_VEC_ELT (ipa_replace_map_p, node->clone.tree_map, i, map)
1614 {
1615 int parm_num;
1616 tree parm;
1617
1618 for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm;
1619 parm = DECL_CHAIN (parm), parm_num++)
1620 if (map->old_tree == parm)
1621 break;
1622       /* At the moment we assume all old trees are PARM_DECLs, because we have
1623          no mechanism to store function-local declarations into summaries. */
1624 gcc_assert (parm);
1625 lto_output_uleb128_stream (ob->main_stream, parm_num);
1626 lto_output_tree (ob, map->new_tree, true);
1627 bp = bitpack_create (ob->main_stream);
1628 bp_pack_value (&bp, map->replace_p, 1);
1629 bp_pack_value (&bp, map->ref_p, 1);
1630 lto_output_bitpack (&bp);
1631 }
1632
1633 if (cgraph_node_in_set_p (node, set))
1634 {
1635 for (e = node->callees; e; e = e->next_callee)
1636 output_edge_opt_summary (ob, e);
1637 for (e = node->indirect_calls; e; e = e->next_callee)
1638 output_edge_opt_summary (ob, e);
1639 }
1640 }
1641
1642 /* Output optimization summaries stored in the callgraph.
1643    At the moment this is the clone info structure. */
1644
1645 static void
1646 output_cgraph_opt_summary (cgraph_node_set set)
1647 {
1648 struct cgraph_node *node;
1649 int i, n_nodes;
1650 lto_cgraph_encoder_t encoder;
1651 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1652 unsigned count = 0;
1653
1654 ob->cgraph_node = NULL;
1655 encoder = ob->decl_state->cgraph_node_encoder;
1656 n_nodes = lto_cgraph_encoder_size (encoder);
1657 for (i = 0; i < n_nodes; i++)
1658 if (output_cgraph_opt_summary_p (lto_cgraph_encoder_deref (encoder, i),
1659 set))
1660 count++;
1661 lto_output_uleb128_stream (ob->main_stream, count);
1662 for (i = 0; i < n_nodes; i++)
1663 {
1664 node = lto_cgraph_encoder_deref (encoder, i);
1665 if (output_cgraph_opt_summary_p (node, set))
1666 {
1667 lto_output_uleb128_stream (ob->main_stream, i);
1668 output_node_opt_summary (ob, node, set);
1669 }
1670 }
1671 produce_asm (ob, NULL);
1672 destroy_output_block (ob);
1673 }
1674
1675 /* Input optimisation summary of EDGE. */
1676
1677 static void
1678 input_edge_opt_summary (struct cgraph_edge *edge,
1679 struct lto_input_block *ib_main)
1680 {
1681 HOST_WIDE_INT thunk_delta;
1682 thunk_delta = lto_input_sleb128 (ib_main);
1683 if (thunk_delta != 0)
1684 {
1685 gcc_assert (!edge->indirect_info);
1686 edge->indirect_info = cgraph_allocate_init_indirect_info ();
1687 edge->indirect_info->thunk_delta = thunk_delta;
1688 }
1689 }
1690
1691 /* Input optimisation summary of NODE. */
1692
1693 static void
1694 input_node_opt_summary (struct cgraph_node *node,
1695 struct lto_input_block *ib_main,
1696 struct data_in *data_in)
1697 {
1698 int i;
1699 int count;
1700 int bit;
1701 struct bitpack_d bp;
1702 struct cgraph_edge *e;
1703
1704 count = lto_input_uleb128 (ib_main);
1705 if (count)
1706 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
1707 for (i = 0; i < count; i++)
1708 {
1709 bit = lto_input_uleb128 (ib_main);
1710 bitmap_set_bit (node->clone.args_to_skip, bit);
1711 }
1712 count = lto_input_uleb128 (ib_main);
1713 if (count)
1714 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
1715 for (i = 0; i < count; i++)
1716 {
1717 bit = lto_input_uleb128 (ib_main);
1718 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
1719 }
1720 count = lto_input_uleb128 (ib_main);
1721 for (i = 0; i < count; i++)
1722 {
1723 int parm_num;
1724 tree parm;
1725 struct ipa_replace_map *map = ggc_alloc_ipa_replace_map ();
1726
1727 VEC_safe_push (ipa_replace_map_p, gc, node->clone.tree_map, map);
1728 for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm_num;
1729 parm = DECL_CHAIN (parm))
1730 parm_num --;
1731 map->parm_num = lto_input_uleb128 (ib_main);
1732 map->old_tree = NULL;
1733 map->new_tree = lto_input_tree (ib_main, data_in);
1734 bp = lto_input_bitpack (ib_main);
1735 map->replace_p = bp_unpack_value (&bp, 1);
1736 map->ref_p = bp_unpack_value (&bp, 1);
1737 }
1738 for (e = node->callees; e; e = e->next_callee)
1739 input_edge_opt_summary (e, ib_main);
1740 for (e = node->indirect_calls; e; e = e->next_callee)
1741 input_edge_opt_summary (e, ib_main);
1742 }
1743
1744 /* Read section in file FILE_DATA of length LEN with data DATA. */
1745
1746 static void
1747 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
1748 const char *data, size_t len, VEC (cgraph_node_ptr,
1749 heap) * nodes)
1750 {
1751 const struct lto_function_header *header =
1752 (const struct lto_function_header *) data;
1753 const int32_t cfg_offset = sizeof (struct lto_function_header);
1754 const int32_t main_offset = cfg_offset + header->cfg_size;
1755 const int32_t string_offset = main_offset + header->main_size;
1756 struct data_in *data_in;
1757 struct lto_input_block ib_main;
1758 unsigned int i;
1759 unsigned int count;
1760
1761 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
1762 header->main_size);
1763
1764 data_in =
1765 lto_data_in_create (file_data, (const char *) data + string_offset,
1766 header->string_size, NULL);
1767 count = lto_input_uleb128 (&ib_main);
1768
1769 for (i = 0; i < count; i++)
1770 {
1771 int ref = lto_input_uleb128 (&ib_main);
1772 input_node_opt_summary (VEC_index (cgraph_node_ptr, nodes, ref),
1773 &ib_main, data_in);
1774 }
1775 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
1776 len);
1777 lto_data_in_delete (data_in);
1778 }
1779
1780 /* Input optimization summary of cgraph. */
1781
1782 static void
1783 input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes)
1784 {
1785 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1786 struct lto_file_decl_data *file_data;
1787 unsigned int j = 0;
1788
1789 while ((file_data = file_data_vec[j++]))
1790 {
1791 size_t len;
1792 const char *data =
1793 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
1794 &len);
1795
1796 if (data)
1797 input_cgraph_opt_section (file_data, data, len, nodes);
1798 }
1799 }