re PR middle-end/47663 (Very simple wrapper not inlined)
[gcc.git] / gcc / lto-cgraph.c
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright 2009, 2010, 2011 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "expr.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "hashtab.h"
33 #include "langhooks.h"
34 #include "basic-block.h"
35 #include "tree-flow.h"
36 #include "cgraph.h"
37 #include "function.h"
38 #include "ggc.h"
39 #include "diagnostic-core.h"
40 #include "except.h"
41 #include "vec.h"
42 #include "timevar.h"
43 #include "output.h"
44 #include "pointer-set.h"
45 #include "lto-streamer.h"
46 #include "gcov-io.h"
47
48 static void output_varpool (cgraph_node_set, varpool_node_set);
49 static void output_cgraph_opt_summary (cgraph_node_set set);
50 static void input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes);
51
52
53 /* Cgraph streaming is organized as a set of records whose type
54    is indicated by a tag.  */
55 enum LTO_cgraph_tags
56 {
57 /* Must leave 0 for the stopper. */
58
59 /* Cgraph node without body available. */
60 LTO_cgraph_unavail_node = 1,
61 /* Cgraph node with function body. */
62 LTO_cgraph_analyzed_node,
63 /* Cgraph edges. */
64 LTO_cgraph_edge,
65 LTO_cgraph_indirect_edge
66 };
67
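/* Illustrative sketch (not part of the streamer, guarded out of the build):
   the cgraph section is a sequence of records, each introduced by one of the
   tags above and terminated by the 0 stopper, as consumed by input_cgraph_1
   further below.  IB is a hypothetical input block.  */
#if 0
static void
example_tag_loop (struct lto_input_block *ib)
{
  enum LTO_cgraph_tags tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
  while (tag)
    {
      /* Dispatch on the tag: a node record or an edge record.  */
      tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
    }
}
#endif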
68 /* Create a new cgraph encoder. */
69
70 lto_cgraph_encoder_t
71 lto_cgraph_encoder_new (void)
72 {
73 lto_cgraph_encoder_t encoder = XCNEW (struct lto_cgraph_encoder_d);
74 encoder->map = pointer_map_create ();
75 encoder->nodes = NULL;
76 encoder->body = pointer_set_create ();
77 return encoder;
78 }
79
80
81 /* Delete ENCODER and its components. */
82
83 void
84 lto_cgraph_encoder_delete (lto_cgraph_encoder_t encoder)
85 {
86 VEC_free (cgraph_node_ptr, heap, encoder->nodes);
87 pointer_map_destroy (encoder->map);
88 pointer_set_destroy (encoder->body);
89 free (encoder);
90 }
91
92
93 /* Return the existing reference number of NODE in the cgraph encoder in
94 output block OB. Assign a new reference if this is the first time
95 NODE is encoded. */
96
97 int
98 lto_cgraph_encoder_encode (lto_cgraph_encoder_t encoder,
99 struct cgraph_node *node)
100 {
101 int ref;
102 void **slot;
103
104 slot = pointer_map_contains (encoder->map, node);
105 if (!slot)
106 {
107 ref = VEC_length (cgraph_node_ptr, encoder->nodes);
108 slot = pointer_map_insert (encoder->map, node);
109 *slot = (void *) (intptr_t) ref;
110 VEC_safe_push (cgraph_node_ptr, heap, encoder->nodes, node);
111 }
112 else
113 ref = (int) (intptr_t) *slot;
114
115 return ref;
116 }
117
118 #define LCC_NOT_FOUND (-1)
119
120 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
121 or LCC_NOT_FOUND if it is not there. */
122
123 int
124 lto_cgraph_encoder_lookup (lto_cgraph_encoder_t encoder,
125 struct cgraph_node *node)
126 {
127 void **slot = pointer_map_contains (encoder->map, node);
128 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
129 }
130
131
132 /* Return the cgraph node corresponding to REF using ENCODER. */
133
134 struct cgraph_node *
135 lto_cgraph_encoder_deref (lto_cgraph_encoder_t encoder, int ref)
136 {
137 if (ref == LCC_NOT_FOUND)
138 return NULL;
139
140 return VEC_index (cgraph_node_ptr, encoder->nodes, ref);
141 }
142
143
144 /* Return TRUE if we should encode the body of NODE (if any). */
145
146 bool
147 lto_cgraph_encoder_encode_body_p (lto_cgraph_encoder_t encoder,
148 struct cgraph_node *node)
149 {
150 return pointer_set_contains (encoder->body, node);
151 }
152
153 /* Specify that we should encode the body of NODE (if any). */
154
155 static void
156 lto_set_cgraph_encoder_encode_body (lto_cgraph_encoder_t encoder,
157 struct cgraph_node *node)
158 {
159 pointer_set_insert (encoder->body, node);
160 }
161
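/* Illustrative sketch (not part of the streamer, guarded out of the build):
   how the encoder API above is typically used.  The node parameters are
   hypothetical; the calls are the functions defined in this file.  */
#if 0
static void
example_encoder_roundtrip (struct cgraph_node *a, struct cgraph_node *b)
{
  lto_cgraph_encoder_t encoder = lto_cgraph_encoder_new ();

  /* Encoding assigns dense references 0, 1, ... in first-seen order;
     encoding the same node again returns its existing reference.  */
  int ref_a = lto_cgraph_encoder_encode (encoder, a);
  int ref_b = lto_cgraph_encoder_encode (encoder, b);

  /* Lookup returns the reference, or LCC_NOT_FOUND for unseen nodes.  */
  gcc_assert (lto_cgraph_encoder_lookup (encoder, a) == ref_a);

  /* Deref maps a reference back to the node, as the reader does.  */
  gcc_assert (lto_cgraph_encoder_deref (encoder, ref_b) == b);

  lto_cgraph_encoder_delete (encoder);
}
#endif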
162 /* Create a new varpool encoder. */
163
164 lto_varpool_encoder_t
165 lto_varpool_encoder_new (void)
166 {
167 lto_varpool_encoder_t encoder = XCNEW (struct lto_varpool_encoder_d);
168 encoder->map = pointer_map_create ();
169 encoder->initializer = pointer_set_create ();
170 encoder->nodes = NULL;
171 return encoder;
172 }
173
174
175 /* Delete ENCODER and its components. */
176
177 void
178 lto_varpool_encoder_delete (lto_varpool_encoder_t encoder)
179 {
180 VEC_free (varpool_node_ptr, heap, encoder->nodes);
181 pointer_map_destroy (encoder->map);
182 pointer_set_destroy (encoder->initializer);
183 free (encoder);
184 }
185
186
187 /* Return the existing reference number of NODE in the varpool encoder in
188 output block OB. Assign a new reference if this is the first time
189 NODE is encoded. */
190
191 int
192 lto_varpool_encoder_encode (lto_varpool_encoder_t encoder,
193 struct varpool_node *node)
194 {
195 int ref;
196 void **slot;
197
198 slot = pointer_map_contains (encoder->map, node);
199 if (!slot)
200 {
201 ref = VEC_length (varpool_node_ptr, encoder->nodes);
202 slot = pointer_map_insert (encoder->map, node);
203 *slot = (void *) (intptr_t) ref;
204 VEC_safe_push (varpool_node_ptr, heap, encoder->nodes, node);
205 }
206 else
207 ref = (int) (intptr_t) *slot;
208
209 return ref;
210 }
211
212 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
213 or LCC_NOT_FOUND if it is not there. */
214
215 int
216 lto_varpool_encoder_lookup (lto_varpool_encoder_t encoder,
217 struct varpool_node *node)
218 {
219 void **slot = pointer_map_contains (encoder->map, node);
220 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
221 }
222
223
224 /* Return the varpool node corresponding to REF using ENCODER. */
225
226 struct varpool_node *
227 lto_varpool_encoder_deref (lto_varpool_encoder_t encoder, int ref)
228 {
229 if (ref == LCC_NOT_FOUND)
230 return NULL;
231
232 return VEC_index (varpool_node_ptr, encoder->nodes, ref);
233 }
234
235
236 /* Return TRUE if we should encode initializer of NODE (if any). */
237
238 bool
239 lto_varpool_encoder_encode_initializer_p (lto_varpool_encoder_t encoder,
240 struct varpool_node *node)
241 {
242 return pointer_set_contains (encoder->initializer, node);
243 }
244
245 /* Specify that we should encode the initializer of NODE (if any). */
246
247 static void
248 lto_set_varpool_encoder_encode_initializer (lto_varpool_encoder_t encoder,
249 struct varpool_node *node)
250 {
251 pointer_set_insert (encoder->initializer, node);
252 }
253
254 /* Output the cgraph EDGE to OB using ENCODER. */
255
256 static void
257 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
258 lto_cgraph_encoder_t encoder)
259 {
260 unsigned int uid;
261 intptr_t ref;
262 struct bitpack_d bp;
263
264 if (edge->indirect_unknown_callee)
265 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_indirect_edge);
266 else
267 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_edge);
268
269 ref = lto_cgraph_encoder_lookup (encoder, edge->caller);
270 gcc_assert (ref != LCC_NOT_FOUND);
271 lto_output_sleb128_stream (ob->main_stream, ref);
272
273 if (!edge->indirect_unknown_callee)
274 {
275 ref = lto_cgraph_encoder_lookup (encoder, edge->callee);
276 gcc_assert (ref != LCC_NOT_FOUND);
277 lto_output_sleb128_stream (ob->main_stream, ref);
278 }
279
280 lto_output_sleb128_stream (ob->main_stream, edge->count);
281
282 bp = bitpack_create (ob->main_stream);
283 uid = (!gimple_has_body_p (edge->caller->decl)
284 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt));
285 bp_pack_value (&bp, uid, HOST_BITS_PER_INT);
286 bp_pack_value (&bp, edge->inline_failed, HOST_BITS_PER_INT);
287 bp_pack_value (&bp, edge->frequency, HOST_BITS_PER_INT);
288 bp_pack_value (&bp, edge->call_stmt_size, HOST_BITS_PER_INT);
289 bp_pack_value (&bp, edge->call_stmt_time, HOST_BITS_PER_INT);
290 bp_pack_value (&bp, edge->loop_nest, 30);
291 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
292 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
293 bp_pack_value (&bp, edge->can_throw_external, 1);
294 if (edge->indirect_unknown_callee)
295 {
296 int flags = edge->indirect_info->ecf_flags;
297 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
298 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
299 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
300 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
301 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
302 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
303 /* Flags that should not appear on indirect calls. */
304 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
305 | ECF_MAY_BE_ALLOCA
306 | ECF_SIBCALL
307 | ECF_LEAF
308 | ECF_NOVOPS)));
309 }
310 lto_output_bitpack (&bp);
311 }
312
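/* Illustrative sketch (not part of the streamer, guarded out of the build):
   the reader must consume exactly what the writer emitted, in the same order
   and, for bitpacked fields, with the same widths.  input_edge below mirrors
   lto_output_edge field by field; this hypothetical two-field record shows
   the convention using the same stream primitives.  */
#if 0
static void
example_write_record (struct lto_simple_output_block *ob, int kind, int flag)
{
  struct bitpack_d bp;
  lto_output_uleb128_stream (ob->main_stream, kind);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, flag, 1);
  lto_output_bitpack (&bp);
}

static void
example_read_record (struct lto_input_block *ib, int *kind, int *flag)
{
  struct bitpack_d bp;
  *kind = lto_input_uleb128 (ib);
  bp = lto_input_bitpack (ib);
  *flag = bp_unpack_value (&bp, 1);
}
#endif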
313 /* Return true if LIST contains references from other partitions. */
314
315 bool
316 referenced_from_other_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
317 varpool_node_set vset)
318 {
319 int i;
320 struct ipa_ref *ref;
321 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
322 {
323 if (ref->refering_type == IPA_REF_CGRAPH)
324 {
325 if (ipa_ref_refering_node (ref)->in_other_partition
326 || !cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
327 return true;
328 }
329 else
330 {
331 if (ipa_ref_refering_varpool_node (ref)->in_other_partition
332 || !varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
333 vset))
334 return true;
335 }
336 }
337 return false;
338 }
339
340 /* Return true when NODE is reachable from another partition. */
341
342 bool
343 reachable_from_other_partition_p (struct cgraph_node *node, cgraph_node_set set)
344 {
345 struct cgraph_edge *e;
346 if (!node->analyzed)
347 return false;
348 if (node->global.inlined_to)
349 return false;
350 for (e = node->callers; e; e = e->next_caller)
351 if (e->caller->in_other_partition
352 || !cgraph_node_in_set_p (e->caller, set))
353 return true;
354 return false;
355 }
356
357 /* Return true if LIST contains references from this partition. */
358
359 bool
360 referenced_from_this_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
361 varpool_node_set vset)
362 {
363 int i;
364 struct ipa_ref *ref;
365 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
366 {
367 if (ref->refering_type == IPA_REF_CGRAPH)
368 {
369 if (cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
370 return true;
371 }
372 else
373 {
374 if (varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
375 vset))
376 return true;
377 }
378 }
379 return false;
380 }
381
382 /* Return true when NODE is reachable from this partition. */
383
384 bool
385 reachable_from_this_partition_p (struct cgraph_node *node, cgraph_node_set set)
386 {
387 struct cgraph_edge *e;
388 for (e = node->callers; e; e = e->next_caller)
389 if (cgraph_node_in_set_p (e->caller, set))
390 return true;
391 return false;
392 }
393
394 /* Output the cgraph NODE to OB. ENCODER is used to find the
395 reference number of NODE->inlined_to. SET is the set of nodes we
396 are writing to the current file. If NODE is not in SET, then NODE
397 is a boundary of a cgraph_node_set and we pretend NODE just has a
398 decl and no callees. WRITTEN_DECLS is the set of FUNCTION_DECLs
399 that have had their callgraph node written so far. This is used to
400 determine if NODE is a clone of a previously written node. */
401
402 static void
403 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
404 lto_cgraph_encoder_t encoder, cgraph_node_set set,
405 varpool_node_set vset)
406 {
407 unsigned int tag;
408 struct bitpack_d bp;
409 bool boundary_p;
410 intptr_t ref;
411 bool in_other_partition = false;
412 struct cgraph_node *clone_of;
413
414 boundary_p = !cgraph_node_in_set_p (node, set);
415
416 if (node->analyzed && !boundary_p)
417 tag = LTO_cgraph_analyzed_node;
418 else
419 tag = LTO_cgraph_unavail_node;
420
421 lto_output_uleb128_stream (ob->main_stream, tag);
422
423 /* In WPA mode, we only output part of the call-graph. Also, we
424 fake cgraph node attributes. There are two cases that we care about.
425
426 Boundary nodes: There are nodes that are not part of SET but are
427 called from within SET. We artificially make them look like
428 externally visible nodes with no function body.
429
430 Cherry-picked nodes: These are nodes we pulled from other
431 translation units into SET during IPA-inlining. We make them
432 local static nodes to prevent clashes with other local statics. */
433 if (boundary_p && node->analyzed)
434 {
435 /* Inline clones cannot be part of the boundary.
436 gcc_assert (!node->global.inlined_to);
437
438 FIXME: At the moment they can be, when the partition contains an inline
439 clone that is a clone of an inline clone from outside the partition. We can
440 reshape the clone tree and make another node the root, but it needs
441 a bit of extra work and will be promptly done by cgraph_remove_node
442 after reading back. */
443 in_other_partition = 1;
444 }
445
446 clone_of = node->clone_of;
447 while (clone_of
448 && (ref = lto_cgraph_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
449 if (clone_of->prev_sibling_clone)
450 clone_of = clone_of->prev_sibling_clone;
451 else
452 clone_of = clone_of->clone_of;
453
454 if (tag == LTO_cgraph_analyzed_node)
455 gcc_assert (clone_of || !node->clone_of);
456 if (!clone_of)
457 lto_output_sleb128_stream (ob->main_stream, LCC_NOT_FOUND);
458 else
459 lto_output_sleb128_stream (ob->main_stream, ref);
460
461
462 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
463 lto_output_sleb128_stream (ob->main_stream, node->count);
464 lto_output_sleb128_stream (ob->main_stream, node->count_materialization_scale);
465
466 if (tag == LTO_cgraph_analyzed_node)
467 {
468 lto_output_sleb128_stream (ob->main_stream,
469 node->local.inline_summary.estimated_self_stack_size);
470 lto_output_sleb128_stream (ob->main_stream,
471 node->local.inline_summary.self_size);
472 lto_output_sleb128_stream (ob->main_stream,
473 node->local.inline_summary.size_inlining_benefit);
474 lto_output_sleb128_stream (ob->main_stream,
475 node->local.inline_summary.self_time);
476 lto_output_sleb128_stream (ob->main_stream,
477 node->local.inline_summary.time_inlining_benefit);
478 if (node->global.inlined_to)
479 {
480 ref = lto_cgraph_encoder_lookup (encoder, node->global.inlined_to);
481 gcc_assert (ref != LCC_NOT_FOUND);
482 }
483 else
484 ref = LCC_NOT_FOUND;
485
486 lto_output_sleb128_stream (ob->main_stream, ref);
487 }
488
489 if (node->same_comdat_group && !boundary_p)
490 {
491 ref = lto_cgraph_encoder_lookup (encoder, node->same_comdat_group);
492 gcc_assert (ref != LCC_NOT_FOUND);
493 }
494 else
495 ref = LCC_NOT_FOUND;
496 lto_output_sleb128_stream (ob->main_stream, ref);
497
498 bp = bitpack_create (ob->main_stream);
499 bp_pack_value (&bp, node->local.local, 1);
500 bp_pack_value (&bp, node->local.externally_visible, 1);
501 bp_pack_value (&bp, node->local.finalized, 1);
502 bp_pack_value (&bp, node->local.inlinable, 1);
503 bp_pack_value (&bp, node->local.versionable, 1);
504 bp_pack_value (&bp, node->local.can_change_signature, 1);
505 bp_pack_value (&bp, node->local.disregard_inline_limits, 1);
506 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
507 bp_pack_value (&bp, node->local.vtable_method, 1);
508 bp_pack_value (&bp, node->needed, 1);
509 bp_pack_value (&bp, node->address_taken, 1);
510 bp_pack_value (&bp, node->abstract_and_needed, 1);
511 bp_pack_value (&bp, tag == LTO_cgraph_analyzed_node
512 && !DECL_EXTERNAL (node->decl)
513 && !DECL_COMDAT (node->decl)
514 && (reachable_from_other_partition_p (node, set)
515 || referenced_from_other_partition_p (&node->ref_list, set, vset)), 1);
516 bp_pack_value (&bp, node->lowered, 1);
517 bp_pack_value (&bp, in_other_partition, 1);
518 bp_pack_value (&bp, node->alias, 1);
519 bp_pack_value (&bp, node->finalized_by_frontend, 1);
520 bp_pack_value (&bp, node->frequency, 2);
521 bp_pack_value (&bp, node->only_called_at_startup, 1);
522 bp_pack_value (&bp, node->only_called_at_exit, 1);
523 lto_output_bitpack (&bp);
524 lto_output_uleb128_stream (ob->main_stream, node->resolution);
525
526 if (node->same_body)
527 {
528 struct cgraph_node *alias;
529 unsigned long alias_count = 1;
530 for (alias = node->same_body; alias->next; alias = alias->next)
531 alias_count++;
532 lto_output_uleb128_stream (ob->main_stream, alias_count);
533 do
534 {
535 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
536 alias->decl);
537 if (alias->thunk.thunk_p)
538 {
539 lto_output_uleb128_stream
540 (ob->main_stream,
541 1 + (alias->thunk.this_adjusting != 0) * 2
542 + (alias->thunk.virtual_offset_p != 0) * 4);
543 lto_output_uleb128_stream (ob->main_stream,
544 alias->thunk.fixed_offset);
545 lto_output_uleb128_stream (ob->main_stream,
546 alias->thunk.virtual_value);
547 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
548 alias->thunk.alias);
549 }
550 else
551 {
552 lto_output_uleb128_stream (ob->main_stream, 0);
553 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
554 alias->thunk.alias);
555 }
556 gcc_assert (cgraph_get_node (alias->thunk.alias) == node);
557 lto_output_uleb128_stream (ob->main_stream, alias->resolution);
558 alias = alias->previous;
559 }
560 while (alias);
561 }
562 else
563 lto_output_uleb128_stream (ob->main_stream, 0);
564 }
565
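/* Illustrative sketch (not part of the streamer, guarded out of the build):
   the "type" word streamed for a same-body thunk above packs three facts into
   one small integer; input_node recovers them with the matching bit tests.
   The flag values used here are hypothetical.  */
#if 0
static void
example_thunk_type_word (void)
{
  int this_adjusting = 1, virtual_offset_p = 0;
  /* Writer: 0 means a plain same-body alias; for a thunk, bit 0 is set,
     bit 1 carries this_adjusting and bit 2 carries virtual_offset_p.  */
  int type = 1 + (this_adjusting != 0) * 2 + (virtual_offset_p != 0) * 4;
  /* Reader: non-zero means thunk; the flags come back from bits 1 and 2.  */
  gcc_assert (type != 0 && (type & 2) != 0 && (type & 4) == 0);
}
#endif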
566 /* Output the varpool NODE to OB.
567 If NODE is not in SET, then NODE is a boundary. */
568
569 static void
570 lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
571 lto_varpool_encoder_t varpool_encoder,
572 cgraph_node_set set, varpool_node_set vset)
573 {
574 bool boundary_p = !varpool_node_in_set_p (node, vset) && node->analyzed;
575 struct bitpack_d bp;
576 struct varpool_node *alias;
577 int count = 0;
578 int ref;
579
580 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
581 bp = bitpack_create (ob->main_stream);
582 bp_pack_value (&bp, node->externally_visible, 1);
583 bp_pack_value (&bp, node->force_output, 1);
584 bp_pack_value (&bp, node->finalized, 1);
585 bp_pack_value (&bp, node->alias, 1);
586 gcc_assert (!node->alias || !node->extra_name);
587 gcc_assert (node->finalized || !node->analyzed);
588 gcc_assert (node->needed);
589 /* Constant pool initializers can be de-unified into individual ltrans units.
590 FIXME: Alternatively, at -Os we may want to avoid generating the local
591 labels for them and instead share them across LTRANS partitions. */
592 if (DECL_IN_CONSTANT_POOL (node->decl)
593 && !DECL_COMDAT (node->decl))
594 {
595 bp_pack_value (&bp, 0, 1); /* used_from_other_partition. */
596 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
597 }
598 else
599 {
600 bp_pack_value (&bp, node->analyzed
601 && referenced_from_other_partition_p (&node->ref_list,
602 set, vset), 1);
603 bp_pack_value (&bp, boundary_p, 1); /* in_other_partition. */
604 }
605 /* Also emit any extra name aliases. */
606 for (alias = node->extra_name; alias; alias = alias->next)
607 count++;
608 bp_pack_value (&bp, count != 0, 1);
609 lto_output_bitpack (&bp);
610 if (node->same_comdat_group && !boundary_p)
611 {
612 ref = lto_varpool_encoder_lookup (varpool_encoder, node->same_comdat_group);
613 gcc_assert (ref != LCC_NOT_FOUND);
614 }
615 else
616 ref = LCC_NOT_FOUND;
617 lto_output_sleb128_stream (ob->main_stream, ref);
618 lto_output_uleb128_stream (ob->main_stream, node->resolution);
619
620 if (count)
621 {
622 lto_output_uleb128_stream (ob->main_stream, count);
623 for (alias = node->extra_name; alias; alias = alias->next)
624 {
625 lto_output_var_decl_index (ob->decl_state, ob->main_stream, alias->decl);
626 lto_output_uleb128_stream (ob->main_stream, alias->resolution);
627 }
628 }
629 }
630
631 /* Output the reference REF to OB, using ENCODER for cgraph nodes
632 and VARPOOL_ENCODER for varpool nodes. */
633
634 static void
635 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
636 lto_cgraph_encoder_t encoder,
637 lto_varpool_encoder_t varpool_encoder)
638 {
639 struct bitpack_d bp;
640 bp = bitpack_create (ob->main_stream);
641 bp_pack_value (&bp, ref->refered_type, 1);
642 bp_pack_value (&bp, ref->use, 2);
643 lto_output_bitpack (&bp);
644 if (ref->refered_type == IPA_REF_CGRAPH)
645 {
646 int nref = lto_cgraph_encoder_lookup (encoder, ipa_ref_node (ref));
647 gcc_assert (nref != LCC_NOT_FOUND);
648 lto_output_sleb128_stream (ob->main_stream, nref);
649 }
650 else
651 {
652 int nref = lto_varpool_encoder_lookup (varpool_encoder,
653 ipa_ref_varpool_node (ref));
654 gcc_assert (nref != LCC_NOT_FOUND);
655 lto_output_sleb128_stream (ob->main_stream, nref);
656 }
657 }
658
659 /* Stream out profile_summary to OB. */
660
661 static void
662 output_profile_summary (struct lto_simple_output_block *ob)
663 {
664 if (profile_info)
665 {
666 /* We do not output num, sum_all and run_max, as they are not used by
667 GCC profile feedback and they are difficult to merge from multiple
668 units. */
669 gcc_assert (profile_info->runs);
670 lto_output_uleb128_stream (ob->main_stream, profile_info->runs);
671 lto_output_uleb128_stream (ob->main_stream, profile_info->sum_max);
672 }
673 else
674 lto_output_uleb128_stream (ob->main_stream, 0);
675 }
676
677 /* Add NODE into ENCODER as well as the nodes it is cloned from.
678 Do it in such a way that the nodes a clone was cloned from appear first. */
679
680 static void
681 add_node_to (lto_cgraph_encoder_t encoder, struct cgraph_node *node,
682 bool include_body)
683 {
684 if (node->clone_of)
685 add_node_to (encoder, node->clone_of, include_body);
686 else if (include_body)
687 lto_set_cgraph_encoder_encode_body (encoder, node);
688 lto_cgraph_encoder_encode (encoder, node);
689 }
690
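/* Illustrative sketch (not part of the streamer, guarded out of the build):
   for a hypothetical clone chain where C2's clone_of is C1 and C1's clone_of
   is ORIGIN, add_node_to recurses through clone_of before encoding, so the
   encoder assigns ORIGIN the smallest reference, then C1, then C2.  This is
   what lets output_cgraph stream a node before its clones.  */
#if 0
static void
example_clone_order (lto_cgraph_encoder_t encoder, struct cgraph_node *c2)
{
  add_node_to (encoder, c2, false);
  /* encoder->nodes now ends with ..., ORIGIN, C1, C2 in that order.  */
}
#endif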
691 /* Add all references in LIST to encoders. */
692
693 static void
694 add_references (lto_cgraph_encoder_t encoder,
695 lto_varpool_encoder_t varpool_encoder,
696 struct ipa_ref_list *list)
697 {
698 int i;
699 struct ipa_ref *ref;
700 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
701 if (ref->refered_type == IPA_REF_CGRAPH)
702 add_node_to (encoder, ipa_ref_node (ref), false);
703 else
704 {
705 struct varpool_node *vnode = ipa_ref_varpool_node (ref);
706 lto_varpool_encoder_encode (varpool_encoder, vnode);
707 }
708 }
709
710 /* Output all callees or indirect outgoing edges. EDGE must be the first such
711 edge. */
712
713 static void
714 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
715 struct lto_simple_output_block *ob,
716 lto_cgraph_encoder_t encoder)
717 {
718 if (!edge)
719 return;
720
721 /* Output edges in backward direction, so the reconstructed callgraph matches
722 and it is easy to associate call sites with the IPA pass summaries. */
723 while (edge->next_callee)
724 edge = edge->next_callee;
725 for (; edge; edge = edge->prev_callee)
726 lto_output_edge (ob, edge, encoder);
727 }
728
729 /* Output the IPA references of the nodes in SET and VSET. */
730
731 static void
732 output_refs (cgraph_node_set set, varpool_node_set vset,
733 lto_cgraph_encoder_t encoder,
734 lto_varpool_encoder_t varpool_encoder)
735 {
736 cgraph_node_set_iterator csi;
737 varpool_node_set_iterator vsi;
738 struct lto_simple_output_block *ob;
739 int count;
740 struct ipa_ref *ref;
741 int i;
742
743 ob = lto_create_simple_output_block (LTO_section_refs);
744
745 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
746 {
747 struct cgraph_node *node = csi_node (csi);
748
749 count = ipa_ref_list_nreferences (&node->ref_list);
750 if (count)
751 {
752 lto_output_uleb128_stream (ob->main_stream, count);
753 lto_output_uleb128_stream (ob->main_stream,
754 lto_cgraph_encoder_lookup (encoder, node));
755 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
756 lto_output_ref (ob, ref, encoder, varpool_encoder);
757 }
758 }
759
760 lto_output_uleb128_stream (ob->main_stream, 0);
761
762 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
763 {
764 struct varpool_node *node = vsi_node (vsi);
765
766 count = ipa_ref_list_nreferences (&node->ref_list);
767 if (count)
768 {
769 lto_output_uleb128_stream (ob->main_stream, count);
770 lto_output_uleb128_stream (ob->main_stream,
771 lto_varpool_encoder_lookup (varpool_encoder,
772 node));
773 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
774 lto_output_ref (ob, ref, encoder, varpool_encoder);
775 }
776 }
777
778 lto_output_uleb128_stream (ob->main_stream, 0);
779
780 lto_destroy_simple_output_block (ob);
781 }
782
783 /* Find all the cgraph and varpool nodes we want to encode in the current unit
784 and insert them into the encoders. */
785 void
786 compute_ltrans_boundary (struct lto_out_decl_state *state,
787 cgraph_node_set set, varpool_node_set vset)
788 {
789 struct cgraph_node *node;
790 cgraph_node_set_iterator csi;
791 varpool_node_set_iterator vsi;
792 struct cgraph_edge *edge;
793 int i;
794 lto_cgraph_encoder_t encoder;
795 lto_varpool_encoder_t varpool_encoder;
796
797 encoder = state->cgraph_node_encoder = lto_cgraph_encoder_new ();
798 varpool_encoder = state->varpool_node_encoder = lto_varpool_encoder_new ();
799
800 /* Go over all the nodes in SET and assign references. */
801 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
802 {
803 node = csi_node (csi);
804 add_node_to (encoder, node, true);
805 add_references (encoder, varpool_encoder, &node->ref_list);
806 }
807 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
808 {
809 struct varpool_node *vnode = vsi_node (vsi);
810 gcc_assert (!vnode->alias);
811 lto_varpool_encoder_encode (varpool_encoder, vnode);
812 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
813 add_references (encoder, varpool_encoder, &vnode->ref_list);
814 }
815 /* Also pickle the initializers of all referenced read-only variables
816 to help folding. Constant pool variables are not shared, so we must
817 pickle those too. */
818 for (i = 0; i < lto_varpool_encoder_size (varpool_encoder); i++)
819 {
820 struct varpool_node *vnode = lto_varpool_encoder_deref (varpool_encoder, i);
821 if (DECL_INITIAL (vnode->decl)
822 && !lto_varpool_encoder_encode_initializer_p (varpool_encoder,
823 vnode)
824 && const_value_known_p (vnode->decl))
825 {
826 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
827 add_references (encoder, varpool_encoder, &vnode->ref_list);
828 }
829 }
830
831 /* Go over all the nodes again to include callees that are not in
832 SET. */
833 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
834 {
835 node = csi_node (csi);
836 for (edge = node->callees; edge; edge = edge->next_callee)
837 {
838 struct cgraph_node *callee = edge->callee;
839 if (!cgraph_node_in_set_p (callee, set))
840 {
841 /* We should have moved all the inlines. */
842 gcc_assert (!callee->global.inlined_to);
843 add_node_to (encoder, callee, false);
844 }
845 }
846 }
847 }
848
849 /* Output the part of the cgraph in SET. */
850
851 void
852 output_cgraph (cgraph_node_set set, varpool_node_set vset)
853 {
854 struct cgraph_node *node;
855 struct lto_simple_output_block *ob;
856 cgraph_node_set_iterator csi;
857 int i, n_nodes;
858 lto_cgraph_encoder_t encoder;
859 lto_varpool_encoder_t varpool_encoder;
860 struct cgraph_asm_node *can;
861 static bool asm_nodes_output = false;
862
863 if (flag_wpa)
864 output_cgraph_opt_summary (set);
865
866 ob = lto_create_simple_output_block (LTO_section_cgraph);
867
868 output_profile_summary (ob);
869
870 /* An encoder for cgraph nodes should have been created by
871 ipa_write_summaries_1. */
872 gcc_assert (ob->decl_state->cgraph_node_encoder);
873 gcc_assert (ob->decl_state->varpool_node_encoder);
874 encoder = ob->decl_state->cgraph_node_encoder;
875 varpool_encoder = ob->decl_state->varpool_node_encoder;
876
877 /* Write out the nodes. We must first output a node and then its clones,
878 otherwise at the time the clone is read back there would be nothing
879 to clone from. */
880 n_nodes = lto_cgraph_encoder_size (encoder);
881 for (i = 0; i < n_nodes; i++)
882 {
883 node = lto_cgraph_encoder_deref (encoder, i);
884 lto_output_node (ob, node, encoder, set, vset);
885 }
886
887 /* Go over the nodes in SET again to write edges. */
888 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
889 {
890 node = csi_node (csi);
891 output_outgoing_cgraph_edges (node->callees, ob, encoder);
892 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
893 }
894
895 lto_output_uleb128_stream (ob->main_stream, 0);
896
897 /* Emit toplevel asms.
898 When doing WPA we must output every asm just once. Since we do not partition asm
899 nodes at all, output them to the first output. This is kind of a hack, but should
900 work well. */
901 if (!asm_nodes_output)
902 {
903 asm_nodes_output = true;
904 for (can = cgraph_asm_nodes; can; can = can->next)
905 {
906 int len = TREE_STRING_LENGTH (can->asm_str);
907 lto_output_uleb128_stream (ob->main_stream, len);
908 for (i = 0; i < len; ++i)
909 lto_output_1_stream (ob->main_stream,
910 TREE_STRING_POINTER (can->asm_str)[i]);
911 }
912 }
913
914 lto_output_uleb128_stream (ob->main_stream, 0);
915
916 lto_destroy_simple_output_block (ob);
917 output_varpool (set, vset);
918 output_refs (set, vset, encoder, varpool_encoder);
919 }
920
921 /* Overwrite the information in NODE based on FILE_DATA, TAG, FLAGS,
922 STACK_SIZE, SELF_TIME and SELF_SIZE. This is called either to initialize
923 NODE or to replace the values in it, for instance because the first
924 time we saw it, the function body was not available but now it
925 is. BP is a bitpack with all the bitflags for NODE read from the
926 stream. */
927
928 static void
929 input_overwrite_node (struct lto_file_decl_data *file_data,
930 struct cgraph_node *node,
931 enum LTO_cgraph_tags tag,
932 struct bitpack_d *bp,
933 unsigned int stack_size,
934 unsigned int self_time,
935 unsigned int time_inlining_benefit,
936 unsigned int self_size,
937 unsigned int size_inlining_benefit,
938 enum ld_plugin_symbol_resolution resolution)
939 {
940 node->aux = (void *) tag;
941 node->local.inline_summary.estimated_self_stack_size = stack_size;
942 node->local.inline_summary.self_time = self_time;
943 node->local.inline_summary.time_inlining_benefit = time_inlining_benefit;
944 node->local.inline_summary.self_size = self_size;
945 node->local.inline_summary.size_inlining_benefit = size_inlining_benefit;
946 node->global.time = self_time;
947 node->global.size = self_size;
948 node->global.estimated_stack_size = stack_size;
949 node->global.estimated_growth = INT_MIN;
950 node->local.lto_file_data = file_data;
951
952 node->local.local = bp_unpack_value (bp, 1);
953 node->local.externally_visible = bp_unpack_value (bp, 1);
954 node->local.finalized = bp_unpack_value (bp, 1);
955 node->local.inlinable = bp_unpack_value (bp, 1);
956 node->local.versionable = bp_unpack_value (bp, 1);
957 node->local.can_change_signature = bp_unpack_value (bp, 1);
958 node->local.disregard_inline_limits = bp_unpack_value (bp, 1);
959 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
960 node->local.vtable_method = bp_unpack_value (bp, 1);
961 node->needed = bp_unpack_value (bp, 1);
962 node->address_taken = bp_unpack_value (bp, 1);
963 node->abstract_and_needed = bp_unpack_value (bp, 1);
964 node->reachable_from_other_partition = bp_unpack_value (bp, 1);
965 node->lowered = bp_unpack_value (bp, 1);
966 node->analyzed = tag == LTO_cgraph_analyzed_node;
967 node->in_other_partition = bp_unpack_value (bp, 1);
968 if (node->in_other_partition
969 /* Avoid updating decl when we are seeing just inline clone.
970 When inlining function that has functions already inlined into it,
971 we produce clones of inline clones.
972
973 WPA partitioning might put each clone into different unit and
974 we might end up streaming inline clone from other partition
975 to support clone we are interested in. */
976 && (!node->clone_of
977 || node->clone_of->decl != node->decl))
978 {
979 DECL_EXTERNAL (node->decl) = 1;
980 TREE_STATIC (node->decl) = 0;
981 }
982 node->alias = bp_unpack_value (bp, 1);
983 node->finalized_by_frontend = bp_unpack_value (bp, 1);
984 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
985 node->only_called_at_startup = bp_unpack_value (bp, 1);
986 node->only_called_at_exit = bp_unpack_value (bp, 1);
987 node->resolution = resolution;
988 }
989
990 /* Output the part of the varpool in SET and VSET. */
991
992 static void
993 output_varpool (cgraph_node_set set, varpool_node_set vset)
994 {
995 struct lto_simple_output_block *ob = lto_create_simple_output_block (LTO_section_varpool);
996 lto_varpool_encoder_t varpool_encoder = ob->decl_state->varpool_node_encoder;
997 int len = lto_varpool_encoder_size (varpool_encoder), i;
998
999 lto_output_uleb128_stream (ob->main_stream, len);
1000
1001 /* Write out the nodes in the order given by the varpool encoder. */
1004 for (i = 0; i < len; i++)
1005 {
1006 lto_output_varpool_node (ob, lto_varpool_encoder_deref (varpool_encoder, i),
1007 varpool_encoder,
1008 set, vset);
1009 }
1010
1011 lto_destroy_simple_output_block (ob);
1012 }
1013
1014 /* Read a node from input_block IB. TAG is the node's tag just read.
1015 Return the node read or overwritten. */
1016
1017 static struct cgraph_node *
1018 input_node (struct lto_file_decl_data *file_data,
1019 struct lto_input_block *ib,
1020 enum LTO_cgraph_tags tag,
1021 VEC(cgraph_node_ptr, heap) *nodes)
1022 {
1023 tree fn_decl;
1024 struct cgraph_node *node;
1025 struct bitpack_d bp;
1026 int stack_size = 0;
1027 unsigned decl_index;
1028 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
1029 int self_time = 0;
1030 int self_size = 0;
1031 int time_inlining_benefit = 0;
1032 int size_inlining_benefit = 0;
1033 unsigned long same_body_count = 0;
1034 int clone_ref;
1035 enum ld_plugin_symbol_resolution resolution;
1036
1037 clone_ref = lto_input_sleb128 (ib);
1038
1039 decl_index = lto_input_uleb128 (ib);
1040 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1041
1042 if (clone_ref != LCC_NOT_FOUND)
1043 {
1044 node = cgraph_clone_node (VEC_index (cgraph_node_ptr, nodes, clone_ref), fn_decl,
1045 0, CGRAPH_FREQ_BASE, 0, false, NULL);
1046 }
1047 else
1048 node = cgraph_node (fn_decl);
1049
1050 node->count = lto_input_sleb128 (ib);
1051 node->count_materialization_scale = lto_input_sleb128 (ib);
1052
1053 if (tag == LTO_cgraph_analyzed_node)
1054 {
1055 stack_size = lto_input_sleb128 (ib);
1056 self_size = lto_input_sleb128 (ib);
1057 size_inlining_benefit = lto_input_sleb128 (ib);
1058 self_time = lto_input_sleb128 (ib);
1059 time_inlining_benefit = lto_input_sleb128 (ib);
1060
1061 ref = lto_input_sleb128 (ib);
1062 }
1063
1064 ref2 = lto_input_sleb128 (ib);
1065
1066 /* Make sure that we have not read this node before. Nodes that
1067 have already been read will have their tag stored in the 'aux'
1068 field. Since built-in functions can be referenced in multiple
1069 functions, they are expected to be read more than once. */
1070 if (node->aux && !DECL_IS_BUILTIN (node->decl))
1071 internal_error ("bytecode stream: found multiple instances of cgraph "
1072 "node %d", node->uid);
1073
1074 bp = lto_input_bitpack (ib);
1075 resolution = (enum ld_plugin_symbol_resolution)lto_input_uleb128 (ib);
1076 input_overwrite_node (file_data, node, tag, &bp, stack_size, self_time,
1077 time_inlining_benefit, self_size,
1078 size_inlining_benefit, resolution);
1079
1080 /* Store a reference for now, and fix up later to be a pointer. */
1081 node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
1082
1083 /* Store a reference for now, and fix up later to be a pointer. */
1084 node->same_comdat_group = (cgraph_node_ptr) (intptr_t) ref2;
1085
1086 same_body_count = lto_input_uleb128 (ib);
1087 while (same_body_count-- > 0)
1088 {
1089 tree alias_decl;
1090 int type;
1091 struct cgraph_node *alias;
1092 decl_index = lto_input_uleb128 (ib);
1093 alias_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1094 type = lto_input_uleb128 (ib);
1095 if (!type)
1096 {
1097 tree real_alias;
1098 decl_index = lto_input_uleb128 (ib);
1099 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1100 alias = cgraph_same_body_alias (node, alias_decl, real_alias);
1101 }
1102 else
1103 {
1104 HOST_WIDE_INT fixed_offset = lto_input_uleb128 (ib);
1105 HOST_WIDE_INT virtual_value = lto_input_uleb128 (ib);
1106 tree real_alias;
1107 decl_index = lto_input_uleb128 (ib);
1108 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1109 alias = cgraph_add_thunk (node, alias_decl, fn_decl, type & 2, fixed_offset,
1110 virtual_value,
1111 (type & 4) ? size_int (virtual_value) : NULL_TREE,
1112 real_alias);
1113 }
1114 gcc_assert (alias);
1115 alias->resolution = (enum ld_plugin_symbol_resolution)lto_input_uleb128 (ib);
1116 }
1117 return node;
1118 }
1119
1120 /* Read a varpool node from input_block IB.
1121 Return the node read or overwritten. */
1122
1123 static struct varpool_node *
1124 input_varpool_node (struct lto_file_decl_data *file_data,
1125 struct lto_input_block *ib)
1126 {
1127 int decl_index;
1128 tree var_decl;
1129 struct varpool_node *node;
1130 struct bitpack_d bp;
1131 bool aliases_p;
1132 int count;
1133 int ref = LCC_NOT_FOUND;
1134
1135 decl_index = lto_input_uleb128 (ib);
1136 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1137 node = varpool_node (var_decl);
1138 node->lto_file_data = file_data;
1139
1140 bp = lto_input_bitpack (ib);
1141 node->externally_visible = bp_unpack_value (&bp, 1);
1142 node->force_output = bp_unpack_value (&bp, 1);
1143 node->finalized = bp_unpack_value (&bp, 1);
1144 node->alias = bp_unpack_value (&bp, 1);
1145 node->analyzed = node->finalized;
1146 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1147 node->in_other_partition = bp_unpack_value (&bp, 1);
1148 if (node->in_other_partition)
1149 {
1150 DECL_EXTERNAL (node->decl) = 1;
1151 TREE_STATIC (node->decl) = 0;
1152 }
1153 aliases_p = bp_unpack_value (&bp, 1);
1154 if (node->finalized)
1155 varpool_mark_needed_node (node);
1156 ref = lto_input_sleb128 (ib);
1157 /* Store a reference for now, and fix up later to be a pointer. */
1158 node->same_comdat_group = (struct varpool_node *) (intptr_t) ref;
1159 node->resolution = (enum ld_plugin_symbol_resolution)lto_input_uleb128 (ib);
1160 if (aliases_p)
1161 {
1162 count = lto_input_uleb128 (ib);
1163 for (; count > 0; count --)
1164 {
1165 tree decl = lto_file_decl_data_get_var_decl (file_data,
1166 lto_input_uleb128 (ib));
1167 struct varpool_node *alias;
1168 alias = varpool_extra_name_alias (decl, var_decl);
1169 alias->resolution = (enum ld_plugin_symbol_resolution)lto_input_uleb128 (ib);
1170 }
1171 }
1172 return node;
1173 }
1174
1175 /* Read an IPA reference from input_block IB and record it for the
1176 referring cgraph or varpool node. */
1177
1178 static void
1179 input_ref (struct lto_input_block *ib,
1180 struct cgraph_node *refering_node,
1181 struct varpool_node *refering_varpool_node,
1182 VEC(cgraph_node_ptr, heap) *nodes,
1183 VEC(varpool_node_ptr, heap) *varpool_nodes)
1184 {
1185 struct cgraph_node *node = NULL;
1186 struct varpool_node *varpool_node = NULL;
1187 struct bitpack_d bp;
1188 enum ipa_ref_type type;
1189 enum ipa_ref_use use;
1190
1191 bp = lto_input_bitpack (ib);
1192 type = (enum ipa_ref_type) bp_unpack_value (&bp, 1);
1193 use = (enum ipa_ref_use) bp_unpack_value (&bp, 2);
1194 if (type == IPA_REF_CGRAPH)
1195 node = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1196 else
1197 varpool_node = VEC_index (varpool_node_ptr, varpool_nodes, lto_input_sleb128 (ib));
1198 ipa_record_reference (refering_node, refering_varpool_node,
1199 node, varpool_node, use, NULL);
1200 }
1201
1202 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1203 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1204 edge being read is indirect (in the sense that it has
1205 indirect_unknown_callee set). */
1206
1207 static void
1208 input_edge (struct lto_input_block *ib, VEC(cgraph_node_ptr, heap) *nodes,
1209 bool indirect)
1210 {
1211 struct cgraph_node *caller, *callee;
1212 struct cgraph_edge *edge;
1213 unsigned int stmt_id;
1214 gcov_type count;
1215 int freq;
1216 unsigned int nest;
1217 cgraph_inline_failed_t inline_failed;
1218 struct bitpack_d bp;
1219 int ecf_flags = 0;
1220 int call_stmt_time, call_stmt_size;
1221
1222 caller = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1223 if (caller == NULL || caller->decl == NULL_TREE)
1224 internal_error ("bytecode stream: no caller found while reading edge");
1225
1226 if (!indirect)
1227 {
1228 callee = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1229 if (callee == NULL || callee->decl == NULL_TREE)
1230 internal_error ("bytecode stream: no callee found while reading edge");
1231 }
1232 else
1233 callee = NULL;
1234
1235 count = (gcov_type) lto_input_sleb128 (ib);
1236
1237 bp = lto_input_bitpack (ib);
1238 stmt_id = (unsigned int) bp_unpack_value (&bp, HOST_BITS_PER_INT);
1239 inline_failed = (cgraph_inline_failed_t) bp_unpack_value (&bp,
1240 HOST_BITS_PER_INT);
1241 freq = (int) bp_unpack_value (&bp, HOST_BITS_PER_INT);
1242 call_stmt_size = (int) bp_unpack_value (&bp, HOST_BITS_PER_INT);
1243 call_stmt_time = (int) bp_unpack_value (&bp, HOST_BITS_PER_INT);
1244 nest = (unsigned) bp_unpack_value (&bp, 30);
1245
1246 if (indirect)
1247 edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq, nest);
1248 else
1249 edge = cgraph_create_edge (caller, callee, NULL, count, freq, nest);
1250
1251 edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
1252 edge->lto_stmt_uid = stmt_id;
1253 edge->inline_failed = inline_failed;
1254 edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
1255 edge->can_throw_external = bp_unpack_value (&bp, 1);
1256 edge->call_stmt_size = call_stmt_size;
1257 edge->call_stmt_time = call_stmt_time;
1258 if (indirect)
1259 {
1260 if (bp_unpack_value (&bp, 1))
1261 ecf_flags |= ECF_CONST;
1262 if (bp_unpack_value (&bp, 1))
1263 ecf_flags |= ECF_PURE;
1264 if (bp_unpack_value (&bp, 1))
1265 ecf_flags |= ECF_NORETURN;
1266 if (bp_unpack_value (&bp, 1))
1267 ecf_flags |= ECF_MALLOC;
1268 if (bp_unpack_value (&bp, 1))
1269 ecf_flags |= ECF_NOTHROW;
1270 if (bp_unpack_value (&bp, 1))
1271 ecf_flags |= ECF_RETURNS_TWICE;
1272 edge->indirect_info->ecf_flags = ecf_flags;
1273 }
1274 }
1275
1276
1277 /* Read a cgraph from IB using the info in FILE_DATA. */
1278
1279 static VEC(cgraph_node_ptr, heap) *
1280 input_cgraph_1 (struct lto_file_decl_data *file_data,
1281 struct lto_input_block *ib)
1282 {
1283 enum LTO_cgraph_tags tag;
1284 VEC(cgraph_node_ptr, heap) *nodes = NULL;
1285 struct cgraph_node *node;
1286 unsigned i;
1287 unsigned HOST_WIDE_INT len;
1288
1289 tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
1290 while (tag)
1291 {
1292 if (tag == LTO_cgraph_edge)
1293 input_edge (ib, nodes, false);
1294 else if (tag == LTO_cgraph_indirect_edge)
1295 input_edge (ib, nodes, true);
1296 else
1297 {
1298 node = input_node (file_data, ib, tag,nodes);
1299 if (node == NULL || node->decl == NULL_TREE)
1300 internal_error ("bytecode stream: found empty cgraph node");
1301 VEC_safe_push (cgraph_node_ptr, heap, nodes, node);
1302 lto_cgraph_encoder_encode (file_data->cgraph_node_encoder, node);
1303 }
1304
1305 tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
1306 }
1307
1308 /* Input toplevel asms. */
1309 len = lto_input_uleb128 (ib);
1310 while (len)
1311 {
1312 char *str = (char *)xmalloc (len + 1);
1313 for (i = 0; i < len; ++i)
1314 str[i] = lto_input_1_unsigned (ib);
1315 cgraph_add_asm_node (build_string (len, str));
1316 free (str);
1317
1318 len = lto_input_uleb128 (ib);
1319 }
1320 /* AUX pointers should be all non-zero for nodes read from the stream. */
1321 #ifdef ENABLE_CHECKING
1322 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1323 gcc_assert (node->aux);
1324 #endif
1325 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1326 {
1327 int ref = (int) (intptr_t) node->global.inlined_to;
1328
1329 /* We share declarations of builtins, so we may read the same node twice. */
1330 if (!node->aux)
1331 continue;
1332 node->aux = NULL;
1333
1334 /* Fixup inlined_to from reference to pointer. */
1335 if (ref != LCC_NOT_FOUND)
1336 node->global.inlined_to = VEC_index (cgraph_node_ptr, nodes, ref);
1337 else
1338 node->global.inlined_to = NULL;
1339
1340 ref = (int) (intptr_t) node->same_comdat_group;
1341
1342 /* Fixup same_comdat_group from reference to pointer. */
1343 if (ref != LCC_NOT_FOUND)
1344 node->same_comdat_group = VEC_index (cgraph_node_ptr, nodes, ref);
1345 else
1346 node->same_comdat_group = NULL;
1347 }
1348 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1349 node->aux = (void *)1;
1350 return nodes;
1351 }
1352
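/* Illustrative sketch (not part of the streamer, guarded out of the build):
   the two-pass fixup used above.  While nodes stream in, cross references
   such as inlined_to and same_comdat_group are stashed as small integers
   smuggled through the pointer fields; once the whole vector is available
   they are patched into real pointers.  NODES, NODE and REF are hypothetical
   here.  */
#if 0
static void
example_ref_fixup (VEC(cgraph_node_ptr, heap) *nodes,
		   struct cgraph_node *node, int ref)
{
  /* Pass 1: remember the reference read from the stream.  */
  node->same_comdat_group = (cgraph_node_ptr) (intptr_t) ref;

  /* Pass 2: resolve it once all nodes of the file are read.  */
  if (ref != LCC_NOT_FOUND)
    node->same_comdat_group = VEC_index (cgraph_node_ptr, nodes, ref);
  else
    node->same_comdat_group = NULL;
}
#endif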
1353 /* Read a varpool from IB using the info in FILE_DATA. */
1354
1355 static VEC(varpool_node_ptr, heap) *
1356 input_varpool_1 (struct lto_file_decl_data *file_data,
1357 struct lto_input_block *ib)
1358 {
1359 unsigned HOST_WIDE_INT len;
1360 VEC(varpool_node_ptr, heap) *varpool = NULL;
1361 int i;
1362 struct varpool_node *node;
1363
1364 len = lto_input_uleb128 (ib);
1365 while (len)
1366 {
1367 VEC_safe_push (varpool_node_ptr, heap, varpool,
1368 input_varpool_node (file_data, ib));
1369 len--;
1370 }
1371 #ifdef ENABLE_CHECKING
1372 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1373 gcc_assert (!node->aux);
1374 #endif
1375 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1376 {
1377 int ref = (int) (intptr_t) node->same_comdat_group;
1378 /* We share declarations of builtins, so we may read the same node twice. */
1379 if (node->aux)
1380 continue;
1381 node->aux = (void *)1;
1382
1383 /* Fixup same_comdat_group from reference to pointer. */
1384 if (ref != LCC_NOT_FOUND)
1385 node->same_comdat_group = VEC_index (varpool_node_ptr, varpool, ref);
1386 else
1387 node->same_comdat_group = NULL;
1388 }
1389 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1390 node->aux = NULL;
1391 return varpool;
1392 }
1393
1394 /* Input ipa_refs. */
1395
1396 static void
1397 input_refs (struct lto_input_block *ib,
1398 VEC(cgraph_node_ptr, heap) *nodes,
1399 VEC(varpool_node_ptr, heap) *varpool)
1400 {
1401 int count;
1402 int idx;
1403 while (true)
1404 {
1405 struct cgraph_node *node;
1406 count = lto_input_uleb128 (ib);
1407 if (!count)
1408 break;
1409 idx = lto_input_uleb128 (ib);
1410 node = VEC_index (cgraph_node_ptr, nodes, idx);
1411 while (count)
1412 {
1413 input_ref (ib, node, NULL, nodes, varpool);
1414 count--;
1415 }
1416 }
1417 while (true)
1418 {
1419 struct varpool_node *node;
1420 count = lto_input_uleb128 (ib);
1421 if (!count)
1422 break;
1423 node = VEC_index (varpool_node_ptr, varpool, lto_input_uleb128 (ib));
1424 while (count)
1425 {
1426 input_ref (ib, NULL, node, nodes, varpool);
1427 count--;
1428 }
1429 }
1430 }
1431
1432
1433 static struct gcov_ctr_summary lto_gcov_summary;
1434
1435 /* Input profile_info from IB. */
1436 static void
1437 input_profile_summary (struct lto_input_block *ib,
1438 struct lto_file_decl_data *file_data)
1439 {
1440 unsigned int runs = lto_input_uleb128 (ib);
1441 if (runs)
1442 {
1443 file_data->profile_info.runs = runs;
1444 file_data->profile_info.sum_max = lto_input_uleb128 (ib);
1445 }
1446
1447 }
1448
1449 /* Rescale profile summaries to the same number of runs in the whole unit. */
1450
1451 static void
1452 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1453 {
1454 struct lto_file_decl_data *file_data;
1455 unsigned int j;
1456 gcov_unsigned_t max_runs = 0;
1457 struct cgraph_node *node;
1458 struct cgraph_edge *edge;
1459
1460 /* Find the unit with the maximal number of runs. If we ever get serious about
1461 roundoff errors, we might also consider computing the least common
1462 multiple. */
1463 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1464 if (max_runs < file_data->profile_info.runs)
1465 max_runs = file_data->profile_info.runs;
1466
1467 if (!max_runs)
1468 return;
1469
1470 /* Simple overflow check. We probably don't need to support that many train
1471 runs. Such a large value probably implies data corruption anyway. */
1472 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1473 {
1474 sorry ("At most %i profile runs are supported. Perhaps corrupted profile?",
1475 INT_MAX / REG_BR_PROB_BASE);
1476 return;
1477 }
1478
1479 profile_info = &lto_gcov_summary;
1480 lto_gcov_summary.runs = max_runs;
1481 lto_gcov_summary.sum_max = 0;
1482
1483 /* Rescale all units to the maximal number of runs.
1484 sum_max can not be easily merged, as we have no idea what files come from
1485 the same run. We do not use the info anyway, so leave it 0. */
1486 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1487 if (file_data->profile_info.runs)
1488 {
1489 int scale = ((REG_BR_PROB_BASE * max_runs
1490 + file_data->profile_info.runs / 2)
1491 / file_data->profile_info.runs);
1492 lto_gcov_summary.sum_max = MAX (lto_gcov_summary.sum_max,
1493 (file_data->profile_info.sum_max
1494 * scale
1495 + REG_BR_PROB_BASE / 2)
1496 / REG_BR_PROB_BASE);
1497 }
1498
1499 /* Watch roundoff errors. */
1500 if (lto_gcov_summary.sum_max < max_runs)
1501 lto_gcov_summary.sum_max = max_runs;
1502
1503 /* If merging already happened at WPA time, we are done. */
1504 if (flag_ltrans)
1505 return;
1506
1507 /* Now compute count_materialization_scale of each node.
1508 During LTRANS we already have values of count_materialization_scale
1509 computed, so just update them. */
1510 for (node = cgraph_nodes; node; node = node->next)
1511 if (node->local.lto_file_data->profile_info.runs)
1512 {
1513 int scale;
1514
1515 scale =
1516 ((node->count_materialization_scale * max_runs
1517 + node->local.lto_file_data->profile_info.runs / 2)
1518 / node->local.lto_file_data->profile_info.runs);
1519 node->count_materialization_scale = scale;
1520 if (scale < 0)
1521 fatal_error ("Profile information in %s corrupted",
1522 file_data->file_name);
1523
1524 if (scale == REG_BR_PROB_BASE)
1525 continue;
1526 for (edge = node->callees; edge; edge = edge->next_callee)
1527 edge->count = ((edge->count * scale + REG_BR_PROB_BASE / 2)
1528 / REG_BR_PROB_BASE);
1529 node->count = ((node->count * scale + REG_BR_PROB_BASE / 2)
1530 / REG_BR_PROB_BASE);
1531 }
1532 }
1533
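/* Illustrative sketch (not part of the streamer, guarded out of the build):
   a worked instance of the fixed-point rescaling above, assuming
   REG_BR_PROB_BASE is 10000.  A unit trained with 1 run being merged into a
   whole program whose maximum is 4 runs gets scale = (10000 * 4 + 0) / 1 =
   40000, i.e. 4.0, and a count of 7 becomes (7 * 40000 + 5000) / 10000 = 28.  */
#if 0
static void
example_profile_rescale (void)
{
  gcov_unsigned_t max_runs = 4, runs = 1;
  int scale = (REG_BR_PROB_BASE * max_runs + runs / 2) / runs;
  gcov_type count = 7;
  count = (count * scale + REG_BR_PROB_BASE / 2) / REG_BR_PROB_BASE;
  gcc_assert (count == 28);
}
#endif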
1534 /* Input and merge the cgraph from each of the .o files passed to
1535 lto1. */
1536
1537 void
1538 input_cgraph (void)
1539 {
1540 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1541 struct lto_file_decl_data *file_data;
1542 unsigned int j = 0;
1543 struct cgraph_node *node;
1544
1545 while ((file_data = file_data_vec[j++]))
1546 {
1547 const char *data;
1548 size_t len;
1549 struct lto_input_block *ib;
1550 VEC(cgraph_node_ptr, heap) *nodes;
1551 VEC(varpool_node_ptr, heap) *varpool;
1552
1553 ib = lto_create_simple_input_block (file_data, LTO_section_cgraph,
1554 &data, &len);
1555 if (!ib)
1556 fatal_error ("cannot find LTO cgraph in %s", file_data->file_name);
1557 input_profile_summary (ib, file_data);
1558 file_data->cgraph_node_encoder = lto_cgraph_encoder_new ();
1559 nodes = input_cgraph_1 (file_data, ib);
1560 lto_destroy_simple_input_block (file_data, LTO_section_cgraph,
1561 ib, data, len);
1562
1563 ib = lto_create_simple_input_block (file_data, LTO_section_varpool,
1564 &data, &len);
1565 if (!ib)
1566 fatal_error ("cannot find LTO varpool in %s", file_data->file_name);
1567 varpool = input_varpool_1 (file_data, ib);
1568 lto_destroy_simple_input_block (file_data, LTO_section_varpool,
1569 ib, data, len);
1570
1571 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1572 &data, &len);
1573 if (!ib)
1574 fatal_error("cannot find LTO section refs in %s", file_data->file_name);
1575 input_refs (ib, nodes, varpool);
1576 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1577 ib, data, len);
1578 if (flag_ltrans)
1579 input_cgraph_opt_summary (nodes);
1580 VEC_free (cgraph_node_ptr, heap, nodes);
1581 VEC_free (varpool_node_ptr, heap, varpool);
1582 }
1583 merge_profile_summaries (file_data_vec);
1584
1585
1586 /* Clear out the aux field that was used to store enough state to
1587 tell which nodes should be overwritten. */
1588 for (node = cgraph_nodes; node; node = node->next)
1589 {
1590 /* Some nodes may have been created by cgraph_node. This
1591 happens when the callgraph contains nested functions. If the
1592 node for the parent function was never emitted to the gimple
1593 file, cgraph_node will create a node for it when setting the
1594 context of the nested function. */
1595 if (node->local.lto_file_data)
1596 node->aux = NULL;
1597 }
1598 }
1599
1600 /* True when we need optimization summary for NODE. */
1601
1602 static int
1603 output_cgraph_opt_summary_p (struct cgraph_node *node, cgraph_node_set set)
1604 {
1605 struct cgraph_edge *e;
1606
1607 if (cgraph_node_in_set_p (node, set))
1608 {
1609 for (e = node->callees; e; e = e->next_callee)
1610 if (e->indirect_info
1611 && e->indirect_info->thunk_delta != 0)
1612 return true;
1613
1614 for (e = node->indirect_calls; e; e = e->next_callee)
1615 if (e->indirect_info->thunk_delta != 0)
1616 return true;
1617 }
1618
1619 return (node->clone_of
1620 && (node->clone.tree_map
1621 || node->clone.args_to_skip
1622 || node->clone.combined_args_to_skip));
1623 }
1624
1625 /* Output optimization summary for EDGE to OB. */
1626 static void
1627 output_edge_opt_summary (struct output_block *ob,
1628 struct cgraph_edge *edge)
1629 {
1630 if (edge->indirect_info)
1631 lto_output_sleb128_stream (ob->main_stream,
1632 edge->indirect_info->thunk_delta);
1633 else
1634 lto_output_sleb128_stream (ob->main_stream, 0);
1635 }
1636
1637 /* Output optimization summary for NODE to OB. */
1638
1639 static void
1640 output_node_opt_summary (struct output_block *ob,
1641 struct cgraph_node *node,
1642 cgraph_node_set set)
1643 {
1644 unsigned int index;
1645 bitmap_iterator bi;
1646 struct ipa_replace_map *map;
1647 struct bitpack_d bp;
1648 int i;
1649 struct cgraph_edge *e;
1650
1651 lto_output_uleb128_stream (ob->main_stream,
1652 bitmap_count_bits (node->clone.args_to_skip));
1653 EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
1654 lto_output_uleb128_stream (ob->main_stream, index);
1655 lto_output_uleb128_stream (ob->main_stream,
1656 bitmap_count_bits (node->clone.combined_args_to_skip));
1657 EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
1658 lto_output_uleb128_stream (ob->main_stream, index);
1659 lto_output_uleb128_stream (ob->main_stream,
1660 VEC_length (ipa_replace_map_p, node->clone.tree_map));
1661 FOR_EACH_VEC_ELT (ipa_replace_map_p, node->clone.tree_map, i, map)
1662 {
1663 int parm_num;
1664 tree parm;
1665
1666 for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm;
1667 parm = DECL_CHAIN (parm), parm_num++)
1668 if (map->old_tree == parm)
1669 break;
1670 /* At the moment we assume all old trees to be PARM_DECLs, because we have no
1671 mechanism to store function local declarations into summaries. */
1672 gcc_assert (parm);
1673 lto_output_uleb128_stream (ob->main_stream, parm_num);
1674 lto_output_tree (ob, map->new_tree, true);
1675 bp = bitpack_create (ob->main_stream);
1676 bp_pack_value (&bp, map->replace_p, 1);
1677 bp_pack_value (&bp, map->ref_p, 1);
1678 lto_output_bitpack (&bp);
1679 }
1680
1681 if (cgraph_node_in_set_p (node, set))
1682 {
1683 for (e = node->callees; e; e = e->next_callee)
1684 output_edge_opt_summary (ob, e);
1685 for (e = node->indirect_calls; e; e = e->next_callee)
1686 output_edge_opt_summary (ob, e);
1687 }
1688 }
1689
1690 /* Output optimization summaries stored in callgraph.
1691 At the moment it is the clone info structure. */
1692
1693 static void
1694 output_cgraph_opt_summary (cgraph_node_set set)
1695 {
1696 struct cgraph_node *node;
1697 int i, n_nodes;
1698 lto_cgraph_encoder_t encoder;
1699 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1700 unsigned count = 0;
1701
1702 ob->cgraph_node = NULL;
1703 encoder = ob->decl_state->cgraph_node_encoder;
1704 n_nodes = lto_cgraph_encoder_size (encoder);
1705 for (i = 0; i < n_nodes; i++)
1706 if (output_cgraph_opt_summary_p (lto_cgraph_encoder_deref (encoder, i),
1707 set))
1708 count++;
1709 lto_output_uleb128_stream (ob->main_stream, count);
1710 for (i = 0; i < n_nodes; i++)
1711 {
1712 node = lto_cgraph_encoder_deref (encoder, i);
1713 if (output_cgraph_opt_summary_p (node, set))
1714 {
1715 lto_output_uleb128_stream (ob->main_stream, i);
1716 output_node_opt_summary (ob, node, set);
1717 }
1718 }
1719 produce_asm (ob, NULL);
1720 destroy_output_block (ob);
1721 }
1722
1723 /* Input optimization summary of EDGE. */
1724
1725 static void
1726 input_edge_opt_summary (struct cgraph_edge *edge,
1727 struct lto_input_block *ib_main)
1728 {
1729 HOST_WIDE_INT thunk_delta;
1730 thunk_delta = lto_input_sleb128 (ib_main);
1731 if (thunk_delta != 0)
1732 {
1733 gcc_assert (!edge->indirect_info);
1734 edge->indirect_info = cgraph_allocate_init_indirect_info ();
1735 edge->indirect_info->thunk_delta = thunk_delta;
1736 }
1737 }
1738
1739 /* Input optimization summary of NODE. */
1740
1741 static void
1742 input_node_opt_summary (struct cgraph_node *node,
1743 struct lto_input_block *ib_main,
1744 struct data_in *data_in)
1745 {
1746 int i;
1747 int count;
1748 int bit;
1749 struct bitpack_d bp;
1750 struct cgraph_edge *e;
1751
1752 count = lto_input_uleb128 (ib_main);
1753 if (count)
1754 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
1755 for (i = 0; i < count; i++)
1756 {
1757 bit = lto_input_uleb128 (ib_main);
1758 bitmap_set_bit (node->clone.args_to_skip, bit);
1759 }
1760 count = lto_input_uleb128 (ib_main);
1761 if (count)
1762 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
1763 for (i = 0; i < count; i++)
1764 {
1765 bit = lto_input_uleb128 (ib_main);
1766 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
1767 }
1768 count = lto_input_uleb128 (ib_main);
1769 for (i = 0; i < count; i++)
1770 {
1771 int parm_num;
1772 tree parm;
1773 struct ipa_replace_map *map = ggc_alloc_ipa_replace_map ();
1774
1775 VEC_safe_push (ipa_replace_map_p, gc, node->clone.tree_map, map);
1776 for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm_num;
1777 parm = DECL_CHAIN (parm))
1778 parm_num --;
1779 map->parm_num = lto_input_uleb128 (ib_main);
1780 map->old_tree = NULL;
1781 map->new_tree = lto_input_tree (ib_main, data_in);
1782 bp = lto_input_bitpack (ib_main);
1783 map->replace_p = bp_unpack_value (&bp, 1);
1784 map->ref_p = bp_unpack_value (&bp, 1);
1785 }
1786 for (e = node->callees; e; e = e->next_callee)
1787 input_edge_opt_summary (e, ib_main);
1788 for (e = node->indirect_calls; e; e = e->next_callee)
1789 input_edge_opt_summary (e, ib_main);
1790 }
1791
1792 /* Read section in file FILE_DATA of length LEN with data DATA. */
1793
1794 static void
1795 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
1796 const char *data, size_t len, VEC (cgraph_node_ptr,
1797 heap) * nodes)
1798 {
1799 const struct lto_function_header *header =
1800 (const struct lto_function_header *) data;
1801 const int32_t cfg_offset = sizeof (struct lto_function_header);
1802 const int32_t main_offset = cfg_offset + header->cfg_size;
1803 const int32_t string_offset = main_offset + header->main_size;
1804 struct data_in *data_in;
1805 struct lto_input_block ib_main;
1806 unsigned int i;
1807 unsigned int count;
1808
1809 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
1810 header->main_size);
1811
1812 data_in =
1813 lto_data_in_create (file_data, (const char *) data + string_offset,
1814 header->string_size, NULL);
1815 count = lto_input_uleb128 (&ib_main);
1816
1817 for (i = 0; i < count; i++)
1818 {
1819 int ref = lto_input_uleb128 (&ib_main);
1820 input_node_opt_summary (VEC_index (cgraph_node_ptr, nodes, ref),
1821 &ib_main, data_in);
1822 }
1823 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
1824 len);
1825 lto_data_in_delete (data_in);
1826 }
1827
1828 /* Input optimization summary of cgraph. */
1829
1830 static void
1831 input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes)
1832 {
1833 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1834 struct lto_file_decl_data *file_data;
1835 unsigned int j = 0;
1836
1837 while ((file_data = file_data_vec[j++]))
1838 {
1839 size_t len;
1840 const char *data =
1841 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
1842 &len);
1843
1844 if (data)
1845 input_cgraph_opt_section (file_data, data, len, nodes);
1846 }
1847 }