gcc/lto-cgraph.c
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright 2009, 2010 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "toplev.h"
28 #include "tree.h"
29 #include "expr.h"
30 #include "flags.h"
31 #include "params.h"
32 #include "input.h"
33 #include "hashtab.h"
34 #include "langhooks.h"
35 #include "basic-block.h"
36 #include "tree-flow.h"
37 #include "cgraph.h"
38 #include "function.h"
39 #include "ggc.h"
40 #include "diagnostic-core.h"
41 #include "except.h"
42 #include "vec.h"
43 #include "timevar.h"
44 #include "output.h"
45 #include "pointer-set.h"
46 #include "lto-streamer.h"
47 #include "gcov-io.h"
48
49 static void output_varpool (cgraph_node_set, varpool_node_set);
50 static void output_cgraph_opt_summary (void);
51 static void input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes);
52
53
54 /* Cgraph streaming is organized as a set of records whose type
55 is indicated by a tag. */
56 enum LTO_cgraph_tags
57 {
58 /* Must leave 0 for the stopper. */
59
60 /* Cgraph node without body available. */
61 LTO_cgraph_unavail_node = 1,
62 /* Cgraph node with function body. */
63 LTO_cgraph_analyzed_node,
64 /* Cgraph edges. */
65 LTO_cgraph_edge,
66 LTO_cgraph_indirect_edge
67 };
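
/* A cgraph section is, roughly, a profile summary followed by a
   sequence of tagged records: one node record per encoded node
   (LTO_cgraph_unavail_node or LTO_cgraph_analyzed_node), then the
   outgoing edges of the nodes being written (LTO_cgraph_edge or
   LTO_cgraph_indirect_edge), terminated by a zero tag; see
   output_cgraph and input_cgraph_1 below.  */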
68
69 /* Create a new cgraph encoder. */
70
71 lto_cgraph_encoder_t
72 lto_cgraph_encoder_new (void)
73 {
74 lto_cgraph_encoder_t encoder = XCNEW (struct lto_cgraph_encoder_d);
75 encoder->map = pointer_map_create ();
76 encoder->nodes = NULL;
77 encoder->body = pointer_set_create ();
78 return encoder;
79 }
80
81
82 /* Delete ENCODER and its components. */
83
84 void
85 lto_cgraph_encoder_delete (lto_cgraph_encoder_t encoder)
86 {
87 VEC_free (cgraph_node_ptr, heap, encoder->nodes);
88 pointer_map_destroy (encoder->map);
89 pointer_set_destroy (encoder->body);
90 free (encoder);
91 }
92
93
94 /* Return the existing reference number of NODE in the cgraph
95 ENCODER. Assign a new reference if this is the first time
96 NODE is encoded. */
97
98 int
99 lto_cgraph_encoder_encode (lto_cgraph_encoder_t encoder,
100 struct cgraph_node *node)
101 {
102 int ref;
103 void **slot;
104
105 slot = pointer_map_contains (encoder->map, node);
106 if (!slot)
107 {
108 ref = VEC_length (cgraph_node_ptr, encoder->nodes);
109 slot = pointer_map_insert (encoder->map, node);
110 *slot = (void *) (intptr_t) ref;
111 VEC_safe_push (cgraph_node_ptr, heap, encoder->nodes, node);
112 }
113 else
114 ref = (int) (intptr_t) *slot;
115
116 return ref;
117 }
118
119 #define LCC_NOT_FOUND (-1)
120
121 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
122 or LCC_NOT_FOUND if it is not there. */
123
124 int
125 lto_cgraph_encoder_lookup (lto_cgraph_encoder_t encoder,
126 struct cgraph_node *node)
127 {
128 void **slot = pointer_map_contains (encoder->map, node);
129 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
130 }
131
132
133 /* Return the cgraph node corresponding to REF using ENCODER. */
134
135 struct cgraph_node *
136 lto_cgraph_encoder_deref (lto_cgraph_encoder_t encoder, int ref)
137 {
138 if (ref == LCC_NOT_FOUND)
139 return NULL;
140
141 return VEC_index (cgraph_node_ptr, encoder->nodes, ref);
142 }
143
144
145 /* Return TRUE if we should encode the body of NODE (if any). */
146
147 bool
148 lto_cgraph_encoder_encode_body_p (lto_cgraph_encoder_t encoder,
149 struct cgraph_node *node)
150 {
151 return pointer_set_contains (encoder->body, node);
152 }
153
154 /* Record that we should encode the body of NODE. */
155
156 static void
157 lto_set_cgraph_encoder_encode_body (lto_cgraph_encoder_t encoder,
158 struct cgraph_node *node)
159 {
160 pointer_set_insert (encoder->body, node);
161 }
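
/* A minimal usage sketch for the cgraph encoder API above (not part
   of this file's logic; node stands for some struct cgraph_node *):

     lto_cgraph_encoder_t enc = lto_cgraph_encoder_new ();
     int ref = lto_cgraph_encoder_encode (enc, node);
     gcc_assert (lto_cgraph_encoder_lookup (enc, node) == ref);
     gcc_assert (lto_cgraph_encoder_deref (enc, ref) == node);
     lto_cgraph_encoder_delete (enc);  */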
162
163 /* Create a new varpool encoder. */
164
165 lto_varpool_encoder_t
166 lto_varpool_encoder_new (void)
167 {
168 lto_varpool_encoder_t encoder = XCNEW (struct lto_varpool_encoder_d);
169 encoder->map = pointer_map_create ();
170 encoder->initializer = pointer_set_create ();
171 encoder->nodes = NULL;
172 return encoder;
173 }
174
175
176 /* Delete ENCODER and its components. */
177
178 void
179 lto_varpool_encoder_delete (lto_varpool_encoder_t encoder)
180 {
181 VEC_free (varpool_node_ptr, heap, encoder->nodes);
182 pointer_map_destroy (encoder->map);
183 pointer_set_destroy (encoder->initializer);
184 free (encoder);
185 }
186
187
188 /* Return the existing reference number of NODE in the varpool
189 ENCODER. Assign a new reference if this is the first time
190 NODE is encoded. */
191
192 int
193 lto_varpool_encoder_encode (lto_varpool_encoder_t encoder,
194 struct varpool_node *node)
195 {
196 int ref;
197 void **slot;
198
199 slot = pointer_map_contains (encoder->map, node);
200 if (!slot)
201 {
202 ref = VEC_length (varpool_node_ptr, encoder->nodes);
203 slot = pointer_map_insert (encoder->map, node);
204 *slot = (void *) (intptr_t) ref;
205 VEC_safe_push (varpool_node_ptr, heap, encoder->nodes, node);
206 }
207 else
208 ref = (int) (intptr_t) *slot;
209
210 return ref;
211 }
212
213 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
214 or LCC_NOT_FOUND if it is not there. */
215
216 int
217 lto_varpool_encoder_lookup (lto_varpool_encoder_t encoder,
218 struct varpool_node *node)
219 {
220 void **slot = pointer_map_contains (encoder->map, node);
221 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
222 }
223
224
225 /* Return the varpool node corresponding to REF using ENCODER. */
226
227 struct varpool_node *
228 lto_varpool_encoder_deref (lto_varpool_encoder_t encoder, int ref)
229 {
230 if (ref == LCC_NOT_FOUND)
231 return NULL;
232
233 return VEC_index (varpool_node_ptr, encoder->nodes, ref);
234 }
235
236
237 /* Return number of encoded nodes in ENCODER. */
238
239 static int
240 lto_varpool_encoder_size (lto_varpool_encoder_t encoder)
241 {
242 return VEC_length (varpool_node_ptr, encoder->nodes);
243 }
244
245 /* Return TRUE if we should encode initializer of NODE (if any). */
246
247 bool
248 lto_varpool_encoder_encode_initializer_p (lto_varpool_encoder_t encoder,
249 struct varpool_node *node)
250 {
251 return pointer_set_contains (encoder->initializer, node);
252 }
253
254 /* Record that we should encode the initializer of NODE. */
255
256 static void
257 lto_set_varpool_encoder_encode_initializer (lto_varpool_encoder_t encoder,
258 struct varpool_node *node)
259 {
260 pointer_set_insert (encoder->initializer, node);
261 }
262
263 /* Output the cgraph EDGE to OB using ENCODER. */
264
265 static void
266 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
267 lto_cgraph_encoder_t encoder)
268 {
269 unsigned int uid;
270 intptr_t ref;
271 struct bitpack_d *bp;
272
273 if (edge->indirect_unknown_callee)
274 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_indirect_edge);
275 else
276 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_edge);
277
278 ref = lto_cgraph_encoder_lookup (encoder, edge->caller);
279 gcc_assert (ref != LCC_NOT_FOUND);
280 lto_output_sleb128_stream (ob->main_stream, ref);
281
282 if (!edge->indirect_unknown_callee)
283 {
284 ref = lto_cgraph_encoder_lookup (encoder, edge->callee);
285 gcc_assert (ref != LCC_NOT_FOUND);
286 lto_output_sleb128_stream (ob->main_stream, ref);
287 }
288
289 lto_output_sleb128_stream (ob->main_stream, edge->count);
290
291 bp = bitpack_create ();
292 uid = flag_wpa ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt);
293 bp_pack_value (bp, uid, HOST_BITS_PER_INT);
294 bp_pack_value (bp, edge->inline_failed, HOST_BITS_PER_INT);
295 bp_pack_value (bp, edge->frequency, HOST_BITS_PER_INT);
296 bp_pack_value (bp, edge->loop_nest, 30);
297 bp_pack_value (bp, edge->indirect_inlining_edge, 1);
298 bp_pack_value (bp, edge->call_stmt_cannot_inline_p, 1);
299 bp_pack_value (bp, edge->can_throw_external, 1);
300 if (edge->indirect_unknown_callee)
301 {
302 int flags = edge->indirect_info->ecf_flags;
303 bp_pack_value (bp, (flags & ECF_CONST) != 0, 1);
304 bp_pack_value (bp, (flags & ECF_PURE) != 0, 1);
305 bp_pack_value (bp, (flags & ECF_NORETURN) != 0, 1);
306 bp_pack_value (bp, (flags & ECF_MALLOC) != 0, 1);
307 bp_pack_value (bp, (flags & ECF_NOTHROW) != 0, 1);
308 bp_pack_value (bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
309 /* Flags that should not appear on indirect calls. */
310 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
311 | ECF_MAY_BE_ALLOCA
312 | ECF_SIBCALL
313 | ECF_NOVOPS)));
314 }
315 lto_output_bitpack (ob->main_stream, bp);
316 bitpack_delete (bp);
317 }
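
/* For reference, the edge record written above is: the tag (uleb128),
   the caller reference (sleb128), the callee reference (sleb128,
   direct edges only), the execution count (sleb128) and a bitpack
   holding the statement uid, inline_failed and frequency
   (HOST_BITS_PER_INT each), loop_nest (30 bits),
   indirect_inlining_edge, call_stmt_cannot_inline_p and
   can_throw_external (1 bit each), plus six ECF flag bits for
   indirect edges.  input_edge must unpack the fields in exactly this
   order.  */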
318
319 /* Return true if LIST contains references from other partitions. */
320
321 bool
322 referenced_from_other_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
323 varpool_node_set vset)
324 {
325 int i;
326 struct ipa_ref *ref;
327 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
328 {
329 if (ref->refering_type == IPA_REF_CGRAPH)
330 {
331 if (!cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
332 return true;
333 }
334 else
335 {
336 if (!varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
337 vset))
338 return true;
339 }
340 }
341 return false;
342 }
343
344 /* Return true when NODE is reachable from another partition. */
345
346 bool
347 reachable_from_other_partition_p (struct cgraph_node *node, cgraph_node_set set)
348 {
349 struct cgraph_edge *e;
350 if (!node->analyzed)
351 return false;
352 if (node->global.inlined_to)
353 return false;
354 for (e = node->callers; e; e = e->next_caller)
355 if (!cgraph_node_in_set_p (e->caller, set))
356 return true;
357 return false;
358 }
359
360 /* Return true if LIST contains references from this partition. */
361
362 bool
363 referenced_from_this_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
364 varpool_node_set vset)
365 {
366 int i;
367 struct ipa_ref *ref;
368 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
369 {
370 if (ref->refering_type == IPA_REF_CGRAPH)
371 {
372 if (cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
373 return true;
374 }
375 else
376 {
377 if (varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
378 vset))
379 return true;
380 }
381 }
382 return false;
383 }
384
385 /* Return true when NODE is reachable from this partition. */
386
387 bool
388 reachable_from_this_partition_p (struct cgraph_node *node, cgraph_node_set set)
389 {
390 struct cgraph_edge *e;
391 if (!node->analyzed)
392 return false;
393 if (node->global.inlined_to)
394 return false;
395 for (e = node->callers; e; e = e->next_caller)
396 if (cgraph_node_in_set_p (e->caller, set))
397 return true;
398 return false;
399 }
400
401 /* Output the cgraph NODE to OB. ENCODER is used to find the
402 reference number of NODE->inlined_to. SET is the set of cgraph
403 nodes and VSET the set of varpool nodes we are writing to the
404 current file. If NODE is not in SET, then NODE is a boundary of a
405 cgraph_node_set and we pretend NODE just has a decl and no
406 callees. The clone_of ancestry is streamed as a reference so that
407 clones can be reattached to their origin when read back. */
408
409 static void
410 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
411 lto_cgraph_encoder_t encoder, cgraph_node_set set,
412 varpool_node_set vset)
413 {
414 unsigned int tag;
415 struct bitpack_d *bp;
416 bool boundary_p;
417 intptr_t ref;
418 bool in_other_partition = false;
419 struct cgraph_node *clone_of;
420
421 boundary_p = !cgraph_node_in_set_p (node, set);
422
423 if (node->analyzed && !boundary_p)
424 tag = LTO_cgraph_analyzed_node;
425 else
426 tag = LTO_cgraph_unavail_node;
427
428 lto_output_uleb128_stream (ob->main_stream, tag);
429
430 /* In WPA mode, we only output part of the call-graph. Also, we
431 fake cgraph node attributes. There are two cases we care about.
432
433 Boundary nodes: There are nodes that are not part of SET but are
434 called from within SET. We artificially make them look like
435 externally visible nodes with no function body.
436
437 Cherry-picked nodes: These are nodes we pulled from other
438 translation units into SET during IPA-inlining. We turn them into
439 local static nodes to prevent clashes with other local statics. */
440 if (boundary_p && node->analyzed)
441 {
442 /* Inline clones cannot be part of the boundary.
443 gcc_assert (!node->global.inlined_to);
444
445 FIXME: At the moment they can be, when a partition contains an inline
446 clone that is a clone of an inline clone from outside the partition.
447 We can reshape the clone tree and make another node the root, but it
448 needs a bit of extra work and will be promptly done by
449 cgraph_remove_node after reading back. */
450 in_other_partition = 1;
451 }
452
453 clone_of = node->clone_of;
454 while (clone_of
455 && (ref = lto_cgraph_encoder_lookup (encoder, node->clone_of)) == LCC_NOT_FOUND)
456 if (clone_of->prev_sibling_clone)
457 clone_of = clone_of->prev_sibling_clone;
458 else
459 clone_of = clone_of->clone_of;
460 if (!clone_of)
461 lto_output_sleb128_stream (ob->main_stream, LCC_NOT_FOUND);
462 else
463 lto_output_sleb128_stream (ob->main_stream, ref);
464
465
466 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
467 lto_output_sleb128_stream (ob->main_stream, node->count);
468
469 bp = bitpack_create ();
470 bp_pack_value (bp, node->local.local, 1);
471 bp_pack_value (bp, node->local.externally_visible, 1);
472 bp_pack_value (bp, node->local.finalized, 1);
473 bp_pack_value (bp, node->local.inlinable, 1);
474 bp_pack_value (bp, node->local.versionable, 1);
475 bp_pack_value (bp, node->local.disregard_inline_limits, 1);
476 bp_pack_value (bp, node->local.redefined_extern_inline, 1);
477 bp_pack_value (bp, node->local.vtable_method, 1);
478 bp_pack_value (bp, node->needed, 1);
479 bp_pack_value (bp, node->address_taken, 1);
480 bp_pack_value (bp, node->abstract_and_needed, 1);
481 bp_pack_value (bp, tag == LTO_cgraph_analyzed_node
482 && !DECL_EXTERNAL (node->decl)
483 && (reachable_from_other_partition_p (node, set)
484 || referenced_from_other_partition_p (&node->ref_list, set, vset)), 1);
485 bp_pack_value (bp, node->lowered, 1);
486 bp_pack_value (bp, in_other_partition, 1);
487 bp_pack_value (bp, node->alias, 1);
488 bp_pack_value (bp, node->finalized_by_frontend, 1);
489 bp_pack_value (bp, node->frequency, 2);
490 lto_output_bitpack (ob->main_stream, bp);
491 bitpack_delete (bp);
492
493 if (tag == LTO_cgraph_analyzed_node)
494 {
495 lto_output_sleb128_stream (ob->main_stream,
496 node->local.inline_summary.estimated_self_stack_size);
497 lto_output_sleb128_stream (ob->main_stream,
498 node->local.inline_summary.self_size);
499 lto_output_sleb128_stream (ob->main_stream,
500 node->local.inline_summary.size_inlining_benefit);
501 lto_output_sleb128_stream (ob->main_stream,
502 node->local.inline_summary.self_time);
503 lto_output_sleb128_stream (ob->main_stream,
504 node->local.inline_summary.time_inlining_benefit);
505 if (node->global.inlined_to)
506 {
507 ref = lto_cgraph_encoder_lookup (encoder, node->global.inlined_to);
508 gcc_assert (ref != LCC_NOT_FOUND);
509 }
510 else
511 ref = LCC_NOT_FOUND;
512
513 lto_output_sleb128_stream (ob->main_stream, ref);
514 }
515
516 if (node->same_comdat_group && !boundary_p)
517 {
518 ref = lto_cgraph_encoder_lookup (encoder, node->same_comdat_group);
519 gcc_assert (ref != LCC_NOT_FOUND);
520 }
521 else
522 ref = LCC_NOT_FOUND;
523 lto_output_sleb128_stream (ob->main_stream, ref);
524
525 if (node->same_body)
526 {
527 struct cgraph_node *alias;
528 unsigned long alias_count = 1;
529 for (alias = node->same_body; alias->next; alias = alias->next)
530 alias_count++;
531 lto_output_uleb128_stream (ob->main_stream, alias_count);
532 do
533 {
534 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
535 alias->decl);
536 if (alias->thunk.thunk_p)
537 {
538 lto_output_uleb128_stream
539 (ob->main_stream,
540 1 + (alias->thunk.this_adjusting != 0) * 2
541 + (alias->thunk.virtual_offset_p != 0) * 4);
542 lto_output_uleb128_stream (ob->main_stream,
543 alias->thunk.fixed_offset);
544 lto_output_uleb128_stream (ob->main_stream,
545 alias->thunk.virtual_value);
546 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
547 alias->thunk.alias);
548 }
549 else
550 {
551 lto_output_uleb128_stream (ob->main_stream, 0);
552 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
553 alias->thunk.alias);
554 }
555 alias = alias->previous;
556 }
557 while (alias);
558 }
559 else
560 lto_output_uleb128_stream (ob->main_stream, 0);
561 }
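
/* Note the encoding of same-body aliases and thunks used above: an
   alias count (zero when there are none) and, for each entry, its
   decl index followed by a code that is 0 for a plain alias and
   1 + this_adjusting * 2 + virtual_offset_p * 4 for a thunk, in which
   case the fixed offset, the virtual value and the decl of the
   function being aliased follow.  input_node decodes the code the
   same way, testing type & 2 and type & 4.  */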
562
563 /* Output the varpool NODE to OB using VARPOOL_ENCODER.
564 If NODE is not in VSET, then NODE is a boundary. */
565
566 static void
567 lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
568 lto_varpool_encoder_t varpool_encoder,
569 cgraph_node_set set, varpool_node_set vset)
570 {
571 bool boundary_p = !varpool_node_in_set_p (node, vset) && node->analyzed;
572 struct bitpack_d *bp;
573 struct varpool_node *alias;
574 int count = 0;
575 int ref;
576
577 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
578 bp = bitpack_create ();
579 bp_pack_value (bp, node->externally_visible, 1);
580 bp_pack_value (bp, node->force_output, 1);
581 bp_pack_value (bp, node->finalized, 1);
582 bp_pack_value (bp, node->alias, 1);
583 gcc_assert (!node->alias || !node->extra_name);
584 gcc_assert (node->finalized || !node->analyzed);
585 gcc_assert (node->needed);
586 /* Constant pool initializers can be de-unified into individual ltrans units.
587 FIXME: Alternatively, at -Os we may want to avoid generating the local
588 labels for them and instead share them across LTRANS partitions. */
589 if (DECL_IN_CONSTANT_POOL (node->decl))
590 {
591 bp_pack_value (bp, 0, 1); /* used_from_other_partition. */
592 bp_pack_value (bp, 0, 1); /* in_other_partition. */
593 }
594 else
595 {
596 bp_pack_value (bp, node->analyzed
597 && referenced_from_other_partition_p (&node->ref_list,
598 set, vset), 1);
599 bp_pack_value (bp, boundary_p, 1); /* in_other_partition. */
600 }
601 /* Also emit any extra name aliases. */
602 for (alias = node->extra_name; alias; alias = alias->next)
603 count++;
604 bp_pack_value (bp, count != 0, 1);
605 lto_output_bitpack (ob->main_stream, bp);
606 bitpack_delete (bp);
607 if (node->same_comdat_group && !boundary_p)
608 {
609 ref = lto_varpool_encoder_lookup (varpool_encoder, node->same_comdat_group);
610 gcc_assert (ref != LCC_NOT_FOUND);
611 }
612 else
613 ref = LCC_NOT_FOUND;
614 lto_output_sleb128_stream (ob->main_stream, ref);
615
616 if (count)
617 {
618 lto_output_uleb128_stream (ob->main_stream, count);
619 for (alias = node->extra_name; alias; alias = alias->next)
620 lto_output_var_decl_index (ob->decl_state, ob->main_stream, alias->decl);
621 }
622 }
623
624 /* Output the reference REF to OB, using ENCODER and
625 VARPOOL_ENCODER to look up the node it refers to. */
626
627 static void
628 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
629 lto_cgraph_encoder_t encoder,
630 lto_varpool_encoder_t varpool_encoder)
631 {
632 struct bitpack_d *bp = bitpack_create ();
633 bp_pack_value (bp, ref->refered_type, 1);
634 bp_pack_value (bp, ref->use, 2);
635 lto_output_bitpack (ob->main_stream, bp);
636 bitpack_delete (bp);
637 if (ref->refered_type == IPA_REF_CGRAPH)
638 {
639 int nref = lto_cgraph_encoder_lookup (encoder, ipa_ref_node (ref));
640 gcc_assert (nref != LCC_NOT_FOUND);
641 lto_output_sleb128_stream (ob->main_stream, nref);
642 }
643 else
644 {
645 int nref = lto_varpool_encoder_lookup (varpool_encoder,
646 ipa_ref_varpool_node (ref));
647 gcc_assert (nref != LCC_NOT_FOUND);
648 lto_output_sleb128_stream (ob->main_stream, nref);
649 }
650 }
651
652 /* Stream out profile_summary to OB. */
653
654 static void
655 output_profile_summary (struct lto_simple_output_block *ob)
656 {
657 if (profile_info)
658 {
659 /* We do not output num, it is not terribly useful. */
660 gcc_assert (profile_info->runs);
661 lto_output_uleb128_stream (ob->main_stream, profile_info->runs);
662 lto_output_sleb128_stream (ob->main_stream, profile_info->sum_all);
663 lto_output_sleb128_stream (ob->main_stream, profile_info->run_max);
664 lto_output_sleb128_stream (ob->main_stream, profile_info->sum_max);
665 }
666 else
667 lto_output_uleb128_stream (ob->main_stream, 0);
668 }
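
/* The profile summary is thus the number of runs (uleb128; a single
   0 is written when there is no profile data) followed by sum_all,
   run_max and sum_max (sleb128 each), mirrored by
   input_profile_summary below.  */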
669
670 /* Add NODE into ENCODER as well as the nodes it is cloned from.
671 Do it so that clone origins are encoded before their clones. */
672
673 static void
674 add_node_to (lto_cgraph_encoder_t encoder, struct cgraph_node *node,
675 bool include_body)
676 {
677 if (node->clone_of)
678 add_node_to (encoder, node->clone_of, include_body);
679 else if (include_body)
680 lto_set_cgraph_encoder_encode_body (encoder, node);
681 lto_cgraph_encoder_encode (encoder, node);
682 }
683
684 /* Add all references in LIST to encoders. */
685
686 static void
687 add_references (lto_cgraph_encoder_t encoder,
688 lto_varpool_encoder_t varpool_encoder,
689 struct ipa_ref_list *list)
690 {
691 int i;
692 struct ipa_ref *ref;
693 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
694 if (ref->refered_type == IPA_REF_CGRAPH)
695 add_node_to (encoder, ipa_ref_node (ref), false);
696 else
697 {
698 struct varpool_node *vnode = ipa_ref_varpool_node (ref);
699 lto_varpool_encoder_encode (varpool_encoder, vnode);
700 }
701 }
702
703 /* Output all callees or indirect outgoing edges. EDGE must be the first such
704 edge. */
705
706 static void
707 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
708 struct lto_simple_output_block *ob,
709 lto_cgraph_encoder_t encoder)
710 {
711 if (!edge)
712 return;
713
714 /* Output edges in backward direction, so the reconstructed callgraph matches
715 the original and it is easy to associate call sites in the IPA pass summaries. */
716 while (edge->next_callee)
717 edge = edge->next_callee;
718 for (; edge; edge = edge->prev_callee)
719 lto_output_edge (ob, edge, encoder);
720 }
721
722 /* Output the IPA reference lists of the nodes in SET and VSET. */
723
724 static void
725 output_refs (cgraph_node_set set, varpool_node_set vset,
726 lto_cgraph_encoder_t encoder,
727 lto_varpool_encoder_t varpool_encoder)
728 {
729 cgraph_node_set_iterator csi;
730 varpool_node_set_iterator vsi;
731 struct lto_simple_output_block *ob;
732 int count;
733 struct ipa_ref *ref;
734 int i;
735
736 ob = lto_create_simple_output_block (LTO_section_refs);
737
738 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
739 {
740 struct cgraph_node *node = csi_node (csi);
741
742 count = ipa_ref_list_nreferences (&node->ref_list);
743 if (count)
744 {
745 lto_output_uleb128_stream (ob->main_stream, count);
746 lto_output_uleb128_stream (ob->main_stream,
747 lto_cgraph_encoder_lookup (encoder, node));
748 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
749 lto_output_ref (ob, ref, encoder, varpool_encoder);
750 }
751 }
752
753 lto_output_uleb128_stream (ob->main_stream, 0);
754
755 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
756 {
757 struct varpool_node *node = vsi_node (vsi);
758
759 count = ipa_ref_list_nreferences (&node->ref_list);
760 if (count)
761 {
762 lto_output_uleb128_stream (ob->main_stream, count);
763 lto_output_uleb128_stream (ob->main_stream,
764 lto_varpool_encoder_lookup (varpool_encoder,
765 node));
766 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
767 lto_output_ref (ob, ref, encoder, varpool_encoder);
768 }
769 }
770
771 lto_output_uleb128_stream (ob->main_stream, 0);
772
773 lto_destroy_simple_output_block (ob);
774 }
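
/* The refs section written above is a list of blocks of the form
   (reference count, owner index, references...), first for cgraph
   nodes and then for varpool nodes, each list terminated by a zero
   count; input_refs reads it back in the same order.  */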
775
776 /* Find out all cgraph and varpool nodes we want to encode in the current
777 unit and insert them into the encoders. */
778 void
779 compute_ltrans_boundary (struct lto_out_decl_state *state,
780 cgraph_node_set set, varpool_node_set vset)
781 {
782 struct cgraph_node *node;
783 cgraph_node_set_iterator csi;
784 varpool_node_set_iterator vsi;
785 struct cgraph_edge *edge;
786 int i;
787 lto_cgraph_encoder_t encoder;
788 lto_varpool_encoder_t varpool_encoder;
789
790 encoder = state->cgraph_node_encoder = lto_cgraph_encoder_new ();
791 varpool_encoder = state->varpool_node_encoder = lto_varpool_encoder_new ();
792
793 /* Go over all the nodes in SET and assign references. */
794 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
795 {
796 node = csi_node (csi);
797 add_node_to (encoder, node, true);
798 add_references (encoder, varpool_encoder, &node->ref_list);
799 }
800 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
801 {
802 struct varpool_node *vnode = vsi_node (vsi);
803 gcc_assert (!vnode->alias);
804 lto_varpool_encoder_encode (varpool_encoder, vnode);
805 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
806 add_references (encoder, varpool_encoder, &vnode->ref_list);
807 }
808 /* Also pickle the initializers of all referenced read-only variables
809 to help folding. Constant pool variables are not shared, so we must
810 pickle those too. */
811 for (i = 0; i < lto_varpool_encoder_size (varpool_encoder); i++)
812 {
813 struct varpool_node *vnode = lto_varpool_encoder_deref (varpool_encoder, i);
814 if (DECL_INITIAL (vnode->decl)
815 && !lto_varpool_encoder_encode_initializer_p (varpool_encoder,
816 vnode)
817 && (DECL_IN_CONSTANT_POOL (vnode->decl)
818 || TREE_READONLY (vnode->decl)))
819 {
820 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
821 add_references (encoder, varpool_encoder, &vnode->ref_list);
822 }
823 }
824
825 /* Go over all the nodes again to include callees that are not in
826 SET. */
827 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
828 {
829 node = csi_node (csi);
830 for (edge = node->callees; edge; edge = edge->next_callee)
831 {
832 struct cgraph_node *callee = edge->callee;
833 if (!cgraph_node_in_set_p (callee, set))
834 {
835 /* We should have moved all the inlines. */
836 gcc_assert (!callee->global.inlined_to);
837 add_node_to (encoder, callee, false);
838 }
839 }
840 }
841 }
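
/* Put differently, the boundary computed above consists of the nodes
   in SET together with their clone origins and bodies, everything
   they reference, the variables in VSET with their initializers, the
   initializers of referenced read-only and constant-pool variables,
   and finally the callees of SET that live outside it, the latter
   encoded without bodies.  */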
842
843 /* Output the part of the cgraph in SET. */
844
845 void
846 output_cgraph (cgraph_node_set set, varpool_node_set vset)
847 {
848 struct cgraph_node *node;
849 struct lto_simple_output_block *ob;
850 cgraph_node_set_iterator csi;
851 int i, n_nodes;
852 lto_cgraph_encoder_t encoder;
853 lto_varpool_encoder_t varpool_encoder;
854 struct cgraph_asm_node *can;
855
856 if (flag_wpa)
857 output_cgraph_opt_summary ();
858
859 ob = lto_create_simple_output_block (LTO_section_cgraph);
860
861 output_profile_summary (ob);
862
863 /* An encoder for cgraph nodes should have been created by
864 ipa_write_summaries_1. */
865 gcc_assert (ob->decl_state->cgraph_node_encoder);
866 gcc_assert (ob->decl_state->varpool_node_encoder);
867 encoder = ob->decl_state->cgraph_node_encoder;
868 varpool_encoder = ob->decl_state->varpool_node_encoder;
869
870 /* Write out the nodes. We must first output a node and then its clones,
871 otherwise when reading back a clone there would be nothing to clone
872 from. */
873 n_nodes = lto_cgraph_encoder_size (encoder);
874 for (i = 0; i < n_nodes; i++)
875 {
876 node = lto_cgraph_encoder_deref (encoder, i);
877 lto_output_node (ob, node, encoder, set, vset);
878 }
879
880 /* Go over the nodes in SET again to write edges. */
881 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
882 {
883 node = csi_node (csi);
884 output_outgoing_cgraph_edges (node->callees, ob, encoder);
885 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
886 }
887
888 lto_output_uleb128_stream (ob->main_stream, 0);
889
890 /* Emit toplevel asms. */
891 for (can = cgraph_asm_nodes; can; can = can->next)
892 {
893 int len = TREE_STRING_LENGTH (can->asm_str);
894 lto_output_uleb128_stream (ob->main_stream, len);
895 for (i = 0; i < len; ++i)
896 lto_output_1_stream (ob->main_stream,
897 TREE_STRING_POINTER (can->asm_str)[i]);
898 }
899
900 lto_output_uleb128_stream (ob->main_stream, 0);
901
902 lto_destroy_simple_output_block (ob);
903 output_varpool (set, vset);
904 output_refs (set, vset, encoder, varpool_encoder);
905 }
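
/* Aside from the optimization summary emitted in WPA mode, the
   output for one partition is therefore three sections: the cgraph
   section (profile summary, node records, edge records, a zero
   terminator, top-level asm strings and another zero), the varpool
   section from output_varpool and the refs section from output_refs.
   input_cgraph reads them back in the same order.  */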
906
907 /* Overwrite the information in NODE based on FILE_DATA, TAG, STACK_SIZE,
908 SELF_TIME, SELF_SIZE and the inlining benefits. This is called either to initialize
909 NODE or to replace the values in it, for instance because the first
910 time we saw it, the function body was not available but now it
911 is. BP is a bitpack with all the bitflags for NODE read from the
912 stream. */
913
914 static void
915 input_overwrite_node (struct lto_file_decl_data *file_data,
916 struct cgraph_node *node,
917 enum LTO_cgraph_tags tag,
918 struct bitpack_d *bp,
919 unsigned int stack_size,
920 unsigned int self_time,
921 unsigned int time_inlining_benefit,
922 unsigned int self_size,
923 unsigned int size_inlining_benefit)
924 {
925 node->aux = (void *) tag;
926 node->local.inline_summary.estimated_self_stack_size = stack_size;
927 node->local.inline_summary.self_time = self_time;
928 node->local.inline_summary.time_inlining_benefit = time_inlining_benefit;
929 node->local.inline_summary.self_size = self_size;
930 node->local.inline_summary.size_inlining_benefit = size_inlining_benefit;
931 node->global.time = self_time;
932 node->global.size = self_size;
933 node->global.estimated_stack_size = stack_size;
934 node->global.estimated_growth = INT_MIN;
935 node->local.lto_file_data = file_data;
936
937 node->local.local = bp_unpack_value (bp, 1);
938 node->local.externally_visible = bp_unpack_value (bp, 1);
939 node->local.finalized = bp_unpack_value (bp, 1);
940 node->local.inlinable = bp_unpack_value (bp, 1);
941 node->local.versionable = bp_unpack_value (bp, 1);
942 node->local.disregard_inline_limits = bp_unpack_value (bp, 1);
943 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
944 node->local.vtable_method = bp_unpack_value (bp, 1);
945 node->needed = bp_unpack_value (bp, 1);
946 node->address_taken = bp_unpack_value (bp, 1);
947 node->abstract_and_needed = bp_unpack_value (bp, 1);
948 node->reachable_from_other_partition = bp_unpack_value (bp, 1);
949 node->lowered = bp_unpack_value (bp, 1);
950 node->analyzed = tag == LTO_cgraph_analyzed_node;
951 node->in_other_partition = bp_unpack_value (bp, 1);
952 node->alias = bp_unpack_value (bp, 1);
953 node->finalized_by_frontend = bp_unpack_value (bp, 1);
954 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
955 }
956
957 /* Output the part of the varpool in SET and VSET. */
958
959 static void
960 output_varpool (cgraph_node_set set, varpool_node_set vset)
961 {
962 struct lto_simple_output_block *ob = lto_create_simple_output_block (LTO_section_varpool);
963 lto_varpool_encoder_t varpool_encoder = ob->decl_state->varpool_node_encoder;
964 int len = lto_varpool_encoder_size (varpool_encoder), i;
965
966 lto_output_uleb128_stream (ob->main_stream, len);
967
968 /* Write out the nodes in the order they were added to the encoder.
969 Unlike cgraph nodes, varpool nodes are not cloned, so no special
970 ordering is needed. */
971 for (i = 0; i < len; i++)
972 {
973 lto_output_varpool_node (ob, lto_varpool_encoder_deref (varpool_encoder, i),
974 varpool_encoder,
975 set, vset);
976 }
977
978 lto_destroy_simple_output_block (ob);
979 }
980
981 /* Read a cgraph node from input_block IB using FILE_DATA. TAG is the
982 node's tag just read. Return the node read or overwritten. */
983
984 static struct cgraph_node *
985 input_node (struct lto_file_decl_data *file_data,
986 struct lto_input_block *ib,
987 enum LTO_cgraph_tags tag,
988 VEC(cgraph_node_ptr, heap) *nodes)
989 {
990 tree fn_decl;
991 struct cgraph_node *node;
992 struct bitpack_d *bp;
993 int stack_size = 0;
994 unsigned decl_index;
995 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
996 int self_time = 0;
997 int self_size = 0;
998 int time_inlining_benefit = 0;
999 int size_inlining_benefit = 0;
1000 unsigned long same_body_count = 0;
1001 int clone_ref;
1002
1003 clone_ref = lto_input_sleb128 (ib);
1004
1005 decl_index = lto_input_uleb128 (ib);
1006 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1007
1008 if (clone_ref != LCC_NOT_FOUND)
1009 {
1010 node = cgraph_clone_node (VEC_index (cgraph_node_ptr, nodes, clone_ref), fn_decl,
1011 0, CGRAPH_FREQ_BASE, 0, false, NULL);
1012 }
1013 else
1014 node = cgraph_node (fn_decl);
1015
1016 node->count = lto_input_sleb128 (ib);
1017 bp = lto_input_bitpack (ib);
1018
1019 if (tag == LTO_cgraph_analyzed_node)
1020 {
1021 stack_size = lto_input_sleb128 (ib);
1022 self_size = lto_input_sleb128 (ib);
1023 size_inlining_benefit = lto_input_sleb128 (ib);
1024 self_time = lto_input_sleb128 (ib);
1025 time_inlining_benefit = lto_input_sleb128 (ib);
1026
1027 ref = lto_input_sleb128 (ib);
1028 }
1029
1030 ref2 = lto_input_sleb128 (ib);
1031 same_body_count = lto_input_uleb128 (ib);
1032
1033 /* Make sure that we have not read this node before. Nodes that
1034 have already been read will have their tag stored in the 'aux'
1035 field. Since built-in functions can be referenced in multiple
1036 functions, they are expected to be read more than once. */
1037 if (node->aux && !DECL_IS_BUILTIN (node->decl))
1038 internal_error ("bytecode stream: found multiple instances of cgraph "
1039 "node %d", node->uid);
1040
1041 input_overwrite_node (file_data, node, tag, bp, stack_size, self_time,
1042 time_inlining_benefit, self_size,
1043 size_inlining_benefit);
1044 bitpack_delete (bp);
1045
1046 /* Store a reference for now, and fix up later to be a pointer. */
1047 node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
1048
1049 /* Store a reference for now, and fix up later to be a pointer. */
1050 node->same_comdat_group = (cgraph_node_ptr) (intptr_t) ref2;
1051
1052 while (same_body_count-- > 0)
1053 {
1054 tree alias_decl;
1055 int type;
1056 decl_index = lto_input_uleb128 (ib);
1057 alias_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1058 type = lto_input_uleb128 (ib);
1059 if (!type)
1060 {
1061 tree real_alias;
1062 decl_index = lto_input_uleb128 (ib);
1063 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1064 cgraph_same_body_alias (alias_decl, real_alias);
1065 }
1066 else
1067 {
1068 HOST_WIDE_INT fixed_offset = lto_input_uleb128 (ib);
1069 HOST_WIDE_INT virtual_value = lto_input_uleb128 (ib);
1070 tree real_alias;
1071 decl_index = lto_input_uleb128 (ib);
1072 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1073 cgraph_add_thunk (alias_decl, fn_decl, type & 2, fixed_offset,
1074 virtual_value,
1075 (type & 4) ? size_int (virtual_value) : NULL_TREE,
1076 real_alias);
1077 }
1078 }
1079 return node;
1080 }
1081
1082 /* Read a varpool node from input_block IB using FILE_DATA.
1083 Return the node read or overwritten. */
1084
1085 static struct varpool_node *
1086 input_varpool_node (struct lto_file_decl_data *file_data,
1087 struct lto_input_block *ib)
1088 {
1089 int decl_index;
1090 tree var_decl;
1091 struct varpool_node *node;
1092 struct bitpack_d *bp;
1093 bool aliases_p;
1094 int count;
1095 int ref = LCC_NOT_FOUND;
1096
1097 decl_index = lto_input_uleb128 (ib);
1098 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1099 node = varpool_node (var_decl);
1100
1101 bp = lto_input_bitpack (ib);
1102 node->externally_visible = bp_unpack_value (bp, 1);
1103 node->force_output = bp_unpack_value (bp, 1);
1104 node->finalized = bp_unpack_value (bp, 1);
1105 node->alias = bp_unpack_value (bp, 1);
1106 node->analyzed = node->finalized;
1107 node->used_from_other_partition = bp_unpack_value (bp, 1);
1108 node->in_other_partition = bp_unpack_value (bp, 1);
1109 aliases_p = bp_unpack_value (bp, 1);
1110 if (node->finalized)
1111 varpool_mark_needed_node (node);
1112 bitpack_delete (bp);
1113 ref = lto_input_sleb128 (ib);
1114 /* Store a reference for now, and fix up later to be a pointer. */
1115 node->same_comdat_group = (struct varpool_node *) (intptr_t) ref;
1116 if (aliases_p)
1117 {
1118 count = lto_input_uleb128 (ib);
1119 for (; count > 0; count --)
1120 {
1121 tree decl = lto_file_decl_data_get_var_decl (file_data,
1122 lto_input_uleb128 (ib));
1123 varpool_extra_name_alias (decl, var_decl);
1124 }
1125 }
1126 return node;
1127 }
1128
1129 /* Read an IPA reference from input_block IB and record it for
1130 REFERING_NODE or REFERING_VARPOOL_NODE. */
1131
1132 static void
1133 input_ref (struct lto_input_block *ib,
1134 struct cgraph_node *refering_node,
1135 struct varpool_node *refering_varpool_node,
1136 VEC(cgraph_node_ptr, heap) *nodes,
1137 VEC(varpool_node_ptr, heap) *varpool_nodes)
1138 {
1139 struct cgraph_node *node = NULL;
1140 struct varpool_node *varpool_node = NULL;
1141 struct bitpack_d *bp;
1142 enum ipa_ref_type type;
1143 enum ipa_ref_use use;
1144
1145 bp = lto_input_bitpack (ib);
1146 type = (enum ipa_ref_type) bp_unpack_value (bp, 1);
1147 use = (enum ipa_ref_use) bp_unpack_value (bp, 2);
1148 bitpack_delete (bp);
1149 if (type == IPA_REF_CGRAPH)
1150 node = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1151 else
1152 varpool_node = VEC_index (varpool_node_ptr, varpool_nodes, lto_input_sleb128 (ib));
1153 ipa_record_reference (refering_node, refering_varpool_node,
1154 node, varpool_node, use, NULL);
1155 }
1156
1157 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1158 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1159 edge being read is indirect (in the sense that it has
1160 indirect_unknown_callee set). */
1161
1162 static void
1163 input_edge (struct lto_input_block *ib, VEC(cgraph_node_ptr, heap) *nodes,
1164 bool indirect)
1165 {
1166 struct cgraph_node *caller, *callee;
1167 struct cgraph_edge *edge;
1168 unsigned int stmt_id;
1169 gcov_type count;
1170 int freq;
1171 unsigned int nest;
1172 cgraph_inline_failed_t inline_failed;
1173 struct bitpack_d *bp;
1174 enum ld_plugin_symbol_resolution caller_resolution;
1175 int ecf_flags = 0;
1176
1177 caller = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1178 if (caller == NULL || caller->decl == NULL_TREE)
1179 internal_error ("bytecode stream: no caller found while reading edge");
1180
1181 if (!indirect)
1182 {
1183 callee = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1184 if (callee == NULL || callee->decl == NULL_TREE)
1185 internal_error ("bytecode stream: no callee found while reading edge");
1186 }
1187 else
1188 callee = NULL;
1189
1190 count = (gcov_type) lto_input_sleb128 (ib);
1191
1192 bp = lto_input_bitpack (ib);
1193 stmt_id = (unsigned int) bp_unpack_value (bp, HOST_BITS_PER_INT);
1194 inline_failed = (cgraph_inline_failed_t) bp_unpack_value (bp,
1195 HOST_BITS_PER_INT);
1196 freq = (int) bp_unpack_value (bp, HOST_BITS_PER_INT);
1197 nest = (unsigned) bp_unpack_value (bp, 30);
1198
1199 /* If the caller was preempted, don't create the edge.
1200 ??? Should we ever have edges from a preempted caller? */
1201 caller_resolution = lto_symtab_get_resolution (caller->decl);
1202 if (caller_resolution == LDPR_PREEMPTED_REG
1203 || caller_resolution == LDPR_PREEMPTED_IR)
1204 return;
1205
1206 if (indirect)
1207 edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq, nest);
1208 else
1209 edge = cgraph_create_edge (caller, callee, NULL, count, freq, nest);
1210
1211 edge->indirect_inlining_edge = bp_unpack_value (bp, 1);
1212 edge->lto_stmt_uid = stmt_id;
1213 edge->inline_failed = inline_failed;
1214 edge->call_stmt_cannot_inline_p = bp_unpack_value (bp, 1);
1215 edge->can_throw_external = bp_unpack_value (bp, 1);
1216 if (indirect)
1217 {
1218 if (bp_unpack_value (bp, 1))
1219 ecf_flags |= ECF_CONST;
1220 if (bp_unpack_value (bp, 1))
1221 ecf_flags |= ECF_PURE;
1222 if (bp_unpack_value (bp, 1))
1223 ecf_flags |= ECF_NORETURN;
1224 if (bp_unpack_value (bp, 1))
1225 ecf_flags |= ECF_MALLOC;
1226 if (bp_unpack_value (bp, 1))
1227 ecf_flags |= ECF_NOTHROW;
1228 if (bp_unpack_value (bp, 1))
1229 ecf_flags |= ECF_RETURNS_TWICE;
1230 edge->indirect_info->ecf_flags = ecf_flags;
1231 }
1232 bitpack_delete (bp);
1233 }
1234
1235
1236 /* Read a cgraph from IB using the info in FILE_DATA. */
1237
1238 static VEC(cgraph_node_ptr, heap) *
1239 input_cgraph_1 (struct lto_file_decl_data *file_data,
1240 struct lto_input_block *ib)
1241 {
1242 enum LTO_cgraph_tags tag;
1243 VEC(cgraph_node_ptr, heap) *nodes = NULL;
1244 struct cgraph_node *node;
1245 unsigned i;
1246 unsigned HOST_WIDE_INT len;
1247
1248 tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
1249 while (tag)
1250 {
1251 if (tag == LTO_cgraph_edge)
1252 input_edge (ib, nodes, false);
1253 else if (tag == LTO_cgraph_indirect_edge)
1254 input_edge (ib, nodes, true);
1255 else
1256 {
1257 node = input_node (file_data, ib, tag, nodes);
1258 if (node == NULL || node->decl == NULL_TREE)
1259 internal_error ("bytecode stream: found empty cgraph node");
1260 VEC_safe_push (cgraph_node_ptr, heap, nodes, node);
1261 lto_cgraph_encoder_encode (file_data->cgraph_node_encoder, node);
1262 }
1263
1264 tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
1265 }
1266
1267 /* Input toplevel asms. */
1268 len = lto_input_uleb128 (ib);
1269 while (len)
1270 {
1271 char *str = (char *)xmalloc (len + 1);
1272 for (i = 0; i < len; ++i)
1273 str[i] = lto_input_1_unsigned (ib);
1274 cgraph_add_asm_node (build_string (len, str));
1275 free (str);
1276
1277 len = lto_input_uleb128 (ib);
1278 }
1279
1280 for (i = 0; VEC_iterate (cgraph_node_ptr, nodes, i, node); i++)
1281 {
1282 int ref = (int) (intptr_t) node->global.inlined_to;
1283
1284 /* Fixup inlined_to from reference to pointer. */
1285 if (ref != LCC_NOT_FOUND)
1286 node->global.inlined_to = VEC_index (cgraph_node_ptr, nodes, ref);
1287 else
1288 node->global.inlined_to = NULL;
1289
1290 ref = (int) (intptr_t) node->same_comdat_group;
1291
1292 /* Fixup same_comdat_group from reference to pointer. */
1293 if (ref != LCC_NOT_FOUND)
1294 node->same_comdat_group = VEC_index (cgraph_node_ptr, nodes, ref);
1295 else
1296 node->same_comdat_group = NULL;
1297 }
1298 return nodes;
1299 }
1300
1301 /* Read a varpool from IB using the info in FILE_DATA. */
1302
1303 static VEC(varpool_node_ptr, heap) *
1304 input_varpool_1 (struct lto_file_decl_data *file_data,
1305 struct lto_input_block *ib)
1306 {
1307 unsigned HOST_WIDE_INT len;
1308 VEC(varpool_node_ptr, heap) *varpool = NULL;
1309 int i;
1310 struct varpool_node *node;
1311
1312 len = lto_input_uleb128 (ib);
1313 while (len)
1314 {
1315 VEC_safe_push (varpool_node_ptr, heap, varpool,
1316 input_varpool_node (file_data, ib));
1317 len--;
1318 }
1319 for (i = 0; VEC_iterate (varpool_node_ptr, varpool, i, node); i++)
1320 {
1321 int ref = (int) (intptr_t) node->same_comdat_group;
1322
1323 /* Fixup same_comdat_group from reference to pointer. */
1324 if (ref != LCC_NOT_FOUND)
1325 node->same_comdat_group = VEC_index (varpool_node_ptr, varpool, ref);
1326 else
1327 node->same_comdat_group = NULL;
1328 }
1329 return varpool;
1330 }
1331
1332 /* Input ipa_refs. */
1333
1334 static void
1335 input_refs (struct lto_input_block *ib,
1336 VEC(cgraph_node_ptr, heap) *nodes,
1337 VEC(varpool_node_ptr, heap) *varpool)
1338 {
1339 int count;
1340 int idx;
1341 while (true)
1342 {
1343 struct cgraph_node *node;
1344 count = lto_input_uleb128 (ib);
1345 if (!count)
1346 break;
1347 idx = lto_input_uleb128 (ib);
1348 node = VEC_index (cgraph_node_ptr, nodes, idx);
1349 while (count)
1350 {
1351 input_ref (ib, node, NULL, nodes, varpool);
1352 count--;
1353 }
1354 }
1355 while (true)
1356 {
1357 struct varpool_node *node;
1358 count = lto_input_uleb128 (ib);
1359 if (!count)
1360 break;
1361 node = VEC_index (varpool_node_ptr, varpool, lto_input_uleb128 (ib));
1362 while (count)
1363 {
1364 input_ref (ib, NULL, node, nodes, varpool);
1365 count--;
1366 }
1367 }
1368 }
1369
1370
1371 static struct gcov_ctr_summary lto_gcov_summary;
1372
1373 /* Input profile_info from IB. */
1374 static void
1375 input_profile_summary (struct lto_input_block *ib)
1376 {
1377 unsigned int runs = lto_input_uleb128 (ib);
1378 if (runs)
1379 {
1380 if (!profile_info)
1381 {
1382 profile_info = &lto_gcov_summary;
1383 lto_gcov_summary.runs = runs;
1384 lto_gcov_summary.sum_all = lto_input_sleb128 (ib);
1385 lto_gcov_summary.run_max = lto_input_sleb128 (ib);
1386 lto_gcov_summary.sum_max = lto_input_sleb128 (ib);
1387 }
1388 /* We could support this by scaling all counts to the nearest common
1389 multiple of the run counts, but it is perhaps not worth the effort. */
1390 else if (profile_info->runs != runs
1391 || profile_info->sum_all != lto_input_sleb128 (ib)
1392 || profile_info->run_max != lto_input_sleb128 (ib)
1393 || profile_info->sum_max != lto_input_sleb128 (ib))
1394 sorry ("Combining units with different profiles is not supported.");
1395 /* We allow some units to have a profile and others not to have one. This
1396 will just make the unprofiled units be optimized for size, which is sane. */
1397 }
1398
1399 }
1400
1401 /* Input and merge the cgraph from each of the .o files passed to
1402 lto1. */
1403
1404 void
1405 input_cgraph (void)
1406 {
1407 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1408 struct lto_file_decl_data *file_data;
1409 unsigned int j = 0;
1410 struct cgraph_node *node;
1411
1412 while ((file_data = file_data_vec[j++]))
1413 {
1414 const char *data;
1415 size_t len;
1416 struct lto_input_block *ib;
1417 VEC(cgraph_node_ptr, heap) *nodes;
1418 VEC(varpool_node_ptr, heap) *varpool;
1419
1420 ib = lto_create_simple_input_block (file_data, LTO_section_cgraph,
1421 &data, &len);
1422 input_profile_summary (ib);
1423 file_data->cgraph_node_encoder = lto_cgraph_encoder_new ();
1424 nodes = input_cgraph_1 (file_data, ib);
1425 lto_destroy_simple_input_block (file_data, LTO_section_cgraph,
1426 ib, data, len);
1427
1428 ib = lto_create_simple_input_block (file_data, LTO_section_varpool,
1429 &data, &len);
1430 varpool = input_varpool_1 (file_data, ib);
1431 lto_destroy_simple_input_block (file_data, LTO_section_varpool,
1432 ib, data, len);
1433
1434 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1435 &data, &len);
1436 input_refs (ib, nodes, varpool);
1437 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1438 ib, data, len);
1439 if (flag_ltrans)
1440 input_cgraph_opt_summary (nodes);
1441 VEC_free (cgraph_node_ptr, heap, nodes);
1442 VEC_free (varpool_node_ptr, heap, varpool);
1443 }
1444
1445 /* Clear out the aux field that was used to store enough state to
1446 tell which nodes should be overwritten. */
1447 for (node = cgraph_nodes; node; node = node->next)
1448 {
1449 /* Some nodes may have been created by cgraph_node. This
1450 happens when the callgraph contains nested functions. If the
1451 node for the parent function was never emitted to the gimple
1452 file, cgraph_node will create a node for it when setting the
1453 context of the nested function. */
1454 if (node->local.lto_file_data)
1455 node->aux = NULL;
1456 }
1457 }
1458
1459 /* True when we need optimization summary for NODE. */
1460
1461 static int
1462 output_cgraph_opt_summary_p (struct cgraph_node *node)
1463 {
1464 if (!node->clone_of)
1465 return false;
1466 return (node->clone.tree_map
1467 || node->clone.args_to_skip
1468 || node->clone.combined_args_to_skip);
1469 }
1470
1471 /* Output optimization summary for NODE to OB. */
1472
1473 static void
1474 output_node_opt_summary (struct output_block *ob,
1475 struct cgraph_node *node)
1476 {
1477 unsigned int index;
1478 bitmap_iterator bi;
1479 struct ipa_replace_map *map;
1480 struct bitpack_d *bp;
1481 int i;
1482
1483 lto_output_uleb128_stream (ob->main_stream,
1484 bitmap_count_bits (node->clone.args_to_skip));
1485 EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
1486 lto_output_uleb128_stream (ob->main_stream, index);
1487 lto_output_uleb128_stream (ob->main_stream,
1488 bitmap_count_bits (node->clone.combined_args_to_skip));
1489 EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
1490 lto_output_uleb128_stream (ob->main_stream, index);
1491 lto_output_uleb128_stream (ob->main_stream,
1492 VEC_length (ipa_replace_map_p, node->clone.tree_map));
1493 for (i = 0; VEC_iterate (ipa_replace_map_p, node->clone.tree_map, i, map); i++)
1494 {
1495 int parm_num;
1496 tree parm;
1497
1498 for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm;
1499 parm = TREE_CHAIN (parm), parm_num++)
1500 if (map->old_tree == parm)
1501 break;
1502 /* At the moment we assume all old trees are PARM_DECLs, because we have no
1503 mechanism to store function-local declarations into summaries. */
1504 gcc_assert (parm);
1505 lto_output_uleb128_stream (ob->main_stream, parm_num);
1506 lto_output_tree (ob, map->new_tree, true);
1507 bp = bitpack_create ();
1508 bp_pack_value (bp, map->replace_p, 1);
1509 bp_pack_value (bp, map->ref_p, 1);
1510 lto_output_bitpack (ob->main_stream, bp);
1511 bitpack_delete (bp);
1512 }
1513 }
1514
1515 /* Output optimization summaries stored in callgraph.
1516 At the moment it is the clone info structure. */
1517
1518 static void
1519 output_cgraph_opt_summary (void)
1520 {
1521 struct cgraph_node *node;
1522 int i, n_nodes;
1523 lto_cgraph_encoder_t encoder;
1524 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1525 unsigned count = 0;
1526
1527 ob->cgraph_node = NULL;
1528 encoder = ob->decl_state->cgraph_node_encoder;
1529 n_nodes = lto_cgraph_encoder_size (encoder);
1530 for (i = 0; i < n_nodes; i++)
1531 if (output_cgraph_opt_summary_p (lto_cgraph_encoder_deref (encoder, i)))
1532 count++;
1533 lto_output_uleb128_stream (ob->main_stream, count);
1534 for (i = 0; i < n_nodes; i++)
1535 {
1536 node = lto_cgraph_encoder_deref (encoder, i);
1537 if (output_cgraph_opt_summary_p (node))
1538 {
1539 lto_output_uleb128_stream (ob->main_stream, i);
1540 output_node_opt_summary (ob, node);
1541 }
1542 }
1543 produce_asm (ob, NULL);
1544 destroy_output_block (ob);
1545 }
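
/* The optimization summary section is thus a count of nodes that
   carry clone info, followed for each such node by its index in the
   encoder and the record written by output_node_opt_summary: the
   args_to_skip and combined_args_to_skip bitmaps as index lists and
   the replacement tree map.  input_cgraph_opt_section and
   input_node_opt_summary read it back in the same order.  */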
1546
1547 /* Input optimization summary of NODE. */
1548
1549 static void
1550 input_node_opt_summary (struct cgraph_node *node,
1551 struct lto_input_block *ib_main,
1552 struct data_in *data_in)
1553 {
1554 int i;
1555 int count;
1556 int bit;
1557 struct bitpack_d *bp;
1558
1559 count = lto_input_uleb128 (ib_main);
1560 if (count)
1561 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
1562 for (i = 0; i < count; i++)
1563 {
1564 bit = lto_input_uleb128 (ib_main);
1565 bitmap_set_bit (node->clone.args_to_skip, bit);
1566 }
1567 count = lto_input_uleb128 (ib_main);
1568 if (count)
1569 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
1570 for (i = 0; i < count; i++)
1571 {
1572 bit = lto_input_uleb128 (ib_main);
1573 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
1574 }
1575 count = lto_input_uleb128 (ib_main);
1576 for (i = 0; i < count; i++)
1577 {
1578 int parm_num;
1579 tree parm;
1580 struct ipa_replace_map *map = GGC_NEW (struct ipa_replace_map);
1581
1582 VEC_safe_push (ipa_replace_map_p, gc, node->clone.tree_map, map);
1583 for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm_num;
1584 parm = TREE_CHAIN (parm))
1585 parm_num --;
1586 map->parm_num = lto_input_uleb128 (ib_main);
1587 map->old_tree = NULL;
1588 map->new_tree = lto_input_tree (ib_main, data_in);
1589 bp = lto_input_bitpack (ib_main);
1590 map->replace_p = bp_unpack_value (bp, 1);
1591 map->ref_p = bp_unpack_value (bp, 1);
1592 bitpack_delete (bp);
1593 }
1594 }
1595
1596 /* Read section in file FILE_DATA of length LEN with data DATA. */
1597
1598 static void
1599 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
1600 const char *data, size_t len, VEC (cgraph_node_ptr,
1601 heap) * nodes)
1602 {
1603 const struct lto_function_header *header =
1604 (const struct lto_function_header *) data;
1605 const int32_t cfg_offset = sizeof (struct lto_function_header);
1606 const int32_t main_offset = cfg_offset + header->cfg_size;
1607 const int32_t string_offset = main_offset + header->main_size;
1608 struct data_in *data_in;
1609 struct lto_input_block ib_main;
1610 unsigned int i;
1611 unsigned int count;
1612
1613 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
1614 header->main_size);
1615
1616 data_in =
1617 lto_data_in_create (file_data, (const char *) data + string_offset,
1618 header->string_size, NULL);
1619 count = lto_input_uleb128 (&ib_main);
1620
1621 for (i = 0; i < count; i++)
1622 {
1623 int ref = lto_input_uleb128 (&ib_main);
1624 input_node_opt_summary (VEC_index (cgraph_node_ptr, nodes, ref),
1625 &ib_main, data_in);
1626 }
1627 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
1628 len);
1629 lto_data_in_delete (data_in);
1630 }
1631
1632 /* Input optimization summary of cgraph. */
1633
1634 static void
1635 input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes)
1636 {
1637 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1638 struct lto_file_decl_data *file_data;
1639 unsigned int j = 0;
1640
1641 while ((file_data = file_data_vec[j++]))
1642 {
1643 size_t len;
1644 const char *data =
1645 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
1646 &len);
1647
1648 if (data)
1649 input_cgraph_opt_section (file_data, data, len, nodes);
1650 }
1651 }