lto-cgraph.c (output_varpool): Forward declare; work on encoder.
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright 2009 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "toplev.h"
28 #include "tree.h"
29 #include "expr.h"
30 #include "flags.h"
31 #include "params.h"
32 #include "input.h"
33 #include "hashtab.h"
34 #include "langhooks.h"
35 #include "basic-block.h"
36 #include "tree-flow.h"
37 #include "cgraph.h"
38 #include "function.h"
39 #include "ggc.h"
40 #include "diagnostic.h"
41 #include "except.h"
42 #include "vec.h"
43 #include "timevar.h"
44 #include "output.h"
45 #include "pointer-set.h"
46 #include "lto-streamer.h"
47 #include "gcov-io.h"
48
49 static void output_varpool (varpool_node_set);
50
51 /* Cgraph streaming is organized as a set of records whose type
52 is indicated by a tag. */
53 enum LTO_cgraph_tags
54 {
55 /* Must leave 0 for the stopper. */
56
57 /* Cgraph node without body available. */
58 LTO_cgraph_unavail_node = 1,
59 /* Cgraph node with function body. */
60 LTO_cgraph_analyzed_node,
61 /* Cgraph edges. */
62 LTO_cgraph_edge,
63 LTO_cgraph_indirect_edge
64 };
65
66 /* Create a new cgraph encoder. */
67
68 lto_cgraph_encoder_t
69 lto_cgraph_encoder_new (void)
70 {
71 lto_cgraph_encoder_t encoder = XCNEW (struct lto_cgraph_encoder_d);
72 encoder->map = pointer_map_create ();
73 encoder->nodes = NULL;
74 return encoder;
75 }
76
77
78 /* Delete ENCODER and its components. */
79
80 void
81 lto_cgraph_encoder_delete (lto_cgraph_encoder_t encoder)
82 {
83 VEC_free (cgraph_node_ptr, heap, encoder->nodes);
84 pointer_map_destroy (encoder->map);
85 free (encoder);
86 }
87
88
89 /* Return the existing reference number of NODE in the cgraph encoder
90 ENCODER. Assign a new reference if this is the first time
91 NODE is encoded. */
92
93 int
94 lto_cgraph_encoder_encode (lto_cgraph_encoder_t encoder,
95 struct cgraph_node *node)
96 {
97 int ref;
98 void **slot;
99
100 slot = pointer_map_contains (encoder->map, node);
101 if (!slot)
102 {
103 ref = VEC_length (cgraph_node_ptr, encoder->nodes);
104 slot = pointer_map_insert (encoder->map, node);
105 *slot = (void *) (intptr_t) ref;
106 VEC_safe_push (cgraph_node_ptr, heap, encoder->nodes, node);
107 }
108 else
109 ref = (int) (intptr_t) *slot;
110
111 return ref;
112 }
113
114 #define LCC_NOT_FOUND (-1)
115
116 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
117 or LCC_NOT_FOUND if it is not there. */
118
119 int
120 lto_cgraph_encoder_lookup (lto_cgraph_encoder_t encoder,
121 struct cgraph_node *node)
122 {
123 void **slot = pointer_map_contains (encoder->map, node);
124 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
125 }
126
127
128 /* Return the cgraph node corresponding to REF using ENCODER. */
129
130 struct cgraph_node *
131 lto_cgraph_encoder_deref (lto_cgraph_encoder_t encoder, int ref)
132 {
133 if (ref == LCC_NOT_FOUND)
134 return NULL;
135
136 return VEC_index (cgraph_node_ptr, encoder->nodes, ref);
137 }
138
139
140 /* Return number of encoded nodes in ENCODER. */
141
142 static int
143 lto_cgraph_encoder_size (lto_cgraph_encoder_t encoder)
144 {
145 return VEC_length (cgraph_node_ptr, encoder->nodes);
146 }
147
148 /* Create a new varpool encoder. */
149
150 lto_varpool_encoder_t
151 lto_varpool_encoder_new (void)
152 {
153 lto_varpool_encoder_t encoder = XCNEW (struct lto_varpool_encoder_d);
154 encoder->map = pointer_map_create ();
155 encoder->initializer = pointer_set_create ();
156 encoder->nodes = NULL;
157 return encoder;
158 }
159
160
161 /* Delete ENCODER and its components. */
162
163 void
164 lto_varpool_encoder_delete (lto_varpool_encoder_t encoder)
165 {
166 VEC_free (varpool_node_ptr, heap, encoder->nodes);
167 pointer_map_destroy (encoder->map);
168 pointer_set_destroy (encoder->initializer);
169 free (encoder);
170 }
171
172
173 /* Return the existing reference number of NODE in the varpool encoder
174 ENCODER. Assign a new reference if this is the first time
175 NODE is encoded. */
176
177 int
178 lto_varpool_encoder_encode (lto_varpool_encoder_t encoder,
179 struct varpool_node *node)
180 {
181 int ref;
182 void **slot;
183
184 slot = pointer_map_contains (encoder->map, node);
185 if (!slot)
186 {
187 ref = VEC_length (varpool_node_ptr, encoder->nodes);
188 slot = pointer_map_insert (encoder->map, node);
189 *slot = (void *) (intptr_t) ref;
190 VEC_safe_push (varpool_node_ptr, heap, encoder->nodes, node);
191 }
192 else
193 ref = (int) (intptr_t) *slot;
194
195 return ref;
196 }
197
198 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
199 or LCC_NOT_FOUND if it is not there. */
200
201 int
202 lto_varpool_encoder_lookup (lto_varpool_encoder_t encoder,
203 struct varpool_node *node)
204 {
205 void **slot = pointer_map_contains (encoder->map, node);
206 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
207 }
208
209
210 /* Return the varpool node corresponding to REF using ENCODER. */
211
212 struct varpool_node *
213 lto_varpool_encoder_deref (lto_varpool_encoder_t encoder, int ref)
214 {
215 if (ref == LCC_NOT_FOUND)
216 return NULL;
217
218 return VEC_index (varpool_node_ptr, encoder->nodes, ref);
219 }
220
221
222 /* Return number of encoded nodes in ENCODER. */
223
224 static int
225 lto_varpool_encoder_size (lto_varpool_encoder_t encoder)
226 {
227 return VEC_length (varpool_node_ptr, encoder->nodes);
228 }
229
230 /* Return TRUE if we should encode the initializer of NODE (if any). */
231
232 bool
233 lto_varpool_encoder_encode_initializer_p (lto_varpool_encoder_t encoder,
234 struct varpool_node *node)
235 {
236 return pointer_set_contains (encoder->initializer, node);
237 }
238
239 /* Remember that the initializer of NODE (if any) should be encoded. */
240
241 static void
242 lto_set_varpool_encoder_encode_initializer (lto_varpool_encoder_t encoder,
243 struct varpool_node *node)
244 {
245 pointer_set_insert (encoder->initializer, node);
246 }
247
248 /* Output the cgraph EDGE to OB using ENCODER. */
249
250 static void
251 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
252 lto_cgraph_encoder_t encoder)
253 {
254 unsigned int uid;
255 intptr_t ref;
256 struct bitpack_d *bp;
257
258 if (edge->indirect_unknown_callee)
259 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_indirect_edge);
260 else
261 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_edge);
262
263 ref = lto_cgraph_encoder_lookup (encoder, edge->caller);
264 gcc_assert (ref != LCC_NOT_FOUND);
265 lto_output_sleb128_stream (ob->main_stream, ref);
266
267 if (!edge->indirect_unknown_callee)
268 {
269 ref = lto_cgraph_encoder_lookup (encoder, edge->callee);
270 gcc_assert (ref != LCC_NOT_FOUND);
271 lto_output_sleb128_stream (ob->main_stream, ref);
272 }
273
274 lto_output_sleb128_stream (ob->main_stream, edge->count);
275
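/* Pack the remaining edge attributes into a bitpack.  The layout here must
   mirror the unpacking order in input_edge: statement uid, inline_failed,
   frequency, loop nest, and then the one-bit flags.  */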
276 bp = bitpack_create ();
277 uid = flag_wpa ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt);
278 bp_pack_value (bp, uid, HOST_BITS_PER_INT);
279 bp_pack_value (bp, edge->inline_failed, HOST_BITS_PER_INT);
280 bp_pack_value (bp, edge->frequency, HOST_BITS_PER_INT);
281 bp_pack_value (bp, edge->loop_nest, 30);
282 bp_pack_value (bp, edge->indirect_inlining_edge, 1);
283 bp_pack_value (bp, edge->call_stmt_cannot_inline_p, 1);
284 bp_pack_value (bp, edge->can_throw_external, 1);
285 lto_output_bitpack (ob->main_stream, bp);
286 bitpack_delete (bp);
287 }
288
289 /* Return true when NODE is reachable from outside SET, i.e. from another partition. */
290
291 static bool
292 reachable_from_other_partition_p (struct cgraph_node *node, cgraph_node_set set)
293 {
294 struct cgraph_edge *e;
295 if (node->needed)
296 return true;
297 if (!node->analyzed)
298 return false;
299 if (node->global.inlined_to)
300 return false;
301 for (e = node->callers; e; e = e->next_caller)
302 if (!cgraph_node_in_set_p (e->caller, set))
303 return true;
304 return false;
305 }
306
307 /* Output the cgraph NODE to OB. ENCODER is used to find the
308 reference number of NODE->inlined_to. SET is the set of nodes we
309 are writing to the current file. If NODE is not in SET, then NODE
310 is a boundary of a cgraph_node_set and we pretend NODE just has a
311 decl and no callees. WRITTEN_DECLS is the set of FUNCTION_DECLs
312 that have had their callgraph node written so far. This is used to
313 determine if NODE is a clone of a previously written node. */
314
315 static void
316 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
317 lto_cgraph_encoder_t encoder, cgraph_node_set set,
318 bitmap written_decls)
319 {
320 unsigned int tag;
321 struct bitpack_d *bp;
322 bool boundary_p, wrote_decl_p;
323 intptr_t ref;
324 bool in_other_partition = false;
325
326 boundary_p = !cgraph_node_in_set_p (node, set);
327 wrote_decl_p = bitmap_bit_p (written_decls, DECL_UID (node->decl));
328
329 if (node->analyzed && !boundary_p)
330 tag = LTO_cgraph_analyzed_node;
331 else
332 tag = LTO_cgraph_unavail_node;
333
334 lto_output_uleb128_stream (ob->main_stream, tag);
335
336 /* In WPA mode, we only output part of the call-graph. Also, we
337 fake cgraph node attributes. There are two cases that we care about.
338
339 Boundary nodes: There are nodes that are not part of SET but are
340 called from within SET. We artificially make them look like
341 externally visible nodes with no function body.
342
343 Cherry-picked nodes: These are nodes we pulled from other
344 translation units into SET during IPA-inlining. We make them
345 local static nodes to prevent clashes with other local statics. */
346 if (boundary_p && node->analyzed)
347 {
348 /* Inline clones cannot be part of the boundary.
349 gcc_assert (!node->global.inlined_to);
350
351 FIXME: At the moment they can be, when a partition contains an inline
352 clone that is a clone of an inline clone from outside the partition. We
353 could reshape the clone tree and make another node the root, but this
354 needs a bit of extra work and will be promptly done by cgraph_remove_node
355 after reading back. */
356 in_other_partition = 1;
357 }
358
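/* WROTE_DECL_P becomes the clone flag on the reading side: when the
   FUNCTION_DECL already has a node in the stream, input_node materializes
   this one with cgraph_clone_node instead of cgraph_node.  */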
359 lto_output_uleb128_stream (ob->main_stream, wrote_decl_p);
360
361 if (!wrote_decl_p)
362 bitmap_set_bit (written_decls, DECL_UID (node->decl));
363
364 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
365 lto_output_sleb128_stream (ob->main_stream, node->count);
366
367 bp = bitpack_create ();
368 bp_pack_value (bp, node->local.local, 1);
369 bp_pack_value (bp, node->local.externally_visible, 1);
370 bp_pack_value (bp, node->local.finalized, 1);
371 bp_pack_value (bp, node->local.inlinable, 1);
372 bp_pack_value (bp, node->local.disregard_inline_limits, 1);
373 bp_pack_value (bp, node->local.redefined_extern_inline, 1);
374 bp_pack_value (bp, node->local.vtable_method, 1);
375 bp_pack_value (bp, node->needed, 1);
376 bp_pack_value (bp, node->address_taken, 1);
377 bp_pack_value (bp, node->abstract_and_needed, 1);
378 bp_pack_value (bp, tag == LTO_cgraph_analyzed_node
379 && reachable_from_other_partition_p (node, set), 1);
380 bp_pack_value (bp, node->lowered, 1);
381 bp_pack_value (bp, in_other_partition, 1);
382 bp_pack_value (bp, node->alias, 1);
383 bp_pack_value (bp, node->finalized_by_frontend, 1);
384 bp_pack_value (bp, node->frequency, 2);
385 lto_output_bitpack (ob->main_stream, bp);
386 bitpack_delete (bp);
387
388 if (tag == LTO_cgraph_analyzed_node)
389 {
390 lto_output_sleb128_stream (ob->main_stream,
391 node->local.inline_summary.estimated_self_stack_size);
392 lto_output_sleb128_stream (ob->main_stream,
393 node->local.inline_summary.self_size);
394 lto_output_sleb128_stream (ob->main_stream,
395 node->local.inline_summary.size_inlining_benefit);
396 lto_output_sleb128_stream (ob->main_stream,
397 node->local.inline_summary.self_time);
398 lto_output_sleb128_stream (ob->main_stream,
399 node->local.inline_summary.time_inlining_benefit);
400 if (node->global.inlined_to)
401 {
402 ref = lto_cgraph_encoder_lookup (encoder, node->global.inlined_to);
403 gcc_assert (ref != LCC_NOT_FOUND);
404 }
405 else
406 ref = LCC_NOT_FOUND;
407
408 lto_output_sleb128_stream (ob->main_stream, ref);
409 }
410
411 if (node->same_comdat_group && !boundary_p)
412 {
413 ref = lto_cgraph_encoder_lookup (encoder, node->same_comdat_group);
414 gcc_assert (ref != LCC_NOT_FOUND);
415 }
416 else
417 ref = LCC_NOT_FOUND;
418 lto_output_sleb128_stream (ob->main_stream, ref);
419
420 if (node->same_body)
421 {
422 struct cgraph_node *alias;
423 unsigned long alias_count = 1;
424 for (alias = node->same_body; alias->next; alias = alias->next)
425 alias_count++;
426 lto_output_uleb128_stream (ob->main_stream, alias_count);
427 do
428 {
429 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
430 alias->decl);
431 if (alias->thunk.thunk_p)
432 {
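/* Encode the thunk kind in one small integer: bit 0 set distinguishes a
   thunk from a plain same-body alias (which streams 0), bit 1 carries
   this_adjusting and bit 2 carries virtual_offset_p; input_node decodes
   these with (type & 2) and (type & 4).  */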
433 lto_output_uleb128_stream
434 (ob->main_stream,
435 1 + (alias->thunk.this_adjusting != 0) * 2
436 + (alias->thunk.virtual_offset_p != 0) * 4);
437 lto_output_uleb128_stream (ob->main_stream,
438 alias->thunk.fixed_offset);
439 lto_output_uleb128_stream (ob->main_stream,
440 alias->thunk.virtual_value);
441 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
442 alias->thunk.alias);
443 }
444 else
445 {
446 lto_output_uleb128_stream (ob->main_stream, 0);
447 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
448 alias->thunk.alias);
449 }
450 alias = alias->previous;
451 }
452 while (alias);
453 }
454 else
455 lto_output_uleb128_stream (ob->main_stream, 0);
456 }
457
458 /* Output the varpool NODE to OB.
459 If NODE is not in SET, then NODE is a boundary. */
460
461 static void
462 lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
463 varpool_node_set set)
464 {
465 bool boundary_p = !varpool_node_in_set_p (node, set) && node->analyzed;
466 struct bitpack_d *bp;
467 struct varpool_node *alias;
468 int count = 0;
469
470 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
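/* The bitpack layout must mirror input_varpool_node: externally_visible,
   force_output, finalized, used_from_other_partition, in_other_partition,
   and finally a flag saying whether extra name aliases follow.  */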
471 bp = bitpack_create ();
472 bp_pack_value (bp, node->externally_visible, 1);
473 bp_pack_value (bp, node->force_output, 1);
474 bp_pack_value (bp, node->finalized, 1);
475 gcc_assert (node->finalized || !node->analyzed);
476 gcc_assert (node->needed);
477 gcc_assert (!node->alias);
478 /* Constant pool initializers can be de-unified into individual ltrans units.
479 FIXME: Alternatively, at -Os we may want to avoid generating the local
480 labels for them and share them across LTRANS partitions. */
481 if (DECL_IN_CONSTANT_POOL (node->decl))
482 {
483 bp_pack_value (bp, 0, 1); /* used_from_other_partition. */
484 bp_pack_value (bp, 0, 1); /* in_other_partition. */
485 }
486 else
487 {
488 /* FIXME: We have no idea how we move references around. For the moment assume that
489 everything is used externally. */
490 bp_pack_value (bp, flag_wpa, 1); /* used_from_other_partition. */
491 bp_pack_value (bp, boundary_p, 1); /* in_other_partition. */
492 }
493 /* Also emit any extra name aliases. */
494 for (alias = node->extra_name; alias; alias = alias->next)
495 count++;
496 bp_pack_value (bp, count != 0, 1);
497 lto_output_bitpack (ob->main_stream, bp);
498 bitpack_delete (bp);
499
500 if (count)
501 {
502 lto_output_uleb128_stream (ob->main_stream, count);
503 for (alias = node->extra_name; alias; alias = alias->next)
504 lto_output_var_decl_index (ob->decl_state, ob->main_stream, alias->decl);
505 }
506 }
507
508 /* Stream out profile_summary to OB. */
509
510 static void
511 output_profile_summary (struct lto_simple_output_block *ob)
512 {
513 if (profile_info)
514 {
515 /* We do not output num; it is not terribly useful. */
516 gcc_assert (profile_info->runs);
517 lto_output_uleb128_stream (ob->main_stream, profile_info->runs);
518 lto_output_sleb128_stream (ob->main_stream, profile_info->sum_all);
519 lto_output_sleb128_stream (ob->main_stream, profile_info->run_max);
520 lto_output_sleb128_stream (ob->main_stream, profile_info->sum_max);
521 }
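/* A zero runs count below is the marker for "no profile data";
   input_profile_summary keys off it when reading the summary back.  */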
522 else
523 lto_output_uleb128_stream (ob->main_stream, 0);
524 }
525
526 /* Add NODE into ENCODER as well as the nodes it was cloned from.
527 Do it in such a way that the original nodes appear before their clones. */
528 static void
529 add_node_to (lto_cgraph_encoder_t encoder, struct cgraph_node *node)
530 {
531 if (node->clone_of)
532 add_node_to (encoder, node->clone_of);
533 lto_cgraph_encoder_encode (encoder, node);
534 }
535
536 /* Output all callees or indirect outgoing edges. EDGE must be the first such
537 edge. */
538
539 static void
540 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
541 struct lto_simple_output_block *ob,
542 lto_cgraph_encoder_t encoder)
543 {
544 if (!edge)
545 return;
546
547 /* Output edges in backward direction, so the reconstructed callgraph matches
548 and it is easy to associate call sites with the IPA pass summaries. */
549 while (edge->next_callee)
550 edge = edge->next_callee;
551 for (; edge; edge = edge->prev_callee)
552 lto_output_edge (ob, edge, encoder);
553 }
554
555 /* Output the part of the cgraph in SET, together with the varpool in VSET. */
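/* The layout produced below is: the profile summary, the encoded cgraph
   nodes, the outgoing edges of every node in SET terminated by a zero tag,
   the top-level asm strings terminated by a zero length, and finally the
   varpool nodes in their own section written by output_varpool.  */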
556
557 void
558 output_cgraph (cgraph_node_set set, varpool_node_set vset)
559 {
560 struct cgraph_node *node;
561 struct lto_simple_output_block *ob;
562 cgraph_node_set_iterator csi;
563 varpool_node_set_iterator vsi;
564 struct cgraph_edge *edge;
565 int i, n_nodes;
566 bitmap written_decls;
567 lto_cgraph_encoder_t encoder;
568 lto_varpool_encoder_t varpool_encoder;
569 struct cgraph_asm_node *can;
570 struct varpool_node *vnode;
571
572 ob = lto_create_simple_output_block (LTO_section_cgraph);
573
574 output_profile_summary (ob);
575
576 /* An encoder for cgraph nodes should have been created by
577 ipa_write_summaries_1. */
578 gcc_assert (ob->decl_state->cgraph_node_encoder);
579 gcc_assert (ob->decl_state->varpool_node_encoder);
580 encoder = ob->decl_state->cgraph_node_encoder;
581 varpool_encoder = ob->decl_state->varpool_node_encoder;
582
583 /* The FUNCTION_DECLs for which we have written a node. The first
584 node found is written as the "original" node, the remaining nodes
585 are considered its clones. */
586 written_decls = lto_bitmap_alloc ();
587
588 /* Go over all the nodes in SET and assign references. */
589 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
590 {
591 node = csi_node (csi);
592 add_node_to (encoder, node);
593 }
594 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
595 {
596 struct varpool_node *vnode = vsi_node (vsi);
597 gcc_assert (!vnode->alias);
598 lto_varpool_encoder_encode (varpool_encoder, vnode);
599 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
600 }
601 /* FIXME: We do not track references, so for now we need to include all possibly
602 used variables in the encoder set. */
603 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
604 if (vnode->needed)
605 lto_varpool_encoder_encode (varpool_encoder, vnode);
606 /* Also pickle in the initializers of all referenced read-only variables
607 to help folding. Constant pool variables are not shared, so we must
608 pickle those too. */
609 for (i = 0; i < lto_varpool_encoder_size (varpool_encoder); i++)
610 {
611 struct varpool_node *vnode = lto_varpool_encoder_deref (varpool_encoder, i);
612 if (DECL_INITIAL (vnode->decl)
613 && !lto_varpool_encoder_encode_initializer_p (varpool_encoder,
614 vnode)
615 && (DECL_IN_CONSTANT_POOL (vnode->decl)
616 || TREE_READONLY (vnode->decl)))
617 {
618 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
619 }
620 }
621
622 /* Go over all the nodes again to include callees that are not in
623 SET. */
624 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
625 {
626 node = csi_node (csi);
627 for (edge = node->callees; edge; edge = edge->next_callee)
628 {
629 struct cgraph_node *callee = edge->callee;
630 if (!cgraph_node_in_set_p (callee, set))
631 {
632 /* We should have moved all the inlines. */
633 gcc_assert (!callee->global.inlined_to);
634 add_node_to (encoder, callee);
635 }
636 }
637 }
638
639 /* Write out the nodes. We must first output a node and then its clones;
640 otherwise, when reading a node back, there would be nothing to clone
641 from. */
642 n_nodes = lto_cgraph_encoder_size (encoder);
643 for (i = 0; i < n_nodes; i++)
644 {
645 node = lto_cgraph_encoder_deref (encoder, i);
646 lto_output_node (ob, node, encoder, set, written_decls);
647 }
648
649 lto_bitmap_free (written_decls);
650
651 /* Go over the nodes in SET again to write edges. */
652 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
653 {
654 node = csi_node (csi);
655 output_outgoing_cgraph_edges (node->callees, ob, encoder);
656 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
657 }
658
659 lto_output_uleb128_stream (ob->main_stream, 0);
660
661 /* Emit toplevel asms. */
662 for (can = cgraph_asm_nodes; can; can = can->next)
663 {
664 int len = TREE_STRING_LENGTH (can->asm_str);
665 lto_output_uleb128_stream (ob->main_stream, len);
666 for (i = 0; i < len; ++i)
667 lto_output_1_stream (ob->main_stream,
668 TREE_STRING_POINTER (can->asm_str)[i]);
669 }
670
671 lto_output_uleb128_stream (ob->main_stream, 0);
672
673 lto_destroy_simple_output_block (ob);
674 output_varpool (vset);
675 }
676
677 /* Overwrite the information in NODE based on FILE_DATA, TAG, the bitflags
678 in BP and the inline summary arguments STACK_SIZE, SELF_TIME,
679 TIME_INLINING_BENEFIT, SELF_SIZE and SIZE_INLINING_BENEFIT. This is
680 called either to initialize NODE or to replace the values in it, for
681 instance because the first time we saw it, the function body was not
682 available but now it is. */
683
684 static void
685 input_overwrite_node (struct lto_file_decl_data *file_data,
686 struct cgraph_node *node,
687 enum LTO_cgraph_tags tag,
688 struct bitpack_d *bp,
689 unsigned int stack_size,
690 unsigned int self_time,
691 unsigned int time_inlining_benefit,
692 unsigned int self_size,
693 unsigned int size_inlining_benefit)
694 {
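/* Stash TAG in the aux field; input_node uses it to detect nodes that are
   read more than once, and input_cgraph clears it again once all files
   have been read.  */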
695 node->aux = (void *) tag;
696 node->local.inline_summary.estimated_self_stack_size = stack_size;
697 node->local.inline_summary.self_time = self_time;
698 node->local.inline_summary.time_inlining_benefit = time_inlining_benefit;
699 node->local.inline_summary.self_size = self_size;
700 node->local.inline_summary.size_inlining_benefit = size_inlining_benefit;
701 node->global.time = self_time;
702 node->global.size = self_size;
703 node->global.estimated_stack_size = stack_size;
704 node->global.estimated_growth = INT_MIN;
705 node->local.lto_file_data = file_data;
706
707 node->local.local = bp_unpack_value (bp, 1);
708 node->local.externally_visible = bp_unpack_value (bp, 1);
709 node->local.finalized = bp_unpack_value (bp, 1);
710 node->local.inlinable = bp_unpack_value (bp, 1);
711 node->local.disregard_inline_limits = bp_unpack_value (bp, 1);
712 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
713 node->local.vtable_method = bp_unpack_value (bp, 1);
714 node->needed = bp_unpack_value (bp, 1);
715 node->address_taken = bp_unpack_value (bp, 1);
716 node->abstract_and_needed = bp_unpack_value (bp, 1);
717 node->reachable_from_other_partition = bp_unpack_value (bp, 1);
718 node->lowered = bp_unpack_value (bp, 1);
719 node->analyzed = tag == LTO_cgraph_analyzed_node;
720 node->in_other_partition = bp_unpack_value (bp, 1);
721 node->alias = bp_unpack_value (bp, 1);
722 node->finalized_by_frontend = bp_unpack_value (bp, 1);
723 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
724 }
725
726 /* Output the part of the varpool in VSET. */
727
728 static void
729 output_varpool (varpool_node_set vset)
730 {
731 struct lto_simple_output_block *ob = lto_create_simple_output_block (LTO_section_varpool);
732 lto_varpool_encoder_t varpool_encoder = ob->decl_state->varpool_node_encoder;
733 int len = lto_varpool_encoder_size (varpool_encoder), i;
734
735 lto_output_uleb128_stream (ob->main_stream, len);
736
737 /* Write out the nodes in the order they were added to the encoder;
738 input_varpool_1 simply reads LEN nodes back in the same
739 order. */
740 for (i = 0; i < len; i++)
741 {
742 lto_output_varpool_node (ob, lto_varpool_encoder_deref (varpool_encoder, i),
743 vset);
744 }
745
746 lto_destroy_simple_output_block (ob);
747 }
748
749 /* Read a cgraph node from input_block IB. TAG is the node's tag just read.
750 Return the node read or overwritten. */
751
752 static struct cgraph_node *
753 input_node (struct lto_file_decl_data *file_data,
754 struct lto_input_block *ib,
755 enum LTO_cgraph_tags tag)
756 {
757 tree fn_decl;
758 struct cgraph_node *node;
759 struct bitpack_d *bp;
760 int stack_size = 0;
761 unsigned decl_index;
762 bool clone_p;
763 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
764 int self_time = 0;
765 int self_size = 0;
766 int time_inlining_benefit = 0;
767 int size_inlining_benefit = 0;
768 unsigned long same_body_count = 0;
769
770 clone_p = (lto_input_uleb128 (ib) != 0);
771
772 decl_index = lto_input_uleb128 (ib);
773 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
774
775 if (clone_p)
776 node = cgraph_clone_node (cgraph_node (fn_decl), 0,
777 CGRAPH_FREQ_BASE, 0, false, NULL);
778
779 else
780 node = cgraph_node (fn_decl);
781
782 node->count = lto_input_sleb128 (ib);
783 bp = lto_input_bitpack (ib);
784
785 if (tag == LTO_cgraph_analyzed_node)
786 {
787 stack_size = lto_input_sleb128 (ib);
788 self_size = lto_input_sleb128 (ib);
789 size_inlining_benefit = lto_input_sleb128 (ib);
790 self_time = lto_input_sleb128 (ib);
791 time_inlining_benefit = lto_input_sleb128 (ib);
792
793 ref = lto_input_sleb128 (ib);
794 }
795
796 ref2 = lto_input_sleb128 (ib);
797 same_body_count = lto_input_uleb128 (ib);
798
799 /* Make sure that we have not read this node before. Nodes that
800 have already been read will have their tag stored in the 'aux'
801 field. Since built-in functions can be referenced in multiple
802 functions, they are expected to be read more than once. */
803 if (node->aux && !DECL_IS_BUILTIN (node->decl))
804 internal_error ("bytecode stream: found multiple instances of cgraph "
805 "node %d", node->uid);
806
807 input_overwrite_node (file_data, node, tag, bp, stack_size, self_time,
808 time_inlining_benefit, self_size,
809 size_inlining_benefit);
810 bitpack_delete (bp);
811
812 /* Store a reference for now, and fix up later to be a pointer. */
813 node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
814
815 /* Store a reference for now, and fix up later to be a pointer. */
816 node->same_comdat_group = (cgraph_node_ptr) (intptr_t) ref2;
817
818 while (same_body_count-- > 0)
819 {
820 tree alias_decl;
821 int type;
822 decl_index = lto_input_uleb128 (ib);
823 alias_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
824 type = lto_input_uleb128 (ib);
825 if (!type)
826 {
827 tree real_alias;
828 decl_index = lto_input_uleb128 (ib);
829 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
830 cgraph_same_body_alias (alias_decl, real_alias);
831 }
832 else
833 {
834 HOST_WIDE_INT fixed_offset = lto_input_uleb128 (ib);
835 HOST_WIDE_INT virtual_value = lto_input_uleb128 (ib);
836 tree real_alias;
837 decl_index = lto_input_uleb128 (ib);
838 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
839 cgraph_add_thunk (alias_decl, fn_decl, type & 2, fixed_offset,
840 virtual_value,
841 (type & 4) ? size_int (virtual_value) : NULL_TREE,
842 real_alias);
843 }
844 }
845 return node;
846 }
847
848 /* Read a varpool node from input_block IB using the info in FILE_DATA.
849 Return the node read or overwritten. */
850
851 static struct varpool_node *
852 input_varpool_node (struct lto_file_decl_data *file_data,
853 struct lto_input_block *ib)
854 {
855 int decl_index;
856 tree var_decl;
857 struct varpool_node *node;
858 struct bitpack_d *bp;
859 bool aliases_p;
860 int count;
861
862 decl_index = lto_input_uleb128 (ib);
863 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
864 node = varpool_node (var_decl);
865
866 bp = lto_input_bitpack (ib);
867 node->externally_visible = bp_unpack_value (bp, 1);
868 node->force_output = bp_unpack_value (bp, 1);
869 node->finalized = bp_unpack_value (bp, 1);
870 node->analyzed = node->finalized;
871 node->used_from_other_partition = bp_unpack_value (bp, 1);
872 node->in_other_partition = bp_unpack_value (bp, 1);
873 aliases_p = bp_unpack_value (bp, 1);
874 if (node->finalized)
875 varpool_mark_needed_node (node);
876 bitpack_delete (bp);
877 if (aliases_p)
878 {
879 count = lto_input_uleb128 (ib);
880 for (; count > 0; count --)
881 {
882 tree decl = lto_file_decl_data_get_var_decl (file_data,
883 lto_input_uleb128 (ib));
884 varpool_extra_name_alias (decl, var_decl);
885 }
886 }
887 return node;
888 }
889
890
891 /* Read an edge from IB. NODES points to a vector of previously read nodes for
892 decoding caller and callee of the edge to be read. If INDIRECT is true, the
893 edge being read is indirect (in the sense that it has
894 indirect_unknown_callee set). */
895
896 static void
897 input_edge (struct lto_input_block *ib, VEC(cgraph_node_ptr, heap) *nodes,
898 bool indirect)
899 {
900 struct cgraph_node *caller, *callee;
901 struct cgraph_edge *edge;
902 unsigned int stmt_id;
903 gcov_type count;
904 int freq;
905 unsigned int nest;
906 cgraph_inline_failed_t inline_failed;
907 struct bitpack_d *bp;
908 enum ld_plugin_symbol_resolution caller_resolution;
909
910 caller = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
911 if (caller == NULL || caller->decl == NULL_TREE)
912 internal_error ("bytecode stream: no caller found while reading edge");
913
914 if (!indirect)
915 {
916 callee = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
917 if (callee == NULL || callee->decl == NULL_TREE)
918 internal_error ("bytecode stream: no callee found while reading edge");
919 }
920 else
921 callee = NULL;
922
923 count = (gcov_type) lto_input_sleb128 (ib);
924
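/* Unpack in the same order lto_output_edge packed: statement uid,
   inline_failed, frequency and loop nest now, and the remaining one-bit
   flags after the edge has been created.  */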
925 bp = lto_input_bitpack (ib);
926 stmt_id = (unsigned int) bp_unpack_value (bp, HOST_BITS_PER_INT);
927 inline_failed = (cgraph_inline_failed_t) bp_unpack_value (bp,
928 HOST_BITS_PER_INT);
929 freq = (int) bp_unpack_value (bp, HOST_BITS_PER_INT);
930 nest = (unsigned) bp_unpack_value (bp, 30);
931
932 /* If the caller was preempted, don't create the edge.
933 ??? Should we ever have edges from a preempted caller? */
934 caller_resolution = lto_symtab_get_resolution (caller->decl);
935 if (caller_resolution == LDPR_PREEMPTED_REG
936 || caller_resolution == LDPR_PREEMPTED_IR)
937 return;
938
939 if (indirect)
940 edge = cgraph_create_indirect_edge (caller, NULL, count, freq, nest);
941 else
942 edge = cgraph_create_edge (caller, callee, NULL, count, freq, nest);
943
944 edge->indirect_inlining_edge = bp_unpack_value (bp, 1);
945 edge->lto_stmt_uid = stmt_id;
946 edge->inline_failed = inline_failed;
947 edge->call_stmt_cannot_inline_p = bp_unpack_value (bp, 1);
948 edge->can_throw_external = bp_unpack_value (bp, 1);
949 bitpack_delete (bp);
950 }
951
952
953 /* Read a cgraph from IB using the info in FILE_DATA. */
954
955 static VEC(cgraph_node_ptr, heap) *
956 input_cgraph_1 (struct lto_file_decl_data *file_data,
957 struct lto_input_block *ib)
958 {
959 enum LTO_cgraph_tags tag;
960 VEC(cgraph_node_ptr, heap) *nodes = NULL;
961 struct cgraph_node *node;
962 unsigned i;
963 unsigned HOST_WIDE_INT len;
964
965 tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
966 while (tag)
967 {
968 if (tag == LTO_cgraph_edge)
969 input_edge (ib, nodes, false);
970 else if (tag == LTO_cgraph_indirect_edge)
971 input_edge (ib, nodes, true);
972 else
973 {
974 node = input_node (file_data, ib, tag);
975 if (node == NULL || node->decl == NULL_TREE)
976 internal_error ("bytecode stream: found empty cgraph node");
977 VEC_safe_push (cgraph_node_ptr, heap, nodes, node);
978 lto_cgraph_encoder_encode (file_data->cgraph_node_encoder, node);
979 }
980
981 tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
982 }
983
984 /* Input toplevel asms. */
985 len = lto_input_uleb128 (ib);
986 while (len)
987 {
988 char *str = (char *)xmalloc (len + 1);
989 for (i = 0; i < len; ++i)
990 str[i] = lto_input_1_unsigned (ib);
991 cgraph_add_asm_node (build_string (len, str));
992 free (str);
993
994 len = lto_input_uleb128 (ib);
995 }
996
997 for (i = 0; VEC_iterate (cgraph_node_ptr, nodes, i, node); i++)
998 {
999 int ref = (int) (intptr_t) node->global.inlined_to;
1000
1001 /* Fixup inlined_to from reference to pointer. */
1002 if (ref != LCC_NOT_FOUND)
1003 node->global.inlined_to = VEC_index (cgraph_node_ptr, nodes, ref);
1004 else
1005 node->global.inlined_to = NULL;
1006
1007 ref = (int) (intptr_t) node->same_comdat_group;
1008
1009 /* Fixup same_comdat_group from reference to pointer. */
1010 if (ref != LCC_NOT_FOUND)
1011 node->same_comdat_group = VEC_index (cgraph_node_ptr, nodes, ref);
1012 else
1013 node->same_comdat_group = NULL;
1014 }
1015 return nodes;
1016 }
1017
1018 /* Read a varpool from IB using the info in FILE_DATA. */
1019
1020 static VEC(varpool_node_ptr, heap) *
1021 input_varpool_1 (struct lto_file_decl_data *file_data,
1022 struct lto_input_block *ib)
1023 {
1024 unsigned HOST_WIDE_INT len;
1025 VEC(varpool_node_ptr, heap) *varpool = NULL;
1026
1027 len = lto_input_uleb128 (ib);
1028 while (len)
1029 {
1030 VEC_safe_push (varpool_node_ptr, heap, varpool,
1031 input_varpool_node (file_data, ib));
1032 len--;
1033 }
1034 return varpool;
1035 }
1036
1037
1038 static struct gcov_ctr_summary lto_gcov_summary;
1039
1040 /* Input profile_info from IB. */
1041 static void
1042 input_profile_summary (struct lto_input_block *ib)
1043 {
1044 unsigned int runs = lto_input_uleb128 (ib);
1045 if (runs)
1046 {
1047 if (!profile_info)
1048 {
1049 profile_info = &lto_gcov_summary;
1050 lto_gcov_summary.runs = runs;
1051 lto_gcov_summary.sum_all = lto_input_sleb128 (ib);
1052 lto_gcov_summary.run_max = lto_input_sleb128 (ib);
1053 lto_gcov_summary.sum_max = lto_input_sleb128 (ib);
1054 }
1055 /* We can support this by scaling all counts to the nearest common multiple
1056 of all different runs, but it is perhaps not worth the effort. */
1057 else if (profile_info->runs != runs
1058 || profile_info->sum_all != lto_input_sleb128 (ib)
1059 || profile_info->run_max != lto_input_sleb128 (ib)
1060 || profile_info->sum_max != lto_input_sleb128 (ib))
1061 sorry ("Combining units with different profiles is not supported.");
1062 /* We allow some units to have a profile and others not to have one. This
1063 will just make the unprofiled units be size-optimized, which is sane. */
1064 }
1065
1066 }
1067
1068 /* Input and merge the cgraph from each of the .o files passed to
1069 lto1. */
1070
1071 void
1072 input_cgraph (void)
1073 {
1074 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1075 struct lto_file_decl_data *file_data;
1076 unsigned int j = 0;
1077 struct cgraph_node *node;
1078
1079 while ((file_data = file_data_vec[j++]))
1080 {
1081 const char *data;
1082 size_t len;
1083 struct lto_input_block *ib;
1084 VEC(cgraph_node_ptr, heap) *nodes;
1085 VEC(varpool_node_ptr, heap) *varpool;
1086
1087 ib = lto_create_simple_input_block (file_data, LTO_section_cgraph,
1088 &data, &len);
1089 input_profile_summary (ib);
1090 file_data->cgraph_node_encoder = lto_cgraph_encoder_new ();
1091 nodes = input_cgraph_1 (file_data, ib);
1092 lto_destroy_simple_input_block (file_data, LTO_section_cgraph,
1093 ib, data, len);
1094
1095 ib = lto_create_simple_input_block (file_data, LTO_section_varpool,
1096 &data, &len);
1097 varpool = input_varpool_1 (file_data, ib);
1098 lto_destroy_simple_input_block (file_data, LTO_section_varpool,
1099 ib, data, len);
1100
1101 VEC_free (cgraph_node_ptr, heap, nodes);
1102 VEC_free (varpool_node_ptr, heap, varpool);
1103 }
1104
1105 /* Clear out the aux field that was used to store enough state to
1106 tell which nodes should be overwritten. */
1107 for (node = cgraph_nodes; node; node = node->next)
1108 {
1109 /* Some nodes may have been created by cgraph_node. This
1110 happens when the callgraph contains nested functions. If the
1111 node for the parent function was never emitted to the gimple
1112 file, cgraph_node will create a node for it when setting the
1113 context of the nested function. */
1114 if (node->local.lto_file_data)
1115 node->aux = NULL;
1116 }
1117 }