lto-cgraph.c (output_cgraph): Output toplevel asms only into first partition.
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright 2009, 2010 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "toplev.h"
28 #include "tree.h"
29 #include "expr.h"
30 #include "flags.h"
31 #include "params.h"
32 #include "input.h"
33 #include "hashtab.h"
34 #include "langhooks.h"
35 #include "basic-block.h"
36 #include "tree-flow.h"
37 #include "cgraph.h"
38 #include "function.h"
39 #include "ggc.h"
40 #include "diagnostic-core.h"
41 #include "except.h"
42 #include "vec.h"
43 #include "timevar.h"
44 #include "output.h"
45 #include "pointer-set.h"
46 #include "lto-streamer.h"
47 #include "gcov-io.h"
48
49 static void output_varpool (cgraph_node_set, varpool_node_set);
50 static void output_cgraph_opt_summary (void);
51 static void input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes);
52
53
 54 /* Cgraph streaming is organized as a set of records whose type
 55 is indicated by a tag. */
56 enum LTO_cgraph_tags
57 {
58 /* Must leave 0 for the stopper. */
59
60 /* Cgraph node without body available. */
61 LTO_cgraph_unavail_node = 1,
62 /* Cgraph node with function body. */
63 LTO_cgraph_analyzed_node,
64 /* Cgraph edges. */
65 LTO_cgraph_edge,
66 LTO_cgraph_indirect_edge
67 };
68
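/* Roughly, the LTO_section_cgraph section written by output_cgraph below
   consists of: a profile summary, the node records (tagged
   LTO_cgraph_unavail_node or LTO_cgraph_analyzed_node), the edge records
   (LTO_cgraph_edge or LTO_cgraph_indirect_edge), a zero terminator, the
   toplevel asm strings, and a final zero terminator.  input_cgraph_1
   reads the records back in the same order.  */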
69 /* Create a new cgraph encoder. */
70
71 lto_cgraph_encoder_t
72 lto_cgraph_encoder_new (void)
73 {
74 lto_cgraph_encoder_t encoder = XCNEW (struct lto_cgraph_encoder_d);
75 encoder->map = pointer_map_create ();
76 encoder->nodes = NULL;
77 encoder->body = pointer_set_create ();
78 return encoder;
79 }
80
81
82 /* Delete ENCODER and its components. */
83
84 void
85 lto_cgraph_encoder_delete (lto_cgraph_encoder_t encoder)
86 {
87 VEC_free (cgraph_node_ptr, heap, encoder->nodes);
88 pointer_map_destroy (encoder->map);
89 pointer_set_destroy (encoder->body);
90 free (encoder);
91 }
92
93
 94 /* Return the existing reference number of NODE in the cgraph
 95 ENCODER. Assign a new reference if this is the first time
 96 NODE is encoded. */
97
98 int
99 lto_cgraph_encoder_encode (lto_cgraph_encoder_t encoder,
100 struct cgraph_node *node)
101 {
102 int ref;
103 void **slot;
104
105 slot = pointer_map_contains (encoder->map, node);
106 if (!slot)
107 {
108 ref = VEC_length (cgraph_node_ptr, encoder->nodes);
109 slot = pointer_map_insert (encoder->map, node);
110 *slot = (void *) (intptr_t) ref;
111 VEC_safe_push (cgraph_node_ptr, heap, encoder->nodes, node);
112 }
113 else
114 ref = (int) (intptr_t) *slot;
115
116 return ref;
117 }
118
119 #define LCC_NOT_FOUND (-1)
120
121 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
122 or LCC_NOT_FOUND if it is not there. */
123
124 int
125 lto_cgraph_encoder_lookup (lto_cgraph_encoder_t encoder,
126 struct cgraph_node *node)
127 {
128 void **slot = pointer_map_contains (encoder->map, node);
129 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
130 }
131
132
133 /* Return the cgraph node corresponding to REF using ENCODER. */
134
135 struct cgraph_node *
136 lto_cgraph_encoder_deref (lto_cgraph_encoder_t encoder, int ref)
137 {
138 if (ref == LCC_NOT_FOUND)
139 return NULL;
140
141 return VEC_index (cgraph_node_ptr, encoder->nodes, ref);
142 }
143
144
 145 /* Return TRUE if we should encode the body of NODE (if any). */
146
147 bool
148 lto_cgraph_encoder_encode_body_p (lto_cgraph_encoder_t encoder,
149 struct cgraph_node *node)
150 {
151 return pointer_set_contains (encoder->body, node);
152 }
153
 154 /* Specify that we should encode the body of NODE (if any). */
155
156 static void
157 lto_set_cgraph_encoder_encode_body (lto_cgraph_encoder_t encoder,
158 struct cgraph_node *node)
159 {
160 pointer_set_insert (encoder->body, node);
161 }
162
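/* A minimal usage sketch of the cgraph encoder API above, assuming NODE is
   some struct cgraph_node *; the real driver is compute_ltrans_boundary
   below:

     lto_cgraph_encoder_t encoder = lto_cgraph_encoder_new ();
     int ref = lto_cgraph_encoder_encode (encoder, node);
     lto_set_cgraph_encoder_encode_body (encoder, node);
     gcc_assert (lto_cgraph_encoder_lookup (encoder, node) == ref);
     gcc_assert (lto_cgraph_encoder_deref (encoder, ref) == node);
     lto_cgraph_encoder_delete (encoder);  */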
163 /* Create a new varpool encoder. */
164
165 lto_varpool_encoder_t
166 lto_varpool_encoder_new (void)
167 {
168 lto_varpool_encoder_t encoder = XCNEW (struct lto_varpool_encoder_d);
169 encoder->map = pointer_map_create ();
170 encoder->initializer = pointer_set_create ();
171 encoder->nodes = NULL;
172 return encoder;
173 }
174
175
176 /* Delete ENCODER and its components. */
177
178 void
179 lto_varpool_encoder_delete (lto_varpool_encoder_t encoder)
180 {
181 VEC_free (varpool_node_ptr, heap, encoder->nodes);
182 pointer_map_destroy (encoder->map);
183 pointer_set_destroy (encoder->initializer);
184 free (encoder);
185 }
186
187
 188 /* Return the existing reference number of NODE in the varpool
 189 ENCODER. Assign a new reference if this is the first time
 190 NODE is encoded. */
191
192 int
193 lto_varpool_encoder_encode (lto_varpool_encoder_t encoder,
194 struct varpool_node *node)
195 {
196 int ref;
197 void **slot;
198
199 slot = pointer_map_contains (encoder->map, node);
200 if (!slot)
201 {
202 ref = VEC_length (varpool_node_ptr, encoder->nodes);
203 slot = pointer_map_insert (encoder->map, node);
204 *slot = (void *) (intptr_t) ref;
205 VEC_safe_push (varpool_node_ptr, heap, encoder->nodes, node);
206 }
207 else
208 ref = (int) (intptr_t) *slot;
209
210 return ref;
211 }
212
213 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
214 or LCC_NOT_FOUND if it is not there. */
215
216 int
217 lto_varpool_encoder_lookup (lto_varpool_encoder_t encoder,
218 struct varpool_node *node)
219 {
220 void **slot = pointer_map_contains (encoder->map, node);
221 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
222 }
223
224
225 /* Return the varpool node corresponding to REF using ENCODER. */
226
227 struct varpool_node *
228 lto_varpool_encoder_deref (lto_varpool_encoder_t encoder, int ref)
229 {
230 if (ref == LCC_NOT_FOUND)
231 return NULL;
232
233 return VEC_index (varpool_node_ptr, encoder->nodes, ref);
234 }
235
236
237 /* Return TRUE if we should encode initializer of NODE (if any). */
238
239 bool
240 lto_varpool_encoder_encode_initializer_p (lto_varpool_encoder_t encoder,
241 struct varpool_node *node)
242 {
243 return pointer_set_contains (encoder->initializer, node);
244 }
245
 246 /* Specify that we should encode the initializer of NODE (if any). */
247
248 static void
249 lto_set_varpool_encoder_encode_initializer (lto_varpool_encoder_t encoder,
250 struct varpool_node *node)
251 {
252 pointer_set_insert (encoder->initializer, node);
253 }
254
255 /* Output the cgraph EDGE to OB using ENCODER. */
256
257 static void
258 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
259 lto_cgraph_encoder_t encoder)
260 {
261 unsigned int uid;
262 intptr_t ref;
263 struct bitpack_d bp;
264
265 if (edge->indirect_unknown_callee)
266 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_indirect_edge);
267 else
268 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_edge);
269
270 ref = lto_cgraph_encoder_lookup (encoder, edge->caller);
271 gcc_assert (ref != LCC_NOT_FOUND);
272 lto_output_sleb128_stream (ob->main_stream, ref);
273
274 if (!edge->indirect_unknown_callee)
275 {
276 ref = lto_cgraph_encoder_lookup (encoder, edge->callee);
277 gcc_assert (ref != LCC_NOT_FOUND);
278 lto_output_sleb128_stream (ob->main_stream, ref);
279 }
280
281 lto_output_sleb128_stream (ob->main_stream, edge->count);
282
283 bp = bitpack_create (ob->main_stream);
284 uid = flag_wpa ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt);
285 bp_pack_value (&bp, uid, HOST_BITS_PER_INT);
286 bp_pack_value (&bp, edge->inline_failed, HOST_BITS_PER_INT);
287 bp_pack_value (&bp, edge->frequency, HOST_BITS_PER_INT);
288 bp_pack_value (&bp, edge->loop_nest, 30);
289 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
290 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
291 bp_pack_value (&bp, edge->can_throw_external, 1);
292 if (edge->indirect_unknown_callee)
293 {
294 int flags = edge->indirect_info->ecf_flags;
295 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
296 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
297 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
298 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
299 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
300 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
301 /* Flags that should not appear on indirect calls. */
302 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
303 | ECF_MAY_BE_ALLOCA
304 | ECF_SIBCALL
305 | ECF_NOVOPS)));
306 }
307 lto_output_bitpack (&bp);
308 }
309
 310 /* Return true if LIST contains references from other partitions. */
311
312 bool
313 referenced_from_other_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
314 varpool_node_set vset)
315 {
316 int i;
317 struct ipa_ref *ref;
318 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
319 {
320 if (ref->refering_type == IPA_REF_CGRAPH)
321 {
322 if (ipa_ref_refering_node (ref)->in_other_partition
323 || !cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
324 return true;
325 }
326 else
327 {
328 if (ipa_ref_refering_varpool_node (ref)->in_other_partition
329 || !varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
330 vset))
331 return true;
332 }
333 }
334 return false;
335 }
336
 337 /* Return true when NODE is reachable from another partition. */
338
339 bool
340 reachable_from_other_partition_p (struct cgraph_node *node, cgraph_node_set set)
341 {
342 struct cgraph_edge *e;
343 if (!node->analyzed)
344 return false;
345 if (node->global.inlined_to)
346 return false;
347 for (e = node->callers; e; e = e->next_caller)
348 if (e->caller->in_other_partition
349 || !cgraph_node_in_set_p (e->caller, set))
350 return true;
351 return false;
352 }
353
 354 /* Return true if LIST contains references from this partition. */
355
356 bool
357 referenced_from_this_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
358 varpool_node_set vset)
359 {
360 int i;
361 struct ipa_ref *ref;
362 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
363 {
364 if (ref->refering_type == IPA_REF_CGRAPH)
365 {
366 if (cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
367 return true;
368 }
369 else
370 {
371 if (varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
372 vset))
373 return true;
374 }
375 }
376 return false;
377 }
378
 379 /* Return true when NODE is reachable from this partition. */
380
381 bool
382 reachable_from_this_partition_p (struct cgraph_node *node, cgraph_node_set set)
383 {
384 struct cgraph_edge *e;
385 if (!node->analyzed)
386 return false;
387 if (node->global.inlined_to)
388 return false;
389 for (e = node->callers; e; e = e->next_caller)
390 if (cgraph_node_in_set_p (e->caller, set))
391 return true;
392 return false;
393 }
394
 395 /* Output the cgraph NODE to OB. ENCODER is used to find the
 396 reference number of NODE->inlined_to. SET is the set of nodes we
 397 are writing to the current file. If NODE is not in SET, then NODE
 398 is a boundary of a cgraph_node_set and we pretend NODE just has a
 399 decl and no callees. VSET is the set of varpool nodes being
 400 written; together with SET it is used to decide whether NODE is
 401 referenced from other partitions. */
402
403 static void
404 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
405 lto_cgraph_encoder_t encoder, cgraph_node_set set,
406 varpool_node_set vset)
407 {
408 unsigned int tag;
409 struct bitpack_d bp;
410 bool boundary_p;
411 intptr_t ref;
412 bool in_other_partition = false;
413 struct cgraph_node *clone_of;
414
415 boundary_p = !cgraph_node_in_set_p (node, set);
416
417 if (node->analyzed && !boundary_p)
418 tag = LTO_cgraph_analyzed_node;
419 else
420 tag = LTO_cgraph_unavail_node;
421
422 lto_output_uleb128_stream (ob->main_stream, tag);
423
 424 /* In WPA mode, we only output part of the call-graph. Also, we
 425 fake cgraph node attributes. There are two cases we care about:
 426
 427 Boundary nodes: There are nodes that are not part of SET but are
 428 called from within SET. We artificially make them look like
 429 externally visible nodes with no function body.
 430
 431 Cherry-picked nodes: These are nodes we pulled from other
 432 translation units into SET during IPA-inlining. We make them look
 433 like local static nodes to prevent clashes with other local statics. */
434 if (boundary_p && node->analyzed)
435 {
 436 /* Inline clones cannot be part of the boundary.
 437 gcc_assert (!node->global.inlined_to);
 438
 439 FIXME: At the moment they can be, when a partition contains an inline
 440 clone that is a clone of an inline clone from outside the partition.
 441 We can reshape the clone tree and make another node the root, but it
 442 needs a bit of extra work and will be promptly done by
 443 cgraph_remove_node after reading back. */
444 in_other_partition = 1;
445 }
446
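/* Stream a reference to the nearest ancestor (or earlier sibling) clone
   that made it into the encoder; the reader clones this node from it via
   cgraph_clone_node.  LCC_NOT_FOUND means there is nothing to clone from
   and the reader creates a fresh node for the decl instead.  */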
447 clone_of = node->clone_of;
448 while (clone_of
449 && (ref = lto_cgraph_encoder_lookup (encoder, node->clone_of)) == LCC_NOT_FOUND)
450 if (clone_of->prev_sibling_clone)
451 clone_of = clone_of->prev_sibling_clone;
452 else
453 clone_of = clone_of->clone_of;
454 if (!clone_of)
455 lto_output_sleb128_stream (ob->main_stream, LCC_NOT_FOUND);
456 else
457 lto_output_sleb128_stream (ob->main_stream, ref);
458
459
460 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
461 lto_output_sleb128_stream (ob->main_stream, node->count);
462
463 if (tag == LTO_cgraph_analyzed_node)
464 {
465 lto_output_sleb128_stream (ob->main_stream,
466 node->local.inline_summary.estimated_self_stack_size);
467 lto_output_sleb128_stream (ob->main_stream,
468 node->local.inline_summary.self_size);
469 lto_output_sleb128_stream (ob->main_stream,
470 node->local.inline_summary.size_inlining_benefit);
471 lto_output_sleb128_stream (ob->main_stream,
472 node->local.inline_summary.self_time);
473 lto_output_sleb128_stream (ob->main_stream,
474 node->local.inline_summary.time_inlining_benefit);
475 if (node->global.inlined_to)
476 {
477 ref = lto_cgraph_encoder_lookup (encoder, node->global.inlined_to);
478 gcc_assert (ref != LCC_NOT_FOUND);
479 }
480 else
481 ref = LCC_NOT_FOUND;
482
483 lto_output_sleb128_stream (ob->main_stream, ref);
484 }
485
486 if (node->same_comdat_group && !boundary_p)
487 {
488 ref = lto_cgraph_encoder_lookup (encoder, node->same_comdat_group);
489 gcc_assert (ref != LCC_NOT_FOUND);
490 }
491 else
492 ref = LCC_NOT_FOUND;
493 lto_output_sleb128_stream (ob->main_stream, ref);
494
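/* Pack the boolean node flags.  input_overwrite_node must unpack them
   in exactly the same order.  */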
495 bp = bitpack_create (ob->main_stream);
496 bp_pack_value (&bp, node->local.local, 1);
497 bp_pack_value (&bp, node->local.externally_visible, 1);
498 bp_pack_value (&bp, node->local.finalized, 1);
499 bp_pack_value (&bp, node->local.inlinable, 1);
500 bp_pack_value (&bp, node->local.versionable, 1);
501 bp_pack_value (&bp, node->local.disregard_inline_limits, 1);
502 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
503 bp_pack_value (&bp, node->local.vtable_method, 1);
504 bp_pack_value (&bp, node->needed, 1);
505 bp_pack_value (&bp, node->address_taken, 1);
506 bp_pack_value (&bp, node->abstract_and_needed, 1);
507 bp_pack_value (&bp, tag == LTO_cgraph_analyzed_node
508 && !DECL_EXTERNAL (node->decl)
509 && !DECL_COMDAT (node->decl)
510 && (reachable_from_other_partition_p (node, set)
511 || referenced_from_other_partition_p (&node->ref_list, set, vset)), 1);
512 bp_pack_value (&bp, node->lowered, 1);
513 bp_pack_value (&bp, in_other_partition, 1);
514 bp_pack_value (&bp, node->alias, 1);
515 bp_pack_value (&bp, node->finalized_by_frontend, 1);
516 bp_pack_value (&bp, node->frequency, 2);
517 lto_output_bitpack (&bp);
518
519 if (node->same_body)
520 {
521 struct cgraph_node *alias;
522 unsigned long alias_count = 1;
523 for (alias = node->same_body; alias->next; alias = alias->next)
524 alias_count++;
525 lto_output_uleb128_stream (ob->main_stream, alias_count);
526 do
527 {
528 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
529 alias->decl);
530 if (alias->thunk.thunk_p)
531 {
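/* Encode the thunk flags in one small integer: bit 0 is always set
   (so zero means a plain same-body alias), bit 1 is this_adjusting
   and bit 2 is virtual_offset_p.  input_node decodes this as TYPE.  */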
532 lto_output_uleb128_stream
533 (ob->main_stream,
534 1 + (alias->thunk.this_adjusting != 0) * 2
535 + (alias->thunk.virtual_offset_p != 0) * 4);
536 lto_output_uleb128_stream (ob->main_stream,
537 alias->thunk.fixed_offset);
538 lto_output_uleb128_stream (ob->main_stream,
539 alias->thunk.virtual_value);
540 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
541 alias->thunk.alias);
542 }
543 else
544 {
545 lto_output_uleb128_stream (ob->main_stream, 0);
546 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
547 alias->thunk.alias);
548 }
549 alias = alias->previous;
550 }
551 while (alias);
552 }
553 else
554 lto_output_uleb128_stream (ob->main_stream, 0);
555 }
556
 557 /* Output the varpool NODE to OB.
 558 If NODE is not in VSET, then NODE is a boundary. */
559
560 static void
561 lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
562 lto_varpool_encoder_t varpool_encoder,
563 cgraph_node_set set, varpool_node_set vset)
564 {
565 bool boundary_p = !varpool_node_in_set_p (node, vset) && node->analyzed;
566 struct bitpack_d bp;
567 struct varpool_node *alias;
568 int count = 0;
569 int ref;
570
571 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
572 bp = bitpack_create (ob->main_stream);
573 bp_pack_value (&bp, node->externally_visible, 1);
574 bp_pack_value (&bp, node->force_output, 1);
575 bp_pack_value (&bp, node->finalized, 1);
576 bp_pack_value (&bp, node->alias, 1);
577 gcc_assert (!node->alias || !node->extra_name);
578 gcc_assert (node->finalized || !node->analyzed);
579 gcc_assert (node->needed);
 580 /* Constant pool initializers can be de-unified into individual ltrans units.
 581 FIXME: Alternatively at -Os we may want to avoid generating the local
 582 labels for them and share them across LTRANS partitions. */
583 if (DECL_IN_CONSTANT_POOL (node->decl)
584 && !DECL_COMDAT (node->decl))
585 {
 586 bp_pack_value (&bp, 0, 1); /* used_from_other_partition. */
587 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
588 }
589 else
590 {
591 bp_pack_value (&bp, node->analyzed
592 && referenced_from_other_partition_p (&node->ref_list,
593 set, vset), 1);
594 bp_pack_value (&bp, boundary_p, 1); /* in_other_partition. */
595 }
596 /* Also emit any extra name aliases. */
597 for (alias = node->extra_name; alias; alias = alias->next)
598 count++;
599 bp_pack_value (&bp, count != 0, 1);
600 lto_output_bitpack (&bp);
601 if (node->same_comdat_group && !boundary_p)
602 {
603 ref = lto_varpool_encoder_lookup (varpool_encoder, node->same_comdat_group);
604 gcc_assert (ref != LCC_NOT_FOUND);
605 }
606 else
607 ref = LCC_NOT_FOUND;
608 lto_output_sleb128_stream (ob->main_stream, ref);
609
610 if (count)
611 {
612 lto_output_uleb128_stream (ob->main_stream, count);
613 for (alias = node->extra_name; alias; alias = alias->next)
614 lto_output_var_decl_index (ob->decl_state, ob->main_stream, alias->decl);
615 }
616 }
617
 618 /* Output the reference REF to OB. ENCODER and VARPOOL_ENCODER are used
 619 to look up the reference number of the referenced node. */
620
621 static void
622 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
623 lto_cgraph_encoder_t encoder,
624 lto_varpool_encoder_t varpool_encoder)
625 {
626 struct bitpack_d bp;
627 bp = bitpack_create (ob->main_stream);
628 bp_pack_value (&bp, ref->refered_type, 1);
629 bp_pack_value (&bp, ref->use, 2);
630 lto_output_bitpack (&bp);
631 if (ref->refered_type == IPA_REF_CGRAPH)
632 {
633 int nref = lto_cgraph_encoder_lookup (encoder, ipa_ref_node (ref));
634 gcc_assert (nref != LCC_NOT_FOUND);
635 lto_output_sleb128_stream (ob->main_stream, nref);
636 }
637 else
638 {
639 int nref = lto_varpool_encoder_lookup (varpool_encoder,
640 ipa_ref_varpool_node (ref));
641 gcc_assert (nref != LCC_NOT_FOUND);
642 lto_output_sleb128_stream (ob->main_stream, nref);
643 }
644 }
645
646 /* Stream out profile_summary to OB. */
647
648 static void
649 output_profile_summary (struct lto_simple_output_block *ob)
650 {
651 if (profile_info)
652 {
653 /* We do not output num, it is not terribly useful. */
654 gcc_assert (profile_info->runs);
655 lto_output_uleb128_stream (ob->main_stream, profile_info->runs);
656 lto_output_sleb128_stream (ob->main_stream, profile_info->sum_all);
657 lto_output_sleb128_stream (ob->main_stream, profile_info->run_max);
658 lto_output_sleb128_stream (ob->main_stream, profile_info->sum_max);
659 }
660 else
661 lto_output_uleb128_stream (ob->main_stream, 0);
662 }
663
 664 /* Add NODE into encoder as well as nodes it is cloned from.
 665 Do it in a way so that the nodes a clone originates from appear first. */
666
667 static void
668 add_node_to (lto_cgraph_encoder_t encoder, struct cgraph_node *node,
669 bool include_body)
670 {
671 if (node->clone_of)
672 add_node_to (encoder, node->clone_of, include_body);
673 else if (include_body)
674 lto_set_cgraph_encoder_encode_body (encoder, node);
675 lto_cgraph_encoder_encode (encoder, node);
676 }
677
678 /* Add all references in LIST to encoders. */
679
680 static void
681 add_references (lto_cgraph_encoder_t encoder,
682 lto_varpool_encoder_t varpool_encoder,
683 struct ipa_ref_list *list)
684 {
685 int i;
686 struct ipa_ref *ref;
687 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
688 if (ref->refered_type == IPA_REF_CGRAPH)
689 add_node_to (encoder, ipa_ref_node (ref), false);
690 else
691 {
692 struct varpool_node *vnode = ipa_ref_varpool_node (ref);
693 lto_varpool_encoder_encode (varpool_encoder, vnode);
694 }
695 }
696
697 /* Output all callees or indirect outgoing edges. EDGE must be the first such
698 edge. */
699
700 static void
701 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
702 struct lto_simple_output_block *ob,
703 lto_cgraph_encoder_t encoder)
704 {
705 if (!edge)
706 return;
707
 708 /* Output edges in backward direction, so the reconstructed callgraph matches
 709 and it is easy to associate call sites with the IPA pass summaries. */
710 while (edge->next_callee)
711 edge = edge->next_callee;
712 for (; edge; edge = edge->prev_callee)
713 lto_output_edge (ob, edge, encoder);
714 }
715
 716 /* Output the references of the cgraph nodes in SET and the varpool nodes in VSET. */
717
718 static void
719 output_refs (cgraph_node_set set, varpool_node_set vset,
720 lto_cgraph_encoder_t encoder,
721 lto_varpool_encoder_t varpool_encoder)
722 {
723 cgraph_node_set_iterator csi;
724 varpool_node_set_iterator vsi;
725 struct lto_simple_output_block *ob;
726 int count;
727 struct ipa_ref *ref;
728 int i;
729
730 ob = lto_create_simple_output_block (LTO_section_refs);
731
732 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
733 {
734 struct cgraph_node *node = csi_node (csi);
735
736 count = ipa_ref_list_nreferences (&node->ref_list);
737 if (count)
738 {
739 lto_output_uleb128_stream (ob->main_stream, count);
740 lto_output_uleb128_stream (ob->main_stream,
741 lto_cgraph_encoder_lookup (encoder, node));
742 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
743 lto_output_ref (ob, ref, encoder, varpool_encoder);
744 }
745 }
746
747 lto_output_uleb128_stream (ob->main_stream, 0);
748
749 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
750 {
751 struct varpool_node *node = vsi_node (vsi);
752
753 count = ipa_ref_list_nreferences (&node->ref_list);
754 if (count)
755 {
756 lto_output_uleb128_stream (ob->main_stream, count);
757 lto_output_uleb128_stream (ob->main_stream,
758 lto_varpool_encoder_lookup (varpool_encoder,
759 node));
760 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
761 lto_output_ref (ob, ref, encoder, varpool_encoder);
762 }
763 }
764
765 lto_output_uleb128_stream (ob->main_stream, 0);
766
767 lto_destroy_simple_output_block (ob);
768 }
769
 770 /* Find out all cgraph and varpool nodes we want to encode in the current unit
 771 and insert them into the encoders. */
772 void
773 compute_ltrans_boundary (struct lto_out_decl_state *state,
774 cgraph_node_set set, varpool_node_set vset)
775 {
776 struct cgraph_node *node;
777 cgraph_node_set_iterator csi;
778 varpool_node_set_iterator vsi;
779 struct cgraph_edge *edge;
780 int i;
781 lto_cgraph_encoder_t encoder;
782 lto_varpool_encoder_t varpool_encoder;
783
784 encoder = state->cgraph_node_encoder = lto_cgraph_encoder_new ();
785 varpool_encoder = state->varpool_node_encoder = lto_varpool_encoder_new ();
786
787 /* Go over all the nodes in SET and assign references. */
788 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
789 {
790 node = csi_node (csi);
791 add_node_to (encoder, node, true);
792 add_references (encoder, varpool_encoder, &node->ref_list);
793 }
794 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
795 {
796 struct varpool_node *vnode = vsi_node (vsi);
797 gcc_assert (!vnode->alias);
798 lto_varpool_encoder_encode (varpool_encoder, vnode);
799 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
800 add_references (encoder, varpool_encoder, &vnode->ref_list);
801 }
 802 /* Also pickle the initializers of all referenced readonly variables
 803 to help folding. Constant pool variables are not shared, so we must
 804 pickle those too. */
805 for (i = 0; i < lto_varpool_encoder_size (varpool_encoder); i++)
806 {
807 struct varpool_node *vnode = lto_varpool_encoder_deref (varpool_encoder, i);
808 if (DECL_INITIAL (vnode->decl)
809 && !lto_varpool_encoder_encode_initializer_p (varpool_encoder,
810 vnode)
811 && (DECL_IN_CONSTANT_POOL (vnode->decl)
812 || TREE_READONLY (vnode->decl)))
813 {
814 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
815 add_references (encoder, varpool_encoder, &vnode->ref_list);
816 }
817 }
818
819 /* Go over all the nodes again to include callees that are not in
820 SET. */
821 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
822 {
823 node = csi_node (csi);
824 for (edge = node->callees; edge; edge = edge->next_callee)
825 {
826 struct cgraph_node *callee = edge->callee;
827 if (!cgraph_node_in_set_p (callee, set))
828 {
829 /* We should have moved all the inlines. */
830 gcc_assert (!callee->global.inlined_to);
831 add_node_to (encoder, callee, false);
832 }
833 }
834 }
835 }
836
837 /* Output the part of the cgraph in SET. */
838
839 void
840 output_cgraph (cgraph_node_set set, varpool_node_set vset)
841 {
842 struct cgraph_node *node;
843 struct lto_simple_output_block *ob;
844 cgraph_node_set_iterator csi;
845 int i, n_nodes;
846 lto_cgraph_encoder_t encoder;
847 lto_varpool_encoder_t varpool_encoder;
848 struct cgraph_asm_node *can;
849
850 if (flag_wpa)
851 output_cgraph_opt_summary ();
852
853 ob = lto_create_simple_output_block (LTO_section_cgraph);
854
855 output_profile_summary (ob);
856
857 /* An encoder for cgraph nodes should have been created by
858 ipa_write_summaries_1. */
859 gcc_assert (ob->decl_state->cgraph_node_encoder);
860 gcc_assert (ob->decl_state->varpool_node_encoder);
861 encoder = ob->decl_state->cgraph_node_encoder;
862 varpool_encoder = ob->decl_state->varpool_node_encoder;
863
 864 /* Write out the nodes. We must first output a node and then its clones,
 865 otherwise when reading the node back there would be nothing to clone
 866 from. */
867 n_nodes = lto_cgraph_encoder_size (encoder);
868 for (i = 0; i < n_nodes; i++)
869 {
870 node = lto_cgraph_encoder_deref (encoder, i);
871 lto_output_node (ob, node, encoder, set, vset);
872 }
873
874 /* Go over the nodes in SET again to write edges. */
875 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
876 {
877 node = csi_node (csi);
878 output_outgoing_cgraph_edges (node->callees, ob, encoder);
879 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
880 }
881
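/* Terminate the node and edge records; the zero tag is the stopper
   input_cgraph_1 looks for.  */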
882 lto_output_uleb128_stream (ob->main_stream, 0);
883
 884 /* Emit toplevel asms.
 885 When doing WPA we must output every asm just once. Since we do not partition
 886 asm nodes at all, output them into the first partition only. This is kind of
 887 a hack, but it should work well. */
888 if (!asm_nodes_output)
889 {
890 asm_nodes_output = true;
891 for (can = cgraph_asm_nodes; can; can = can->next)
892 {
893 int len = TREE_STRING_LENGTH (can->asm_str);
894 lto_output_uleb128_stream (ob->main_stream, len);
895 for (i = 0; i < len; ++i)
896 lto_output_1_stream (ob->main_stream,
897 TREE_STRING_POINTER (can->asm_str)[i]);
898 }
899 }
900
901 lto_output_uleb128_stream (ob->main_stream, 0);
902
903 lto_destroy_simple_output_block (ob);
904 output_varpool (set, vset);
905 output_refs (set, vset, encoder, varpool_encoder);
906 }
907
 908 /* Overwrite the information in NODE based on FILE_DATA, TAG,
 909 STACK_SIZE, SELF_TIME, SELF_SIZE and the inlining benefits. This is
 910 called either to initialize NODE or to replace the values in it, for
 911 instance because the first time we saw it, the function body was not
 912 available but now it is. BP is a bitpack with all the bitflags for
 913 NODE read from the stream. */
914
915 static void
916 input_overwrite_node (struct lto_file_decl_data *file_data,
917 struct cgraph_node *node,
918 enum LTO_cgraph_tags tag,
919 struct bitpack_d *bp,
920 unsigned int stack_size,
921 unsigned int self_time,
922 unsigned int time_inlining_benefit,
923 unsigned int self_size,
924 unsigned int size_inlining_benefit)
925 {
926 node->aux = (void *) tag;
927 node->local.inline_summary.estimated_self_stack_size = stack_size;
928 node->local.inline_summary.self_time = self_time;
929 node->local.inline_summary.time_inlining_benefit = time_inlining_benefit;
930 node->local.inline_summary.self_size = self_size;
931 node->local.inline_summary.size_inlining_benefit = size_inlining_benefit;
932 node->global.time = self_time;
933 node->global.size = self_size;
934 node->global.estimated_stack_size = stack_size;
935 node->global.estimated_growth = INT_MIN;
936 node->local.lto_file_data = file_data;
937
938 node->local.local = bp_unpack_value (bp, 1);
939 node->local.externally_visible = bp_unpack_value (bp, 1);
940 node->local.finalized = bp_unpack_value (bp, 1);
941 node->local.inlinable = bp_unpack_value (bp, 1);
942 node->local.versionable = bp_unpack_value (bp, 1);
943 node->local.disregard_inline_limits = bp_unpack_value (bp, 1);
944 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
945 node->local.vtable_method = bp_unpack_value (bp, 1);
946 node->needed = bp_unpack_value (bp, 1);
947 node->address_taken = bp_unpack_value (bp, 1);
948 node->abstract_and_needed = bp_unpack_value (bp, 1);
949 node->reachable_from_other_partition = bp_unpack_value (bp, 1);
950 node->lowered = bp_unpack_value (bp, 1);
951 node->analyzed = tag == LTO_cgraph_analyzed_node;
952 node->in_other_partition = bp_unpack_value (bp, 1);
953 node->alias = bp_unpack_value (bp, 1);
954 node->finalized_by_frontend = bp_unpack_value (bp, 1);
955 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
956 }
957
 958 /* Output the part of the varpool encoded for SET and VSET. */
959
960 static void
961 output_varpool (cgraph_node_set set, varpool_node_set vset)
962 {
963 struct lto_simple_output_block *ob = lto_create_simple_output_block (LTO_section_varpool);
964 lto_varpool_encoder_t varpool_encoder = ob->decl_state->varpool_node_encoder;
965 int len = lto_varpool_encoder_size (varpool_encoder), i;
966
967 lto_output_uleb128_stream (ob->main_stream, len);
968
 969 /* Write out the nodes. We must first output a node and then its clones,
 970 otherwise when reading the node back there would be nothing to clone
 971 from. */
972 for (i = 0; i < len; i++)
973 {
974 lto_output_varpool_node (ob, lto_varpool_encoder_deref (varpool_encoder, i),
975 varpool_encoder,
976 set, vset);
977 }
978
979 lto_destroy_simple_output_block (ob);
980 }
981
 982 /* Read a node from input_block IB. TAG is the node's tag just read.
 983 Return the node read or overwritten. */
984
985 static struct cgraph_node *
986 input_node (struct lto_file_decl_data *file_data,
987 struct lto_input_block *ib,
988 enum LTO_cgraph_tags tag,
989 VEC(cgraph_node_ptr, heap) *nodes)
990 {
991 tree fn_decl;
992 struct cgraph_node *node;
993 struct bitpack_d bp;
994 int stack_size = 0;
995 unsigned decl_index;
996 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
997 int self_time = 0;
998 int self_size = 0;
999 int time_inlining_benefit = 0;
1000 int size_inlining_benefit = 0;
1001 unsigned long same_body_count = 0;
1002 int clone_ref;
1003
1004 clone_ref = lto_input_sleb128 (ib);
1005
1006 decl_index = lto_input_uleb128 (ib);
1007 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1008
1009 if (clone_ref != LCC_NOT_FOUND)
1010 {
1011 node = cgraph_clone_node (VEC_index (cgraph_node_ptr, nodes, clone_ref), fn_decl,
1012 0, CGRAPH_FREQ_BASE, 0, false, NULL);
1013 }
1014 else
1015 node = cgraph_node (fn_decl);
1016
1017 node->count = lto_input_sleb128 (ib);
1018
1019 if (tag == LTO_cgraph_analyzed_node)
1020 {
1021 stack_size = lto_input_sleb128 (ib);
1022 self_size = lto_input_sleb128 (ib);
1023 size_inlining_benefit = lto_input_sleb128 (ib);
1024 self_time = lto_input_sleb128 (ib);
1025 time_inlining_benefit = lto_input_sleb128 (ib);
1026
1027 ref = lto_input_sleb128 (ib);
1028 }
1029
1030 ref2 = lto_input_sleb128 (ib);
1031
1032 /* Make sure that we have not read this node before. Nodes that
1033 have already been read will have their tag stored in the 'aux'
1034 field. Since built-in functions can be referenced in multiple
1035 functions, they are expected to be read more than once. */
1036 if (node->aux && !DECL_IS_BUILTIN (node->decl))
1037 internal_error ("bytecode stream: found multiple instances of cgraph "
1038 "node %d", node->uid);
1039
1040 bp = lto_input_bitpack (ib);
1041 input_overwrite_node (file_data, node, tag, &bp, stack_size, self_time,
1042 time_inlining_benefit, self_size,
1043 size_inlining_benefit);
1044
1045 /* Store a reference for now, and fix up later to be a pointer. */
1046 node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
1047
1048 /* Store a reference for now, and fix up later to be a pointer. */
1049 node->same_comdat_group = (cgraph_node_ptr) (intptr_t) ref2;
1050
1051 same_body_count = lto_input_uleb128 (ib);
1052 while (same_body_count-- > 0)
1053 {
1054 tree alias_decl;
1055 int type;
1056 decl_index = lto_input_uleb128 (ib);
1057 alias_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1058 type = lto_input_uleb128 (ib);
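/* TYPE is the flag word written by lto_output_node: zero for a plain
   same-body alias, otherwise bit 1 is this_adjusting and bit 2 is
   virtual_offset_p of the thunk.  */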
1059 if (!type)
1060 {
1061 tree real_alias;
1062 decl_index = lto_input_uleb128 (ib);
1063 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1064 cgraph_same_body_alias (alias_decl, real_alias);
1065 }
1066 else
1067 {
1068 HOST_WIDE_INT fixed_offset = lto_input_uleb128 (ib);
1069 HOST_WIDE_INT virtual_value = lto_input_uleb128 (ib);
1070 tree real_alias;
1071 decl_index = lto_input_uleb128 (ib);
1072 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1073 cgraph_add_thunk (alias_decl, fn_decl, type & 2, fixed_offset,
1074 virtual_value,
1075 (type & 4) ? size_int (virtual_value) : NULL_TREE,
1076 real_alias);
1077 }
1078 }
1079 return node;
1080 }
1081
 1082 /* Read a varpool node from input_block IB using the info in FILE_DATA.
 1083 Return the node read or overwritten. */
1084
1085 static struct varpool_node *
1086 input_varpool_node (struct lto_file_decl_data *file_data,
1087 struct lto_input_block *ib)
1088 {
1089 int decl_index;
1090 tree var_decl;
1091 struct varpool_node *node;
1092 struct bitpack_d bp;
1093 bool aliases_p;
1094 int count;
1095 int ref = LCC_NOT_FOUND;
1096
1097 decl_index = lto_input_uleb128 (ib);
1098 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1099 node = varpool_node (var_decl);
1100 node->lto_file_data = file_data;
1101
1102 bp = lto_input_bitpack (ib);
1103 node->externally_visible = bp_unpack_value (&bp, 1);
1104 node->force_output = bp_unpack_value (&bp, 1);
1105 node->finalized = bp_unpack_value (&bp, 1);
1106 node->alias = bp_unpack_value (&bp, 1);
1107 node->analyzed = node->finalized;
1108 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1109 node->in_other_partition = bp_unpack_value (&bp, 1);
1110 aliases_p = bp_unpack_value (&bp, 1);
1111 if (node->finalized)
1112 varpool_mark_needed_node (node);
1113 ref = lto_input_sleb128 (ib);
1114 /* Store a reference for now, and fix up later to be a pointer. */
1115 node->same_comdat_group = (struct varpool_node *) (intptr_t) ref;
1116 if (aliases_p)
1117 {
1118 count = lto_input_uleb128 (ib);
1119 for (; count > 0; count --)
1120 {
1121 tree decl = lto_file_decl_data_get_var_decl (file_data,
1122 lto_input_uleb128 (ib));
1123 varpool_extra_name_alias (decl, var_decl);
1124 }
1125 }
1126 return node;
1127 }
1128
 1129 /* Read an ipa_ref from input_block IB and attach it to the referring
 1130 cgraph node REFERING_NODE or varpool node REFERING_VARPOOL_NODE. */
1131
1132 static void
1133 input_ref (struct lto_input_block *ib,
1134 struct cgraph_node *refering_node,
1135 struct varpool_node *refering_varpool_node,
1136 VEC(cgraph_node_ptr, heap) *nodes,
1137 VEC(varpool_node_ptr, heap) *varpool_nodes)
1138 {
1139 struct cgraph_node *node = NULL;
1140 struct varpool_node *varpool_node = NULL;
1141 struct bitpack_d bp;
1142 enum ipa_ref_type type;
1143 enum ipa_ref_use use;
1144
1145 bp = lto_input_bitpack (ib);
1146 type = (enum ipa_ref_type) bp_unpack_value (&bp, 1);
1147 use = (enum ipa_ref_use) bp_unpack_value (&bp, 2);
1148 if (type == IPA_REF_CGRAPH)
1149 node = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1150 else
1151 varpool_node = VEC_index (varpool_node_ptr, varpool_nodes, lto_input_sleb128 (ib));
1152 ipa_record_reference (refering_node, refering_varpool_node,
1153 node, varpool_node, use, NULL);
1154 }
1155
1156 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1157 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1158 edge being read is indirect (in the sense that it has
1159 indirect_unknown_callee set). */
1160
1161 static void
1162 input_edge (struct lto_input_block *ib, VEC(cgraph_node_ptr, heap) *nodes,
1163 bool indirect)
1164 {
1165 struct cgraph_node *caller, *callee;
1166 struct cgraph_edge *edge;
1167 unsigned int stmt_id;
1168 gcov_type count;
1169 int freq;
1170 unsigned int nest;
1171 cgraph_inline_failed_t inline_failed;
1172 struct bitpack_d bp;
1173 int ecf_flags = 0;
1174
1175 caller = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1176 if (caller == NULL || caller->decl == NULL_TREE)
1177 internal_error ("bytecode stream: no caller found while reading edge");
1178
1179 if (!indirect)
1180 {
1181 callee = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1182 if (callee == NULL || callee->decl == NULL_TREE)
1183 internal_error ("bytecode stream: no callee found while reading edge");
1184 }
1185 else
1186 callee = NULL;
1187
1188 count = (gcov_type) lto_input_sleb128 (ib);
1189
1190 bp = lto_input_bitpack (ib);
1191 stmt_id = (unsigned int) bp_unpack_value (&bp, HOST_BITS_PER_INT);
1192 inline_failed = (cgraph_inline_failed_t) bp_unpack_value (&bp,
1193 HOST_BITS_PER_INT);
1194 freq = (int) bp_unpack_value (&bp, HOST_BITS_PER_INT);
1195 nest = (unsigned) bp_unpack_value (&bp, 30);
1196
1197 if (indirect)
1198 edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq, nest);
1199 else
1200 edge = cgraph_create_edge (caller, callee, NULL, count, freq, nest);
1201
1202 edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
1203 edge->lto_stmt_uid = stmt_id;
1204 edge->inline_failed = inline_failed;
1205 edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
1206 edge->can_throw_external = bp_unpack_value (&bp, 1);
1207 if (indirect)
1208 {
1209 if (bp_unpack_value (&bp, 1))
1210 ecf_flags |= ECF_CONST;
1211 if (bp_unpack_value (&bp, 1))
1212 ecf_flags |= ECF_PURE;
1213 if (bp_unpack_value (&bp, 1))
1214 ecf_flags |= ECF_NORETURN;
1215 if (bp_unpack_value (&bp, 1))
1216 ecf_flags |= ECF_MALLOC;
1217 if (bp_unpack_value (&bp, 1))
1218 ecf_flags |= ECF_NOTHROW;
1219 if (bp_unpack_value (&bp, 1))
1220 ecf_flags |= ECF_RETURNS_TWICE;
1221 edge->indirect_info->ecf_flags = ecf_flags;
1222 }
1223 }
1224
1225
1226 /* Read a cgraph from IB using the info in FILE_DATA. */
1227
1228 static VEC(cgraph_node_ptr, heap) *
1229 input_cgraph_1 (struct lto_file_decl_data *file_data,
1230 struct lto_input_block *ib)
1231 {
1232 enum LTO_cgraph_tags tag;
1233 VEC(cgraph_node_ptr, heap) *nodes = NULL;
1234 struct cgraph_node *node;
1235 unsigned i;
1236 unsigned HOST_WIDE_INT len;
1237
1238 tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
1239 while (tag)
1240 {
1241 if (tag == LTO_cgraph_edge)
1242 input_edge (ib, nodes, false);
1243 else if (tag == LTO_cgraph_indirect_edge)
1244 input_edge (ib, nodes, true);
1245 else
1246 {
 1247 node = input_node (file_data, ib, tag, nodes);
1248 if (node == NULL || node->decl == NULL_TREE)
1249 internal_error ("bytecode stream: found empty cgraph node");
1250 VEC_safe_push (cgraph_node_ptr, heap, nodes, node);
1251 lto_cgraph_encoder_encode (file_data->cgraph_node_encoder, node);
1252 }
1253
1254 tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
1255 }
1256
1257 /* Input toplevel asms. */
1258 len = lto_input_uleb128 (ib);
1259 while (len)
1260 {
1261 char *str = (char *)xmalloc (len + 1);
1262 for (i = 0; i < len; ++i)
1263 str[i] = lto_input_1_unsigned (ib);
1264 cgraph_add_asm_node (build_string (len, str));
1265 free (str);
1266
1267 len = lto_input_uleb128 (ib);
1268 }
1269
1270 for (i = 0; VEC_iterate (cgraph_node_ptr, nodes, i, node); i++)
1271 {
1272 int ref = (int) (intptr_t) node->global.inlined_to;
1273
1274 /* Fixup inlined_to from reference to pointer. */
1275 if (ref != LCC_NOT_FOUND)
1276 node->global.inlined_to = VEC_index (cgraph_node_ptr, nodes, ref);
1277 else
1278 node->global.inlined_to = NULL;
1279
1280 ref = (int) (intptr_t) node->same_comdat_group;
1281
1282 /* Fixup same_comdat_group from reference to pointer. */
1283 if (ref != LCC_NOT_FOUND)
1284 node->same_comdat_group = VEC_index (cgraph_node_ptr, nodes, ref);
1285 else
1286 node->same_comdat_group = NULL;
1287 }
1288 return nodes;
1289 }
1290
1291 /* Read a varpool from IB using the info in FILE_DATA. */
1292
1293 static VEC(varpool_node_ptr, heap) *
1294 input_varpool_1 (struct lto_file_decl_data *file_data,
1295 struct lto_input_block *ib)
1296 {
1297 unsigned HOST_WIDE_INT len;
1298 VEC(varpool_node_ptr, heap) *varpool = NULL;
1299 int i;
1300 struct varpool_node *node;
1301
1302 len = lto_input_uleb128 (ib);
1303 while (len)
1304 {
1305 VEC_safe_push (varpool_node_ptr, heap, varpool,
1306 input_varpool_node (file_data, ib));
1307 len--;
1308 }
1309 for (i = 0; VEC_iterate (varpool_node_ptr, varpool, i, node); i++)
1310 {
1311 int ref = (int) (intptr_t) node->same_comdat_group;
1312
1313 /* Fixup same_comdat_group from reference to pointer. */
1314 if (ref != LCC_NOT_FOUND)
1315 node->same_comdat_group = VEC_index (varpool_node_ptr, varpool, ref);
1316 else
1317 node->same_comdat_group = NULL;
1318 }
1319 return varpool;
1320 }
1321
1322 /* Input ipa_refs. */
1323
1324 static void
1325 input_refs (struct lto_input_block *ib,
1326 VEC(cgraph_node_ptr, heap) *nodes,
1327 VEC(varpool_node_ptr, heap) *varpool)
1328 {
1329 int count;
1330 int idx;
1331 while (true)
1332 {
1333 struct cgraph_node *node;
1334 count = lto_input_uleb128 (ib);
1335 if (!count)
1336 break;
1337 idx = lto_input_uleb128 (ib);
1338 node = VEC_index (cgraph_node_ptr, nodes, idx);
1339 while (count)
1340 {
1341 input_ref (ib, node, NULL, nodes, varpool);
1342 count--;
1343 }
1344 }
1345 while (true)
1346 {
1347 struct varpool_node *node;
1348 count = lto_input_uleb128 (ib);
1349 if (!count)
1350 break;
1351 node = VEC_index (varpool_node_ptr, varpool, lto_input_uleb128 (ib));
1352 while (count)
1353 {
1354 input_ref (ib, NULL, node, nodes, varpool);
1355 count--;
1356 }
1357 }
1358 }
1359
1360
1361 static struct gcov_ctr_summary lto_gcov_summary;
1362
1363 /* Input profile_info from IB. */
1364 static void
1365 input_profile_summary (struct lto_input_block *ib)
1366 {
1367 unsigned int runs = lto_input_uleb128 (ib);
1368 if (runs)
1369 {
1370 if (!profile_info)
1371 {
1372 profile_info = &lto_gcov_summary;
1373 lto_gcov_summary.runs = runs;
1374 lto_gcov_summary.sum_all = lto_input_sleb128 (ib);
1375 lto_gcov_summary.run_max = lto_input_sleb128 (ib);
1376 lto_gcov_summary.sum_max = lto_input_sleb128 (ib);
1377 }
 1378 /* We can support this by scaling all counts to the nearest common multiple
 1379 of all the different run counts, but it is perhaps not worth the effort. */
1380 else if (profile_info->runs != runs
1381 || profile_info->sum_all != lto_input_sleb128 (ib)
1382 || profile_info->run_max != lto_input_sleb128 (ib)
1383 || profile_info->sum_max != lto_input_sleb128 (ib))
1384 sorry ("Combining units with different profiles is not supported.");
 1385 /* We allow some units to have a profile and others not to have one. This
 1386 will just make the unprofiled units be optimized for size, which is sane. */
1387 }
1388
1389 }
1390
1391 /* Input and merge the cgraph from each of the .o files passed to
1392 lto1. */
1393
1394 void
1395 input_cgraph (void)
1396 {
1397 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1398 struct lto_file_decl_data *file_data;
1399 unsigned int j = 0;
1400 struct cgraph_node *node;
1401
1402 while ((file_data = file_data_vec[j++]))
1403 {
1404 const char *data;
1405 size_t len;
1406 struct lto_input_block *ib;
1407 VEC(cgraph_node_ptr, heap) *nodes;
1408 VEC(varpool_node_ptr, heap) *varpool;
1409
1410 ib = lto_create_simple_input_block (file_data, LTO_section_cgraph,
1411 &data, &len);
1412 input_profile_summary (ib);
1413 file_data->cgraph_node_encoder = lto_cgraph_encoder_new ();
1414 nodes = input_cgraph_1 (file_data, ib);
1415 lto_destroy_simple_input_block (file_data, LTO_section_cgraph,
1416 ib, data, len);
1417
1418 ib = lto_create_simple_input_block (file_data, LTO_section_varpool,
1419 &data, &len);
1420 varpool = input_varpool_1 (file_data, ib);
1421 lto_destroy_simple_input_block (file_data, LTO_section_varpool,
1422 ib, data, len);
1423
1424 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1425 &data, &len);
1426 input_refs (ib, nodes, varpool);
1427 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1428 ib, data, len);
1429 if (flag_ltrans)
1430 input_cgraph_opt_summary (nodes);
1431 VEC_free (cgraph_node_ptr, heap, nodes);
1432 VEC_free (varpool_node_ptr, heap, varpool);
1433 }
1434
1435 /* Clear out the aux field that was used to store enough state to
1436 tell which nodes should be overwritten. */
1437 for (node = cgraph_nodes; node; node = node->next)
1438 {
1439 /* Some nodes may have been created by cgraph_node. This
1440 happens when the callgraph contains nested functions. If the
1441 node for the parent function was never emitted to the gimple
1442 file, cgraph_node will create a node for it when setting the
1443 context of the nested function. */
1444 if (node->local.lto_file_data)
1445 node->aux = NULL;
1446 }
1447 }
1448
 1449 /* Return true when we need an optimization summary for NODE. */
1450
1451 static int
1452 output_cgraph_opt_summary_p (struct cgraph_node *node)
1453 {
1454 if (!node->clone_of)
1455 return false;
1456 return (node->clone.tree_map
1457 || node->clone.args_to_skip
1458 || node->clone.combined_args_to_skip);
1459 }
1460
1461 /* Output optimization summary for NODE to OB. */
1462
1463 static void
1464 output_node_opt_summary (struct output_block *ob,
1465 struct cgraph_node *node)
1466 {
1467 unsigned int index;
1468 bitmap_iterator bi;
1469 struct ipa_replace_map *map;
1470 struct bitpack_d bp;
1471 int i;
1472
1473 lto_output_uleb128_stream (ob->main_stream,
1474 bitmap_count_bits (node->clone.args_to_skip));
1475 EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
1476 lto_output_uleb128_stream (ob->main_stream, index);
1477 lto_output_uleb128_stream (ob->main_stream,
1478 bitmap_count_bits (node->clone.combined_args_to_skip));
1479 EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
1480 lto_output_uleb128_stream (ob->main_stream, index);
1481 lto_output_uleb128_stream (ob->main_stream,
1482 VEC_length (ipa_replace_map_p, node->clone.tree_map));
1483 for (i = 0; VEC_iterate (ipa_replace_map_p, node->clone.tree_map, i, map); i++)
1484 {
1485 int parm_num;
1486 tree parm;
1487
1488 for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm;
1489 parm = TREE_CHAIN (parm), parm_num++)
1490 if (map->old_tree == parm)
1491 break;
1492 /* At the moment we assume all old trees to be PARM_DECLs, because we have no
1493 mechanism to store function local declarations into summaries. */
1494 gcc_assert (parm);
1495 lto_output_uleb128_stream (ob->main_stream, parm_num);
1496 lto_output_tree (ob, map->new_tree, true);
1497 bp = bitpack_create (ob->main_stream);
1498 bp_pack_value (&bp, map->replace_p, 1);
1499 bp_pack_value (&bp, map->ref_p, 1);
1500 lto_output_bitpack (&bp);
1501 }
1502 }
1503
1504 /* Output optimization summaries stored in callgraph.
1505 At the moment it is the clone info structure. */
1506
1507 static void
1508 output_cgraph_opt_summary (void)
1509 {
1510 struct cgraph_node *node;
1511 int i, n_nodes;
1512 lto_cgraph_encoder_t encoder;
1513 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1514 unsigned count = 0;
1515
1516 ob->cgraph_node = NULL;
1517 encoder = ob->decl_state->cgraph_node_encoder;
1518 n_nodes = lto_cgraph_encoder_size (encoder);
1519 for (i = 0; i < n_nodes; i++)
1520 if (output_cgraph_opt_summary_p (lto_cgraph_encoder_deref (encoder, i)))
1521 count++;
1522 lto_output_uleb128_stream (ob->main_stream, count);
1523 for (i = 0; i < n_nodes; i++)
1524 {
1525 node = lto_cgraph_encoder_deref (encoder, i);
1526 if (output_cgraph_opt_summary_p (node))
1527 {
1528 lto_output_uleb128_stream (ob->main_stream, i);
1529 output_node_opt_summary (ob, node);
1530 }
1531 }
1532 produce_asm (ob, NULL);
1533 destroy_output_block (ob);
1534 }
1535
 1536 /* Input optimization summary of NODE. */
1537
1538 static void
1539 input_node_opt_summary (struct cgraph_node *node,
1540 struct lto_input_block *ib_main,
1541 struct data_in *data_in)
1542 {
1543 int i;
1544 int count;
1545 int bit;
1546 struct bitpack_d bp;
1547
1548 count = lto_input_uleb128 (ib_main);
1549 if (count)
1550 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
1551 for (i = 0; i < count; i++)
1552 {
1553 bit = lto_input_uleb128 (ib_main);
1554 bitmap_set_bit (node->clone.args_to_skip, bit);
1555 }
1556 count = lto_input_uleb128 (ib_main);
1557 if (count)
1558 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
1559 for (i = 0; i < count; i++)
1560 {
1561 bit = lto_input_uleb128 (ib_main);
1562 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
1563 }
1564 count = lto_input_uleb128 (ib_main);
1565 for (i = 0; i < count; i++)
1566 {
1567 int parm_num;
1568 tree parm;
1569 struct ipa_replace_map *map = ggc_alloc_ipa_replace_map ();
1570
1571 VEC_safe_push (ipa_replace_map_p, gc, node->clone.tree_map, map);
1572 for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm_num;
1573 parm = TREE_CHAIN (parm))
1574 parm_num --;
1575 map->parm_num = lto_input_uleb128 (ib_main);
1576 map->old_tree = NULL;
1577 map->new_tree = lto_input_tree (ib_main, data_in);
1578 bp = lto_input_bitpack (ib_main);
1579 map->replace_p = bp_unpack_value (&bp, 1);
1580 map->ref_p = bp_unpack_value (&bp, 1);
1581 }
1582 }
1583
1584 /* Read section in file FILE_DATA of length LEN with data DATA. */
1585
1586 static void
1587 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
1588 const char *data, size_t len, VEC (cgraph_node_ptr,
1589 heap) * nodes)
1590 {
1591 const struct lto_function_header *header =
1592 (const struct lto_function_header *) data;
1593 const int32_t cfg_offset = sizeof (struct lto_function_header);
1594 const int32_t main_offset = cfg_offset + header->cfg_size;
1595 const int32_t string_offset = main_offset + header->main_size;
1596 struct data_in *data_in;
1597 struct lto_input_block ib_main;
1598 unsigned int i;
1599 unsigned int count;
1600
1601 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
1602 header->main_size);
1603
1604 data_in =
1605 lto_data_in_create (file_data, (const char *) data + string_offset,
1606 header->string_size, NULL);
1607 count = lto_input_uleb128 (&ib_main);
1608
1609 for (i = 0; i < count; i++)
1610 {
1611 int ref = lto_input_uleb128 (&ib_main);
1612 input_node_opt_summary (VEC_index (cgraph_node_ptr, nodes, ref),
1613 &ib_main, data_in);
1614 }
 1615 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
1616 len);
1617 lto_data_in_delete (data_in);
1618 }
1619
1620 /* Input optimization summary of cgraph. */
1621
1622 static void
1623 input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes)
1624 {
1625 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1626 struct lto_file_decl_data *file_data;
1627 unsigned int j = 0;
1628
1629 while ((file_data = file_data_vec[j++]))
1630 {
1631 size_t len;
1632 const char *data =
1633 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
1634 &len);
1635
1636 if (data)
1637 input_cgraph_opt_section (file_data, data, len, nodes);
1638 }
1639 }