gcc/lto-cgraph.c
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright 2009, 2010 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "toplev.h"
28 #include "tree.h"
29 #include "expr.h"
30 #include "flags.h"
31 #include "params.h"
32 #include "input.h"
33 #include "hashtab.h"
34 #include "langhooks.h"
35 #include "basic-block.h"
36 #include "tree-flow.h"
37 #include "cgraph.h"
38 #include "function.h"
39 #include "ggc.h"
40 #include "diagnostic-core.h"
41 #include "except.h"
42 #include "vec.h"
43 #include "timevar.h"
44 #include "output.h"
45 #include "pointer-set.h"
46 #include "lto-streamer.h"
47 #include "gcov-io.h"
48
49 static void output_varpool (cgraph_node_set, varpool_node_set);
50 static void output_cgraph_opt_summary (void);
51 static void input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes);
52
53
54 /* Cgraph streaming is organized as a set of records whose type
55    is indicated by a tag.  */
56 enum LTO_cgraph_tags
57 {
58 /* Must leave 0 for the stopper. */
59
60 /* Cgraph node without body available. */
61 LTO_cgraph_unavail_node = 1,
62 /* Cgraph node with function body. */
63 LTO_cgraph_analyzed_node,
64 /* Cgraph edges. */
65 LTO_cgraph_edge,
66 LTO_cgraph_indirect_edge
67 };
68
69 /* Create a new cgraph encoder. */
70
71 lto_cgraph_encoder_t
72 lto_cgraph_encoder_new (void)
73 {
74 lto_cgraph_encoder_t encoder = XCNEW (struct lto_cgraph_encoder_d);
75 encoder->map = pointer_map_create ();
76 encoder->nodes = NULL;
77 encoder->body = pointer_set_create ();
78 return encoder;
79 }
80
81
82 /* Delete ENCODER and its components. */
83
84 void
85 lto_cgraph_encoder_delete (lto_cgraph_encoder_t encoder)
86 {
87 VEC_free (cgraph_node_ptr, heap, encoder->nodes);
88 pointer_map_destroy (encoder->map);
89 pointer_set_destroy (encoder->body);
90 free (encoder);
91 }
92
93
94 /* Return the existing reference number of NODE in the cgraph encoder
95    ENCODER.  Assign a new reference if this is the first time
96    NODE is encoded.  */
97
98 int
99 lto_cgraph_encoder_encode (lto_cgraph_encoder_t encoder,
100 struct cgraph_node *node)
101 {
102 int ref;
103 void **slot;
104
105 slot = pointer_map_contains (encoder->map, node);
106 if (!slot)
107 {
108 ref = VEC_length (cgraph_node_ptr, encoder->nodes);
109 slot = pointer_map_insert (encoder->map, node);
110 *slot = (void *) (intptr_t) ref;
111 VEC_safe_push (cgraph_node_ptr, heap, encoder->nodes, node);
112 }
113 else
114 ref = (int) (intptr_t) *slot;
115
116 return ref;
117 }
118
119 #define LCC_NOT_FOUND (-1)
120
121 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
122 or LCC_NOT_FOUND if it is not there. */
123
124 int
125 lto_cgraph_encoder_lookup (lto_cgraph_encoder_t encoder,
126 struct cgraph_node *node)
127 {
128 void **slot = pointer_map_contains (encoder->map, node);
129 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
130 }
131
132
133 /* Return the cgraph node corresponding to REF using ENCODER. */
134
135 struct cgraph_node *
136 lto_cgraph_encoder_deref (lto_cgraph_encoder_t encoder, int ref)
137 {
138 if (ref == LCC_NOT_FOUND)
139 return NULL;
140
141 return VEC_index (cgraph_node_ptr, encoder->nodes, ref);
142 }
143
144
145 /* Return TRUE if we should encode the body of NODE (if any).  */
146
147 bool
148 lto_cgraph_encoder_encode_body_p (lto_cgraph_encoder_t encoder,
149 struct cgraph_node *node)
150 {
151 return pointer_set_contains (encoder->body, node);
152 }
153
154 /* Specify that we should encode the body of NODE (if any).  */
155
156 static void
157 lto_set_cgraph_encoder_encode_body (lto_cgraph_encoder_t encoder,
158 struct cgraph_node *node)
159 {
160 pointer_set_insert (encoder->body, node);
161 }
162
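/* Illustrative sketch (disabled code, kept only as an example): how the
   cgraph encoder API above is typically used.  NODE is assumed to be some
   existing cgraph node.  Encoding assigns each node a dense reference that
   is stable for the lifetime of the encoder and can be mapped back to the
   node.  */
#if 0
static void
example_cgraph_encoder_usage (struct cgraph_node *node)
{
  lto_cgraph_encoder_t encoder = lto_cgraph_encoder_new ();
  int ref;

  /* The first encode assigns the next free reference; encoding the same
     node again returns the same reference.  */
  ref = lto_cgraph_encoder_encode (encoder, node);
  gcc_assert (lto_cgraph_encoder_encode (encoder, node) == ref);

  /* Lookup and deref round-trip.  */
  gcc_assert (lto_cgraph_encoder_lookup (encoder, node) == ref);
  gcc_assert (lto_cgraph_encoder_deref (encoder, ref) == node);

  lto_cgraph_encoder_delete (encoder);
}
#endif
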
163 /* Create a new varpool encoder. */
164
165 lto_varpool_encoder_t
166 lto_varpool_encoder_new (void)
167 {
168 lto_varpool_encoder_t encoder = XCNEW (struct lto_varpool_encoder_d);
169 encoder->map = pointer_map_create ();
170 encoder->initializer = pointer_set_create ();
171 encoder->nodes = NULL;
172 return encoder;
173 }
174
175
176 /* Delete ENCODER and its components. */
177
178 void
179 lto_varpool_encoder_delete (lto_varpool_encoder_t encoder)
180 {
181 VEC_free (varpool_node_ptr, heap, encoder->nodes);
182 pointer_map_destroy (encoder->map);
183 pointer_set_destroy (encoder->initializer);
184 free (encoder);
185 }
186
187
188 /* Return the existing reference number of NODE in the varpool encoder
189    ENCODER.  Assign a new reference if this is the first time
190    NODE is encoded.  */
191
192 int
193 lto_varpool_encoder_encode (lto_varpool_encoder_t encoder,
194 struct varpool_node *node)
195 {
196 int ref;
197 void **slot;
198
199 slot = pointer_map_contains (encoder->map, node);
200 if (!slot)
201 {
202 ref = VEC_length (varpool_node_ptr, encoder->nodes);
203 slot = pointer_map_insert (encoder->map, node);
204 *slot = (void *) (intptr_t) ref;
205 VEC_safe_push (varpool_node_ptr, heap, encoder->nodes, node);
206 }
207 else
208 ref = (int) (intptr_t) *slot;
209
210 return ref;
211 }
212
213 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
214 or LCC_NOT_FOUND if it is not there. */
215
216 int
217 lto_varpool_encoder_lookup (lto_varpool_encoder_t encoder,
218 struct varpool_node *node)
219 {
220 void **slot = pointer_map_contains (encoder->map, node);
221 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
222 }
223
224
225 /* Return the varpool node corresponding to REF using ENCODER. */
226
227 struct varpool_node *
228 lto_varpool_encoder_deref (lto_varpool_encoder_t encoder, int ref)
229 {
230 if (ref == LCC_NOT_FOUND)
231 return NULL;
232
233 return VEC_index (varpool_node_ptr, encoder->nodes, ref);
234 }
235
236
237 /* Return TRUE if we should encode initializer of NODE (if any). */
238
239 bool
240 lto_varpool_encoder_encode_initializer_p (lto_varpool_encoder_t encoder,
241 struct varpool_node *node)
242 {
243 return pointer_set_contains (encoder->initializer, node);
244 }
245
246 /* Specify that we should encode the initializer of NODE (if any).  */
247
248 static void
249 lto_set_varpool_encoder_encode_initializer (lto_varpool_encoder_t encoder,
250 struct varpool_node *node)
251 {
252 pointer_set_insert (encoder->initializer, node);
253 }
254
255 /* Output the cgraph EDGE to OB using ENCODER. */
256
257 static void
258 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
259 lto_cgraph_encoder_t encoder)
260 {
261 unsigned int uid;
262 intptr_t ref;
263 struct bitpack_d bp;
264
265 if (edge->indirect_unknown_callee)
266 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_indirect_edge);
267 else
268 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_edge);
269
270 ref = lto_cgraph_encoder_lookup (encoder, edge->caller);
271 gcc_assert (ref != LCC_NOT_FOUND);
272 lto_output_sleb128_stream (ob->main_stream, ref);
273
274 if (!edge->indirect_unknown_callee)
275 {
276 ref = lto_cgraph_encoder_lookup (encoder, edge->callee);
277 gcc_assert (ref != LCC_NOT_FOUND);
278 lto_output_sleb128_stream (ob->main_stream, ref);
279 }
280
281 lto_output_sleb128_stream (ob->main_stream, edge->count);
282
283 bp = bitpack_create (ob->main_stream);
284 uid = (!gimple_has_body_p (edge->caller->decl)
285 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt));
286 bp_pack_value (&bp, uid, HOST_BITS_PER_INT);
287 bp_pack_value (&bp, edge->inline_failed, HOST_BITS_PER_INT);
288 bp_pack_value (&bp, edge->frequency, HOST_BITS_PER_INT);
289 bp_pack_value (&bp, edge->loop_nest, 30);
290 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
291 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
292 bp_pack_value (&bp, edge->can_throw_external, 1);
293 if (edge->indirect_unknown_callee)
294 {
295 int flags = edge->indirect_info->ecf_flags;
296 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
297 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
298 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
299 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
300 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
301 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
302 /* Flags that should not appear on indirect calls. */
303 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
304 | ECF_MAY_BE_ALLOCA
305 | ECF_SIBCALL
306 | ECF_NOVOPS)));
307 }
308 lto_output_bitpack (&bp);
309 }
310
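/* The edge record streamed above therefore consists of: a uleb128 tag
   (LTO_cgraph_edge or LTO_cgraph_indirect_edge), the caller's reference,
   the callee's reference (direct edges only), the execution count, and a
   bitpack holding the statement uid, inline_failed, frequency, loop_nest,
   indirect_inlining_edge, call_stmt_cannot_inline_p, can_throw_external
   and, for indirect edges, six ECF_* flag bits.  input_edge must unpack
   the fields in exactly this order.  */
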
311 /* Return true if LIST contains references from other partitions.  */
312
313 bool
314 referenced_from_other_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
315 varpool_node_set vset)
316 {
317 int i;
318 struct ipa_ref *ref;
319 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
320 {
321 if (ref->refering_type == IPA_REF_CGRAPH)
322 {
323 if (ipa_ref_refering_node (ref)->in_other_partition
324 || !cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
325 return true;
326 }
327 else
328 {
329 if (ipa_ref_refering_varpool_node (ref)->in_other_partition
330 || !varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
331 vset))
332 return true;
333 }
334 }
335 return false;
336 }
337
338 /* Return true when NODE is reachable from another partition.  */
339
340 bool
341 reachable_from_other_partition_p (struct cgraph_node *node, cgraph_node_set set)
342 {
343 struct cgraph_edge *e;
344 if (!node->analyzed)
345 return false;
346 if (node->global.inlined_to)
347 return false;
348 for (e = node->callers; e; e = e->next_caller)
349 if (e->caller->in_other_partition
350 || !cgraph_node_in_set_p (e->caller, set))
351 return true;
352 return false;
353 }
354
355 /* Return true if LIST contains references from this partition.  */
356
357 bool
358 referenced_from_this_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
359 varpool_node_set vset)
360 {
361 int i;
362 struct ipa_ref *ref;
363 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
364 {
365 if (ref->refering_type == IPA_REF_CGRAPH)
366 {
367 if (cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
368 return true;
369 }
370 else
371 {
372 if (varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
373 vset))
374 return true;
375 }
376 }
377 return false;
378 }
379
380 /* Return true when NODE is reachable from this partition.  */
381
382 bool
383 reachable_from_this_partition_p (struct cgraph_node *node, cgraph_node_set set)
384 {
385 struct cgraph_edge *e;
386 if (!node->analyzed)
387 return false;
388 if (node->global.inlined_to)
389 return false;
390 for (e = node->callers; e; e = e->next_caller)
391 if (cgraph_node_in_set_p (e->caller, set))
392 return true;
393 return false;
394 }
395
396 /* Output the cgraph NODE to OB.  ENCODER is used to find the
397    reference number of NODE->inlined_to.  SET is the set of nodes we
398    are writing to the current file; VSET is the corresponding set of
399    variables.  If NODE is not in SET, then NODE is a boundary of a
400    cgraph_node_set and we pretend NODE just has a decl and no
401    callees.  VSET is used to decide whether NODE is referenced from
402    other partitions.  */
403
404 static void
405 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
406 lto_cgraph_encoder_t encoder, cgraph_node_set set,
407 varpool_node_set vset)
408 {
409 unsigned int tag;
410 struct bitpack_d bp;
411 bool boundary_p;
412 intptr_t ref;
413 bool in_other_partition = false;
414 struct cgraph_node *clone_of;
415
416 boundary_p = !cgraph_node_in_set_p (node, set);
417
418 if (node->analyzed && !boundary_p)
419 tag = LTO_cgraph_analyzed_node;
420 else
421 tag = LTO_cgraph_unavail_node;
422
423 lto_output_uleb128_stream (ob->main_stream, tag);
424
425 /* In WPA mode, we only output part of the call-graph.  Also, we
426    fake cgraph node attributes.  There are two cases we care about.
427
428    Boundary nodes: There are nodes that are not part of SET but are
429    called from within SET.  We artificially make them look like
430    externally visible nodes with no function body.
431
432    Cherry-picked nodes: These are nodes we pulled from other
433    translation units into SET during IPA-inlining.  We make them
434    local static nodes to prevent clashes with other local statics.  */
435 if (boundary_p && node->analyzed)
436 {
437 /* Inline clones cannot be part of the boundary.
438    gcc_assert (!node->global.inlined_to);
439
440    FIXME: At the moment they can be, when a partition contains an inline
441    clone that is a clone of an inline clone from outside the partition.
442    We can reshape the clone tree and make another node the root, but it
443    needs a bit of extra work and will be promptly done by
444    cgraph_remove_node after reading back.  */
445 in_other_partition = 1;
446 }
447
448 clone_of = node->clone_of;
449   while (clone_of
450 	 && (ref = lto_cgraph_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
451 if (clone_of->prev_sibling_clone)
452 clone_of = clone_of->prev_sibling_clone;
453 else
454 clone_of = clone_of->clone_of;
455 if (!clone_of)
456 lto_output_sleb128_stream (ob->main_stream, LCC_NOT_FOUND);
457 else
458 lto_output_sleb128_stream (ob->main_stream, ref);
459
460
461 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
462 lto_output_sleb128_stream (ob->main_stream, node->count);
463
464 if (tag == LTO_cgraph_analyzed_node)
465 {
466 lto_output_sleb128_stream (ob->main_stream,
467 node->local.inline_summary.estimated_self_stack_size);
468 lto_output_sleb128_stream (ob->main_stream,
469 node->local.inline_summary.self_size);
470 lto_output_sleb128_stream (ob->main_stream,
471 node->local.inline_summary.size_inlining_benefit);
472 lto_output_sleb128_stream (ob->main_stream,
473 node->local.inline_summary.self_time);
474 lto_output_sleb128_stream (ob->main_stream,
475 node->local.inline_summary.time_inlining_benefit);
476 if (node->global.inlined_to)
477 {
478 ref = lto_cgraph_encoder_lookup (encoder, node->global.inlined_to);
479 gcc_assert (ref != LCC_NOT_FOUND);
480 }
481 else
482 ref = LCC_NOT_FOUND;
483
484 lto_output_sleb128_stream (ob->main_stream, ref);
485 }
486
487 if (node->same_comdat_group && !boundary_p)
488 {
489 ref = lto_cgraph_encoder_lookup (encoder, node->same_comdat_group);
490 gcc_assert (ref != LCC_NOT_FOUND);
491 }
492 else
493 ref = LCC_NOT_FOUND;
494 lto_output_sleb128_stream (ob->main_stream, ref);
495
496 bp = bitpack_create (ob->main_stream);
497 bp_pack_value (&bp, node->local.local, 1);
498 bp_pack_value (&bp, node->local.externally_visible, 1);
499 bp_pack_value (&bp, node->local.finalized, 1);
500 bp_pack_value (&bp, node->local.inlinable, 1);
501 bp_pack_value (&bp, node->local.versionable, 1);
502 bp_pack_value (&bp, node->local.disregard_inline_limits, 1);
503 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
504 bp_pack_value (&bp, node->local.vtable_method, 1);
505 bp_pack_value (&bp, node->needed, 1);
506 bp_pack_value (&bp, node->address_taken, 1);
507 bp_pack_value (&bp, node->abstract_and_needed, 1);
508 bp_pack_value (&bp, tag == LTO_cgraph_analyzed_node
509 && !DECL_EXTERNAL (node->decl)
510 && !DECL_COMDAT (node->decl)
511 && (reachable_from_other_partition_p (node, set)
512 || referenced_from_other_partition_p (&node->ref_list, set, vset)), 1);
513 bp_pack_value (&bp, node->lowered, 1);
514 bp_pack_value (&bp, in_other_partition, 1);
515 bp_pack_value (&bp, node->alias, 1);
516 bp_pack_value (&bp, node->finalized_by_frontend, 1);
517 bp_pack_value (&bp, node->frequency, 2);
518 lto_output_bitpack (&bp);
519
520 if (node->same_body)
521 {
522 struct cgraph_node *alias;
523 unsigned long alias_count = 1;
524 for (alias = node->same_body; alias->next; alias = alias->next)
525 alias_count++;
526 lto_output_uleb128_stream (ob->main_stream, alias_count);
527 do
528 {
529 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
530 alias->decl);
531 if (alias->thunk.thunk_p)
532 {
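	      /* The value streamed below packs the thunk kind into one
		 integer: bit 0 marks a thunk, bit 1 is this_adjusting and
		 bit 2 is virtual_offset_p; a plain same-body alias is
		 streamed as 0 in the else branch.  input_node decodes the
		 same bits.  */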
533 lto_output_uleb128_stream
534 (ob->main_stream,
535 1 + (alias->thunk.this_adjusting != 0) * 2
536 + (alias->thunk.virtual_offset_p != 0) * 4);
537 lto_output_uleb128_stream (ob->main_stream,
538 alias->thunk.fixed_offset);
539 lto_output_uleb128_stream (ob->main_stream,
540 alias->thunk.virtual_value);
541 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
542 alias->thunk.alias);
543 }
544 else
545 {
546 lto_output_uleb128_stream (ob->main_stream, 0);
547 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
548 alias->thunk.alias);
549 }
550 alias = alias->previous;
551 }
552 while (alias);
553 }
554 else
555 lto_output_uleb128_stream (ob->main_stream, 0);
556 }
557
558 /* Output the varpool NODE to OB.
559 If NODE is not in SET, then NODE is a boundary. */
560
561 static void
562 lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
563 lto_varpool_encoder_t varpool_encoder,
564 cgraph_node_set set, varpool_node_set vset)
565 {
566 bool boundary_p = !varpool_node_in_set_p (node, vset) && node->analyzed;
567 struct bitpack_d bp;
568 struct varpool_node *alias;
569 int count = 0;
570 int ref;
571
572 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
573 bp = bitpack_create (ob->main_stream);
574 bp_pack_value (&bp, node->externally_visible, 1);
575 bp_pack_value (&bp, node->force_output, 1);
576 bp_pack_value (&bp, node->finalized, 1);
577 bp_pack_value (&bp, node->alias, 1);
578 bp_pack_value (&bp, node->const_value_known, 1);
579 gcc_assert (!node->alias || !node->extra_name);
580 gcc_assert (node->finalized || !node->analyzed);
581 gcc_assert (node->needed);
582 /* Constant pool initializers can be de-unified into individual ltrans units.
583    FIXME: Alternatively, at -Os we may want to avoid generating the local
584    labels for them and share them across LTRANS partitions.  */
585 if (DECL_IN_CONSTANT_POOL (node->decl)
586 && !DECL_COMDAT (node->decl))
587 {
588       bp_pack_value (&bp, 0, 1);  /* used_from_other_partition.  */
589 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
590 }
591 else
592 {
593 bp_pack_value (&bp, node->analyzed
594 && referenced_from_other_partition_p (&node->ref_list,
595 set, vset), 1);
596 bp_pack_value (&bp, boundary_p, 1); /* in_other_partition. */
597 }
598 /* Also emit any extra name aliases. */
599 for (alias = node->extra_name; alias; alias = alias->next)
600 count++;
601 bp_pack_value (&bp, count != 0, 1);
602 lto_output_bitpack (&bp);
603 if (node->same_comdat_group && !boundary_p)
604 {
605 ref = lto_varpool_encoder_lookup (varpool_encoder, node->same_comdat_group);
606 gcc_assert (ref != LCC_NOT_FOUND);
607 }
608 else
609 ref = LCC_NOT_FOUND;
610 lto_output_sleb128_stream (ob->main_stream, ref);
611
612 if (count)
613 {
614 lto_output_uleb128_stream (ob->main_stream, count);
615 for (alias = node->extra_name; alias; alias = alias->next)
616 lto_output_var_decl_index (ob->decl_state, ob->main_stream, alias->decl);
617 }
618 }
619
620 /* Output the reference REF to OB, using ENCODER and VARPOOL_ENCODER
621    to look up the referred-to node.  */
622
623 static void
624 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
625 lto_cgraph_encoder_t encoder,
626 lto_varpool_encoder_t varpool_encoder)
627 {
628 struct bitpack_d bp;
629 bp = bitpack_create (ob->main_stream);
630 bp_pack_value (&bp, ref->refered_type, 1);
631 bp_pack_value (&bp, ref->use, 2);
632 lto_output_bitpack (&bp);
633 if (ref->refered_type == IPA_REF_CGRAPH)
634 {
635 int nref = lto_cgraph_encoder_lookup (encoder, ipa_ref_node (ref));
636 gcc_assert (nref != LCC_NOT_FOUND);
637 lto_output_sleb128_stream (ob->main_stream, nref);
638 }
639 else
640 {
641 int nref = lto_varpool_encoder_lookup (varpool_encoder,
642 ipa_ref_varpool_node (ref));
643 gcc_assert (nref != LCC_NOT_FOUND);
644 lto_output_sleb128_stream (ob->main_stream, nref);
645 }
646 }
647
648 /* Stream out profile_summary to OB. */
649
650 static void
651 output_profile_summary (struct lto_simple_output_block *ob)
652 {
653 if (profile_info)
654 {
655       /* We do not output num; it is not terribly useful.  */
656 gcc_assert (profile_info->runs);
657 lto_output_uleb128_stream (ob->main_stream, profile_info->runs);
658 lto_output_sleb128_stream (ob->main_stream, profile_info->sum_all);
659 lto_output_sleb128_stream (ob->main_stream, profile_info->run_max);
660 lto_output_sleb128_stream (ob->main_stream, profile_info->sum_max);
661 }
662 else
663 lto_output_uleb128_stream (ob->main_stream, 0);
664 }
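
/* The summary is thus streamed as: runs (uleb128, zero when no profile is
   available), followed by sum_all, run_max and sum_max (sleb128).
   input_profile_summary reads the fields back in the same order.  */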
665
666 /* Add NODE into encoder as well as nodes it is cloned from.
667    Do it so that each clone appears after the node it was cloned from.  */
668
669 static void
670 add_node_to (lto_cgraph_encoder_t encoder, struct cgraph_node *node,
671 bool include_body)
672 {
673 if (node->clone_of)
674 add_node_to (encoder, node->clone_of, include_body);
675 else if (include_body)
676 lto_set_cgraph_encoder_encode_body (encoder, node);
677 lto_cgraph_encoder_encode (encoder, node);
678 }
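
/* For example, if C2 is a clone of C1, which is in turn a clone of F, the
   recursion above encodes F first, then C1, then C2, so every clone receives
   a higher reference than its origin.  */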
679
680 /* Add all references in LIST to encoders. */
681
682 static void
683 add_references (lto_cgraph_encoder_t encoder,
684 lto_varpool_encoder_t varpool_encoder,
685 struct ipa_ref_list *list)
686 {
687 int i;
688 struct ipa_ref *ref;
689 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
690 if (ref->refered_type == IPA_REF_CGRAPH)
691 add_node_to (encoder, ipa_ref_node (ref), false);
692 else
693 {
694 struct varpool_node *vnode = ipa_ref_varpool_node (ref);
695 lto_varpool_encoder_encode (varpool_encoder, vnode);
696 }
697 }
698
699 /* Output all callees or indirect outgoing edges. EDGE must be the first such
700 edge. */
701
702 static void
703 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
704 struct lto_simple_output_block *ob,
705 lto_cgraph_encoder_t encoder)
706 {
707 if (!edge)
708 return;
709
710   /* Output edges in backward direction, so that the reconstructed callgraph
711      matches and it is easy to associate call sites in the IPA pass summaries.  */
712 while (edge->next_callee)
713 edge = edge->next_callee;
714 for (; edge; edge = edge->prev_callee)
715 lto_output_edge (ob, edge, encoder);
716 }
717
718 /* Output all references from the nodes in SET and VSET.  */
719
720 static void
721 output_refs (cgraph_node_set set, varpool_node_set vset,
722 lto_cgraph_encoder_t encoder,
723 lto_varpool_encoder_t varpool_encoder)
724 {
725 cgraph_node_set_iterator csi;
726 varpool_node_set_iterator vsi;
727 struct lto_simple_output_block *ob;
728 int count;
729 struct ipa_ref *ref;
730 int i;
731
732 ob = lto_create_simple_output_block (LTO_section_refs);
733
734 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
735 {
736 struct cgraph_node *node = csi_node (csi);
737
738 count = ipa_ref_list_nreferences (&node->ref_list);
739 if (count)
740 {
741 lto_output_uleb128_stream (ob->main_stream, count);
742 lto_output_uleb128_stream (ob->main_stream,
743 lto_cgraph_encoder_lookup (encoder, node));
744 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
745 lto_output_ref (ob, ref, encoder, varpool_encoder);
746 }
747 }
748
749 lto_output_uleb128_stream (ob->main_stream, 0);
750
751 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
752 {
753 struct varpool_node *node = vsi_node (vsi);
754
755 count = ipa_ref_list_nreferences (&node->ref_list);
756 if (count)
757 {
758 lto_output_uleb128_stream (ob->main_stream, count);
759 lto_output_uleb128_stream (ob->main_stream,
760 lto_varpool_encoder_lookup (varpool_encoder,
761 node));
762 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
763 lto_output_ref (ob, ref, encoder, varpool_encoder);
764 }
765 }
766
767 lto_output_uleb128_stream (ob->main_stream, 0);
768
769 lto_destroy_simple_output_block (ob);
770 }
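
/* The refs section thus contains two zero-terminated lists: for each cgraph
   node with references, its reference count, its index in the cgraph encoder
   and the references themselves; then the same for varpool nodes.
   input_refs mirrors this layout.  */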
771
772 /* Find out all cgraph and varpool nodes we want to encode in the current
773    unit and insert them into the encoders.  */
774 void
775 compute_ltrans_boundary (struct lto_out_decl_state *state,
776 cgraph_node_set set, varpool_node_set vset)
777 {
778 struct cgraph_node *node;
779 cgraph_node_set_iterator csi;
780 varpool_node_set_iterator vsi;
781 struct cgraph_edge *edge;
782 int i;
783 lto_cgraph_encoder_t encoder;
784 lto_varpool_encoder_t varpool_encoder;
785
786 encoder = state->cgraph_node_encoder = lto_cgraph_encoder_new ();
787 varpool_encoder = state->varpool_node_encoder = lto_varpool_encoder_new ();
788
789 /* Go over all the nodes in SET and assign references. */
790 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
791 {
792 node = csi_node (csi);
793 add_node_to (encoder, node, true);
794 add_references (encoder, varpool_encoder, &node->ref_list);
795 }
796 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
797 {
798 struct varpool_node *vnode = vsi_node (vsi);
799 gcc_assert (!vnode->alias);
800 lto_varpool_encoder_encode (varpool_encoder, vnode);
801 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
802 add_references (encoder, varpool_encoder, &vnode->ref_list);
803 }
804   /* Also pickle in the initializer of all referenced readonly variables
805      to help folding.  Constant pool variables are not shared, so we must
806      pickle those too.  */
807 for (i = 0; i < lto_varpool_encoder_size (varpool_encoder); i++)
808 {
809 struct varpool_node *vnode = lto_varpool_encoder_deref (varpool_encoder, i);
810 if (DECL_INITIAL (vnode->decl)
811 && !lto_varpool_encoder_encode_initializer_p (varpool_encoder,
812 vnode)
813 && (DECL_IN_CONSTANT_POOL (vnode->decl)
814 || TREE_READONLY (vnode->decl)))
815 {
816 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
817 add_references (encoder, varpool_encoder, &vnode->ref_list);
818 }
819 }
820
821 /* Go over all the nodes again to include callees that are not in
822 SET. */
823 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
824 {
825 node = csi_node (csi);
826 for (edge = node->callees; edge; edge = edge->next_callee)
827 {
828 struct cgraph_node *callee = edge->callee;
829 if (!cgraph_node_in_set_p (callee, set))
830 {
831 /* We should have moved all the inlines. */
832 gcc_assert (!callee->global.inlined_to);
833 add_node_to (encoder, callee, false);
834 }
835 }
836 }
837 }
838
839 /* Output the part of the cgraph in SET. */
840
841 void
842 output_cgraph (cgraph_node_set set, varpool_node_set vset)
843 {
844 struct cgraph_node *node;
845 struct lto_simple_output_block *ob;
846 cgraph_node_set_iterator csi;
847 int i, n_nodes;
848 lto_cgraph_encoder_t encoder;
849 lto_varpool_encoder_t varpool_encoder;
850 struct cgraph_asm_node *can;
851 static bool asm_nodes_output = false;
852
853 if (flag_wpa)
854 output_cgraph_opt_summary ();
855
856 ob = lto_create_simple_output_block (LTO_section_cgraph);
857
858 output_profile_summary (ob);
859
860 /* An encoder for cgraph nodes should have been created by
861 ipa_write_summaries_1. */
862 gcc_assert (ob->decl_state->cgraph_node_encoder);
863 gcc_assert (ob->decl_state->varpool_node_encoder);
864 encoder = ob->decl_state->cgraph_node_encoder;
865 varpool_encoder = ob->decl_state->varpool_node_encoder;
866
867   /* Write out the nodes.  We must first output a node and then its clones;
868      otherwise, at the time of reading back a clone, there would be nothing
869      to clone from.  */
870 n_nodes = lto_cgraph_encoder_size (encoder);
871 for (i = 0; i < n_nodes; i++)
872 {
873 node = lto_cgraph_encoder_deref (encoder, i);
874 lto_output_node (ob, node, encoder, set, vset);
875 }
876
877 /* Go over the nodes in SET again to write edges. */
878 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
879 {
880 node = csi_node (csi);
881 output_outgoing_cgraph_edges (node->callees, ob, encoder);
882 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
883 }
884
885 lto_output_uleb128_stream (ob->main_stream, 0);
886
887   /* Emit toplevel asms.
888      When doing WPA we must output every asm just once.  Since we do not
889      partition asm nodes at all, output them to the first output.  This is
890      kind of a hack, but it should work well.  */
891 if (!asm_nodes_output)
892 {
893 asm_nodes_output = true;
894 for (can = cgraph_asm_nodes; can; can = can->next)
895 {
896 int len = TREE_STRING_LENGTH (can->asm_str);
897 lto_output_uleb128_stream (ob->main_stream, len);
898 for (i = 0; i < len; ++i)
899 lto_output_1_stream (ob->main_stream,
900 TREE_STRING_POINTER (can->asm_str)[i]);
901 }
902 }
903
904 lto_output_uleb128_stream (ob->main_stream, 0);
905
906 lto_destroy_simple_output_block (ob);
907 output_varpool (set, vset);
908 output_refs (set, vset, encoder, varpool_encoder);
909 }
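
/* The LTO_section_cgraph data produced above is therefore: the profile
   summary, the node records, the edge records, a zero terminator, the
   toplevel asm strings and a final zero.  The varpool and reference lists go
   to their own sections (LTO_section_varpool and LTO_section_refs), and
   input_cgraph reads the three sections back in the same order.  */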
910
911 /* Overwrite the information in NODE based on FILE_DATA, TAG, STACK_SIZE,
912    SELF_TIME, TIME_INLINING_BENEFIT, SELF_SIZE and SIZE_INLINING_BENEFIT.
913    This is called either to initialize NODE or to replace the values in it,
914    for instance because the first time we saw it, the function body was not
915    available but now it is.  BP is a bitpack with all the bitflags for NODE
916    read from the stream.  */
917
918 static void
919 input_overwrite_node (struct lto_file_decl_data *file_data,
920 struct cgraph_node *node,
921 enum LTO_cgraph_tags tag,
922 struct bitpack_d *bp,
923 unsigned int stack_size,
924 unsigned int self_time,
925 unsigned int time_inlining_benefit,
926 unsigned int self_size,
927 unsigned int size_inlining_benefit)
928 {
929 node->aux = (void *) tag;
930 node->local.inline_summary.estimated_self_stack_size = stack_size;
931 node->local.inline_summary.self_time = self_time;
932 node->local.inline_summary.time_inlining_benefit = time_inlining_benefit;
933 node->local.inline_summary.self_size = self_size;
934 node->local.inline_summary.size_inlining_benefit = size_inlining_benefit;
935 node->global.time = self_time;
936 node->global.size = self_size;
937 node->global.estimated_stack_size = stack_size;
938 node->global.estimated_growth = INT_MIN;
939 node->local.lto_file_data = file_data;
940
941 node->local.local = bp_unpack_value (bp, 1);
942 node->local.externally_visible = bp_unpack_value (bp, 1);
943 node->local.finalized = bp_unpack_value (bp, 1);
944 node->local.inlinable = bp_unpack_value (bp, 1);
945 node->local.versionable = bp_unpack_value (bp, 1);
946 node->local.disregard_inline_limits = bp_unpack_value (bp, 1);
947 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
948 node->local.vtable_method = bp_unpack_value (bp, 1);
949 node->needed = bp_unpack_value (bp, 1);
950 node->address_taken = bp_unpack_value (bp, 1);
951 node->abstract_and_needed = bp_unpack_value (bp, 1);
952 node->reachable_from_other_partition = bp_unpack_value (bp, 1);
953 node->lowered = bp_unpack_value (bp, 1);
954 node->analyzed = tag == LTO_cgraph_analyzed_node;
955 node->in_other_partition = bp_unpack_value (bp, 1);
956 node->alias = bp_unpack_value (bp, 1);
957 node->finalized_by_frontend = bp_unpack_value (bp, 1);
958 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
959 }
960
961 /* Output the part of the varpool in SET and VSET.  */
962
963 static void
964 output_varpool (cgraph_node_set set, varpool_node_set vset)
965 {
966 struct lto_simple_output_block *ob = lto_create_simple_output_block (LTO_section_varpool);
967 lto_varpool_encoder_t varpool_encoder = ob->decl_state->varpool_node_encoder;
968 int len = lto_varpool_encoder_size (varpool_encoder), i;
969
970 lto_output_uleb128_stream (ob->main_stream, len);
971
972   /* Write out the nodes.  The varpool encoder was already filled while
973      computing the LTRANS boundary, so we simply walk it in order and
974      stream each node.  */
975 for (i = 0; i < len; i++)
976 {
977 lto_output_varpool_node (ob, lto_varpool_encoder_deref (varpool_encoder, i),
978 varpool_encoder,
979 set, vset);
980 }
981
982 lto_destroy_simple_output_block (ob);
983 }
984
985 /* Read a node from input_block IB.  TAG is the node's tag just read.
986    Return the node read or overwritten.  */
987
988 static struct cgraph_node *
989 input_node (struct lto_file_decl_data *file_data,
990 struct lto_input_block *ib,
991 enum LTO_cgraph_tags tag,
992 VEC(cgraph_node_ptr, heap) *nodes)
993 {
994 tree fn_decl;
995 struct cgraph_node *node;
996 struct bitpack_d bp;
997 int stack_size = 0;
998 unsigned decl_index;
999 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
1000 int self_time = 0;
1001 int self_size = 0;
1002 int time_inlining_benefit = 0;
1003 int size_inlining_benefit = 0;
1004 unsigned long same_body_count = 0;
1005 int clone_ref;
1006
1007 clone_ref = lto_input_sleb128 (ib);
1008
1009 decl_index = lto_input_uleb128 (ib);
1010 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1011
1012 if (clone_ref != LCC_NOT_FOUND)
1013 {
1014 node = cgraph_clone_node (VEC_index (cgraph_node_ptr, nodes, clone_ref), fn_decl,
1015 0, CGRAPH_FREQ_BASE, 0, false, NULL);
1016 }
1017 else
1018 node = cgraph_node (fn_decl);
1019
1020 node->count = lto_input_sleb128 (ib);
1021
1022 if (tag == LTO_cgraph_analyzed_node)
1023 {
1024 stack_size = lto_input_sleb128 (ib);
1025 self_size = lto_input_sleb128 (ib);
1026 size_inlining_benefit = lto_input_sleb128 (ib);
1027 self_time = lto_input_sleb128 (ib);
1028 time_inlining_benefit = lto_input_sleb128 (ib);
1029
1030 ref = lto_input_sleb128 (ib);
1031 }
1032
1033 ref2 = lto_input_sleb128 (ib);
1034
1035 /* Make sure that we have not read this node before. Nodes that
1036 have already been read will have their tag stored in the 'aux'
1037 field. Since built-in functions can be referenced in multiple
1038 functions, they are expected to be read more than once. */
1039 if (node->aux && !DECL_IS_BUILTIN (node->decl))
1040 internal_error ("bytecode stream: found multiple instances of cgraph "
1041 "node %d", node->uid);
1042
1043 bp = lto_input_bitpack (ib);
1044 input_overwrite_node (file_data, node, tag, &bp, stack_size, self_time,
1045 time_inlining_benefit, self_size,
1046 size_inlining_benefit);
1047
1048 /* Store a reference for now, and fix up later to be a pointer. */
1049 node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
1050
1051 /* Store a reference for now, and fix up later to be a pointer. */
1052 node->same_comdat_group = (cgraph_node_ptr) (intptr_t) ref2;
1053
1054 same_body_count = lto_input_uleb128 (ib);
1055 while (same_body_count-- > 0)
1056 {
1057 tree alias_decl;
1058 int type;
1059 decl_index = lto_input_uleb128 (ib);
1060 alias_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1061 type = lto_input_uleb128 (ib);
1062 if (!type)
1063 {
1064 tree real_alias;
1065 decl_index = lto_input_uleb128 (ib);
1066 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1067 cgraph_same_body_alias (alias_decl, real_alias);
1068 }
1069 else
1070 {
1071 HOST_WIDE_INT fixed_offset = lto_input_uleb128 (ib);
1072 HOST_WIDE_INT virtual_value = lto_input_uleb128 (ib);
1073 tree real_alias;
1074 decl_index = lto_input_uleb128 (ib);
1075 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1076 cgraph_add_thunk (alias_decl, fn_decl, type & 2, fixed_offset,
1077 virtual_value,
1078 (type & 4) ? size_int (virtual_value) : NULL_TREE,
1079 real_alias);
1080 }
1081 }
1082 return node;
1083 }
1084
1085 /* Read a varpool node from input_block IB.
1086    Return the node read or overwritten.  */
1087
1088 static struct varpool_node *
1089 input_varpool_node (struct lto_file_decl_data *file_data,
1090 struct lto_input_block *ib)
1091 {
1092 int decl_index;
1093 tree var_decl;
1094 struct varpool_node *node;
1095 struct bitpack_d bp;
1096 bool aliases_p;
1097 int count;
1098 int ref = LCC_NOT_FOUND;
1099
1100 decl_index = lto_input_uleb128 (ib);
1101 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1102 node = varpool_node (var_decl);
1103 node->lto_file_data = file_data;
1104
1105 bp = lto_input_bitpack (ib);
1106 node->externally_visible = bp_unpack_value (&bp, 1);
1107 node->force_output = bp_unpack_value (&bp, 1);
1108 node->finalized = bp_unpack_value (&bp, 1);
1109 node->alias = bp_unpack_value (&bp, 1);
1110 node->const_value_known = bp_unpack_value (&bp, 1);
1111 node->analyzed = node->finalized;
1112 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1113 node->in_other_partition = bp_unpack_value (&bp, 1);
1114 aliases_p = bp_unpack_value (&bp, 1);
1115 if (node->finalized)
1116 varpool_mark_needed_node (node);
1117 ref = lto_input_sleb128 (ib);
1118 /* Store a reference for now, and fix up later to be a pointer. */
1119 node->same_comdat_group = (struct varpool_node *) (intptr_t) ref;
1120 if (aliases_p)
1121 {
1122 count = lto_input_uleb128 (ib);
1123 for (; count > 0; count --)
1124 {
1125 tree decl = lto_file_decl_data_get_var_decl (file_data,
1126 lto_input_uleb128 (ib));
1127 varpool_extra_name_alias (decl, var_decl);
1128 }
1129 }
1130 return node;
1131 }
1132
1133 /* Read an ipa reference from input_block IB and record it for
1134    REFERING_NODE or REFERING_VARPOOL_NODE using NODES and VARPOOL_NODES.  */
1135
1136 static void
1137 input_ref (struct lto_input_block *ib,
1138 struct cgraph_node *refering_node,
1139 struct varpool_node *refering_varpool_node,
1140 VEC(cgraph_node_ptr, heap) *nodes,
1141 VEC(varpool_node_ptr, heap) *varpool_nodes)
1142 {
1143 struct cgraph_node *node = NULL;
1144 struct varpool_node *varpool_node = NULL;
1145 struct bitpack_d bp;
1146 enum ipa_ref_type type;
1147 enum ipa_ref_use use;
1148
1149 bp = lto_input_bitpack (ib);
1150 type = (enum ipa_ref_type) bp_unpack_value (&bp, 1);
1151 use = (enum ipa_ref_use) bp_unpack_value (&bp, 2);
1152 if (type == IPA_REF_CGRAPH)
1153 node = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1154 else
1155 varpool_node = VEC_index (varpool_node_ptr, varpool_nodes, lto_input_sleb128 (ib));
1156 ipa_record_reference (refering_node, refering_varpool_node,
1157 node, varpool_node, use, NULL);
1158 }
1159
1160 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1161 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1162 edge being read is indirect (in the sense that it has
1163 indirect_unknown_callee set). */
1164
1165 static void
1166 input_edge (struct lto_input_block *ib, VEC(cgraph_node_ptr, heap) *nodes,
1167 bool indirect)
1168 {
1169 struct cgraph_node *caller, *callee;
1170 struct cgraph_edge *edge;
1171 unsigned int stmt_id;
1172 gcov_type count;
1173 int freq;
1174 unsigned int nest;
1175 cgraph_inline_failed_t inline_failed;
1176 struct bitpack_d bp;
1177 int ecf_flags = 0;
1178
1179 caller = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1180 if (caller == NULL || caller->decl == NULL_TREE)
1181 internal_error ("bytecode stream: no caller found while reading edge");
1182
1183 if (!indirect)
1184 {
1185 callee = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1186 if (callee == NULL || callee->decl == NULL_TREE)
1187 internal_error ("bytecode stream: no callee found while reading edge");
1188 }
1189 else
1190 callee = NULL;
1191
1192 count = (gcov_type) lto_input_sleb128 (ib);
1193
1194 bp = lto_input_bitpack (ib);
1195 stmt_id = (unsigned int) bp_unpack_value (&bp, HOST_BITS_PER_INT);
1196 inline_failed = (cgraph_inline_failed_t) bp_unpack_value (&bp,
1197 HOST_BITS_PER_INT);
1198 freq = (int) bp_unpack_value (&bp, HOST_BITS_PER_INT);
1199 nest = (unsigned) bp_unpack_value (&bp, 30);
1200
1201 if (indirect)
1202 edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq, nest);
1203 else
1204 edge = cgraph_create_edge (caller, callee, NULL, count, freq, nest);
1205
1206 edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
1207 edge->lto_stmt_uid = stmt_id;
1208 edge->inline_failed = inline_failed;
1209 edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
1210 edge->can_throw_external = bp_unpack_value (&bp, 1);
1211 if (indirect)
1212 {
1213 if (bp_unpack_value (&bp, 1))
1214 ecf_flags |= ECF_CONST;
1215 if (bp_unpack_value (&bp, 1))
1216 ecf_flags |= ECF_PURE;
1217 if (bp_unpack_value (&bp, 1))
1218 ecf_flags |= ECF_NORETURN;
1219 if (bp_unpack_value (&bp, 1))
1220 ecf_flags |= ECF_MALLOC;
1221 if (bp_unpack_value (&bp, 1))
1222 ecf_flags |= ECF_NOTHROW;
1223 if (bp_unpack_value (&bp, 1))
1224 ecf_flags |= ECF_RETURNS_TWICE;
1225 edge->indirect_info->ecf_flags = ecf_flags;
1226 }
1227 }
1228
1229
1230 /* Read a cgraph from IB using the info in FILE_DATA. */
1231
1232 static VEC(cgraph_node_ptr, heap) *
1233 input_cgraph_1 (struct lto_file_decl_data *file_data,
1234 struct lto_input_block *ib)
1235 {
1236 enum LTO_cgraph_tags tag;
1237 VEC(cgraph_node_ptr, heap) *nodes = NULL;
1238 struct cgraph_node *node;
1239 unsigned i;
1240 unsigned HOST_WIDE_INT len;
1241
1242 tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
1243 while (tag)
1244 {
1245 if (tag == LTO_cgraph_edge)
1246 input_edge (ib, nodes, false);
1247 else if (tag == LTO_cgraph_indirect_edge)
1248 input_edge (ib, nodes, true);
1249 else
1250 {
1251 	  node = input_node (file_data, ib, tag, nodes);
1252 if (node == NULL || node->decl == NULL_TREE)
1253 internal_error ("bytecode stream: found empty cgraph node");
1254 VEC_safe_push (cgraph_node_ptr, heap, nodes, node);
1255 lto_cgraph_encoder_encode (file_data->cgraph_node_encoder, node);
1256 }
1257
1258 tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
1259 }
1260
1261 /* Input toplevel asms. */
1262 len = lto_input_uleb128 (ib);
1263 while (len)
1264 {
1265 char *str = (char *)xmalloc (len + 1);
1266 for (i = 0; i < len; ++i)
1267 str[i] = lto_input_1_unsigned (ib);
1268 cgraph_add_asm_node (build_string (len, str));
1269 free (str);
1270
1271 len = lto_input_uleb128 (ib);
1272 }
1273
1274 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1275 {
1276 int ref = (int) (intptr_t) node->global.inlined_to;
1277
1278 /* Fixup inlined_to from reference to pointer. */
1279 if (ref != LCC_NOT_FOUND)
1280 node->global.inlined_to = VEC_index (cgraph_node_ptr, nodes, ref);
1281 else
1282 node->global.inlined_to = NULL;
1283
1284 ref = (int) (intptr_t) node->same_comdat_group;
1285
1286 /* Fixup same_comdat_group from reference to pointer. */
1287 if (ref != LCC_NOT_FOUND)
1288 node->same_comdat_group = VEC_index (cgraph_node_ptr, nodes, ref);
1289 else
1290 node->same_comdat_group = NULL;
1291 }
1292 return nodes;
1293 }
1294
1295 /* Read a varpool from IB using the info in FILE_DATA. */
1296
1297 static VEC(varpool_node_ptr, heap) *
1298 input_varpool_1 (struct lto_file_decl_data *file_data,
1299 struct lto_input_block *ib)
1300 {
1301 unsigned HOST_WIDE_INT len;
1302 VEC(varpool_node_ptr, heap) *varpool = NULL;
1303 int i;
1304 struct varpool_node *node;
1305
1306 len = lto_input_uleb128 (ib);
1307 while (len)
1308 {
1309 VEC_safe_push (varpool_node_ptr, heap, varpool,
1310 input_varpool_node (file_data, ib));
1311 len--;
1312 }
1313 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1314 {
1315 int ref = (int) (intptr_t) node->same_comdat_group;
1316
1317 /* Fixup same_comdat_group from reference to pointer. */
1318 if (ref != LCC_NOT_FOUND)
1319 node->same_comdat_group = VEC_index (varpool_node_ptr, varpool, ref);
1320 else
1321 node->same_comdat_group = NULL;
1322 }
1323 return varpool;
1324 }
1325
1326 /* Input ipa_refs. */
1327
1328 static void
1329 input_refs (struct lto_input_block *ib,
1330 VEC(cgraph_node_ptr, heap) *nodes,
1331 VEC(varpool_node_ptr, heap) *varpool)
1332 {
1333 int count;
1334 int idx;
1335 while (true)
1336 {
1337 struct cgraph_node *node;
1338 count = lto_input_uleb128 (ib);
1339 if (!count)
1340 break;
1341 idx = lto_input_uleb128 (ib);
1342 node = VEC_index (cgraph_node_ptr, nodes, idx);
1343 while (count)
1344 {
1345 input_ref (ib, node, NULL, nodes, varpool);
1346 count--;
1347 }
1348 }
1349 while (true)
1350 {
1351 struct varpool_node *node;
1352 count = lto_input_uleb128 (ib);
1353 if (!count)
1354 break;
1355 node = VEC_index (varpool_node_ptr, varpool, lto_input_uleb128 (ib));
1356 while (count)
1357 {
1358 input_ref (ib, NULL, node, nodes, varpool);
1359 count--;
1360 }
1361 }
1362 }
1363
1364
1365 static struct gcov_ctr_summary lto_gcov_summary;
1366
1367 /* Input profile_info from IB. */
1368 static void
1369 input_profile_summary (struct lto_input_block *ib)
1370 {
1371 unsigned int runs = lto_input_uleb128 (ib);
1372 if (runs)
1373 {
1374 if (!profile_info)
1375 {
1376 profile_info = &lto_gcov_summary;
1377 lto_gcov_summary.runs = runs;
1378 lto_gcov_summary.sum_all = lto_input_sleb128 (ib);
1379 lto_gcov_summary.run_max = lto_input_sleb128 (ib);
1380 lto_gcov_summary.sum_max = lto_input_sleb128 (ib);
1381 }
1382       /* We can support this by scaling all counts to the nearest common
1383 	 multiple of all different runs, but it is perhaps not worth the effort.  */
1384 else if (profile_info->runs != runs
1385 || profile_info->sum_all != lto_input_sleb128 (ib)
1386 || profile_info->run_max != lto_input_sleb128 (ib)
1387 || profile_info->sum_max != lto_input_sleb128 (ib))
1388 sorry ("Combining units with different profiles is not supported.");
1389       /* We allow some units to have a profile and others not to have one.  This
1390 	 will just make the unprofiled units be size optimized, which is sane.  */
1391 }
1392
1393 }
1394
1395 /* Input and merge the cgraph from each of the .o files passed to
1396 lto1. */
1397
1398 void
1399 input_cgraph (void)
1400 {
1401 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1402 struct lto_file_decl_data *file_data;
1403 unsigned int j = 0;
1404 struct cgraph_node *node;
1405
1406 while ((file_data = file_data_vec[j++]))
1407 {
1408 const char *data;
1409 size_t len;
1410 struct lto_input_block *ib;
1411 VEC(cgraph_node_ptr, heap) *nodes;
1412 VEC(varpool_node_ptr, heap) *varpool;
1413
1414 ib = lto_create_simple_input_block (file_data, LTO_section_cgraph,
1415 &data, &len);
1416 input_profile_summary (ib);
1417 file_data->cgraph_node_encoder = lto_cgraph_encoder_new ();
1418 nodes = input_cgraph_1 (file_data, ib);
1419 lto_destroy_simple_input_block (file_data, LTO_section_cgraph,
1420 ib, data, len);
1421
1422 ib = lto_create_simple_input_block (file_data, LTO_section_varpool,
1423 &data, &len);
1424 varpool = input_varpool_1 (file_data, ib);
1425 lto_destroy_simple_input_block (file_data, LTO_section_varpool,
1426 ib, data, len);
1427
1428 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1429 &data, &len);
1430 input_refs (ib, nodes, varpool);
1431 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1432 ib, data, len);
1433 if (flag_ltrans)
1434 input_cgraph_opt_summary (nodes);
1435 VEC_free (cgraph_node_ptr, heap, nodes);
1436 VEC_free (varpool_node_ptr, heap, varpool);
1437 }
1438
1439 /* Clear out the aux field that was used to store enough state to
1440 tell which nodes should be overwritten. */
1441 for (node = cgraph_nodes; node; node = node->next)
1442 {
1443 /* Some nodes may have been created by cgraph_node. This
1444 happens when the callgraph contains nested functions. If the
1445 node for the parent function was never emitted to the gimple
1446 file, cgraph_node will create a node for it when setting the
1447 context of the nested function. */
1448 if (node->local.lto_file_data)
1449 node->aux = NULL;
1450 }
1451 }
1452
1453 /* True when we need optimization summary for NODE. */
1454
1455 static int
1456 output_cgraph_opt_summary_p (struct cgraph_node *node)
1457 {
1458 if (!node->clone_of)
1459 return false;
1460 return (node->clone.tree_map
1461 || node->clone.args_to_skip
1462 || node->clone.combined_args_to_skip);
1463 }
1464
1465 /* Output optimization summary for NODE to OB. */
1466
1467 static void
1468 output_node_opt_summary (struct output_block *ob,
1469 struct cgraph_node *node)
1470 {
1471 unsigned int index;
1472 bitmap_iterator bi;
1473 struct ipa_replace_map *map;
1474 struct bitpack_d bp;
1475 int i;
1476
1477 lto_output_uleb128_stream (ob->main_stream,
1478 bitmap_count_bits (node->clone.args_to_skip));
1479 EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
1480 lto_output_uleb128_stream (ob->main_stream, index);
1481 lto_output_uleb128_stream (ob->main_stream,
1482 bitmap_count_bits (node->clone.combined_args_to_skip));
1483 EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
1484 lto_output_uleb128_stream (ob->main_stream, index);
1485 lto_output_uleb128_stream (ob->main_stream,
1486 VEC_length (ipa_replace_map_p, node->clone.tree_map));
1487 FOR_EACH_VEC_ELT (ipa_replace_map_p, node->clone.tree_map, i, map)
1488 {
1489 int parm_num;
1490 tree parm;
1491
1492 for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm;
1493 parm = DECL_CHAIN (parm), parm_num++)
1494 if (map->old_tree == parm)
1495 break;
1496 /* At the moment we assume all old trees to be PARM_DECLs, because we have no
1497 mechanism to store function local declarations into summaries. */
1498 gcc_assert (parm);
1499 lto_output_uleb128_stream (ob->main_stream, parm_num);
1500 lto_output_tree (ob, map->new_tree, true);
1501 bp = bitpack_create (ob->main_stream);
1502 bp_pack_value (&bp, map->replace_p, 1);
1503 bp_pack_value (&bp, map->ref_p, 1);
1504 lto_output_bitpack (&bp);
1505 }
1506 }
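
/* Each per-node record written above is therefore: the number of bits set in
   args_to_skip followed by the bit indices, the same for
   combined_args_to_skip, then the number of tree_map entries, each streamed
   as a parameter number, the replacement tree and a bitpack of replace_p and
   ref_p.  input_node_opt_summary reads the record back in this order.  */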
1507
1508 /* Output optimization summaries stored in the callgraph.
1509    At the moment this is the clone info structure.  */
1510
1511 static void
1512 output_cgraph_opt_summary (void)
1513 {
1514 struct cgraph_node *node;
1515 int i, n_nodes;
1516 lto_cgraph_encoder_t encoder;
1517 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1518 unsigned count = 0;
1519
1520 ob->cgraph_node = NULL;
1521 encoder = ob->decl_state->cgraph_node_encoder;
1522 n_nodes = lto_cgraph_encoder_size (encoder);
1523 for (i = 0; i < n_nodes; i++)
1524 if (output_cgraph_opt_summary_p (lto_cgraph_encoder_deref (encoder, i)))
1525 count++;
1526 lto_output_uleb128_stream (ob->main_stream, count);
1527 for (i = 0; i < n_nodes; i++)
1528 {
1529 node = lto_cgraph_encoder_deref (encoder, i);
1530 if (output_cgraph_opt_summary_p (node))
1531 {
1532 lto_output_uleb128_stream (ob->main_stream, i);
1533 output_node_opt_summary (ob, node);
1534 }
1535 }
1536 produce_asm (ob, NULL);
1537 destroy_output_block (ob);
1538 }
1539
1540 /* Input optimization summary of NODE.  */
1541
1542 static void
1543 input_node_opt_summary (struct cgraph_node *node,
1544 struct lto_input_block *ib_main,
1545 struct data_in *data_in)
1546 {
1547 int i;
1548 int count;
1549 int bit;
1550 struct bitpack_d bp;
1551
1552 count = lto_input_uleb128 (ib_main);
1553 if (count)
1554 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
1555 for (i = 0; i < count; i++)
1556 {
1557 bit = lto_input_uleb128 (ib_main);
1558 bitmap_set_bit (node->clone.args_to_skip, bit);
1559 }
1560 count = lto_input_uleb128 (ib_main);
1561 if (count)
1562 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
1563 for (i = 0; i < count; i++)
1564 {
1565 bit = lto_input_uleb128 (ib_main);
1566 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
1567 }
1568 count = lto_input_uleb128 (ib_main);
1569 for (i = 0; i < count; i++)
1570 {
1571 int parm_num;
1572 tree parm;
1573 struct ipa_replace_map *map = ggc_alloc_ipa_replace_map ();
1574
1575 VEC_safe_push (ipa_replace_map_p, gc, node->clone.tree_map, map);
1576 for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm_num;
1577 parm = DECL_CHAIN (parm))
1578 parm_num --;
1579 map->parm_num = lto_input_uleb128 (ib_main);
1580 map->old_tree = NULL;
1581 map->new_tree = lto_input_tree (ib_main, data_in);
1582 bp = lto_input_bitpack (ib_main);
1583 map->replace_p = bp_unpack_value (&bp, 1);
1584 map->ref_p = bp_unpack_value (&bp, 1);
1585 }
1586 }
1587
1588 /* Read section in file FILE_DATA of length LEN with data DATA. */
1589
1590 static void
1591 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
1592 const char *data, size_t len, VEC (cgraph_node_ptr,
1593 heap) * nodes)
1594 {
1595 const struct lto_function_header *header =
1596 (const struct lto_function_header *) data;
1597 const int32_t cfg_offset = sizeof (struct lto_function_header);
1598 const int32_t main_offset = cfg_offset + header->cfg_size;
1599 const int32_t string_offset = main_offset + header->main_size;
1600 struct data_in *data_in;
1601 struct lto_input_block ib_main;
1602 unsigned int i;
1603 unsigned int count;
1604
1605 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
1606 header->main_size);
1607
1608 data_in =
1609 lto_data_in_create (file_data, (const char *) data + string_offset,
1610 header->string_size, NULL);
1611 count = lto_input_uleb128 (&ib_main);
1612
1613 for (i = 0; i < count; i++)
1614 {
1615 int ref = lto_input_uleb128 (&ib_main);
1616 input_node_opt_summary (VEC_index (cgraph_node_ptr, nodes, ref),
1617 &ib_main, data_in);
1618 }
1619   lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
1620 len);
1621 lto_data_in_delete (data_in);
1622 }
1623
1624 /* Input optimization summary of cgraph. */
1625
1626 static void
1627 input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes)
1628 {
1629 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1630 struct lto_file_decl_data *file_data;
1631 unsigned int j = 0;
1632
1633 while ((file_data = file_data_vec[j++]))
1634 {
1635 size_t len;
1636 const char *data =
1637 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
1638 &len);
1639
1640 if (data)
1641 input_cgraph_opt_section (file_data, data, len, nodes);
1642 }
1643 }