cgraph.c (dump_cgraph_node): Dump alias flag.
[gcc.git] / gcc / lto-cgraph.c
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright 2009, 2010, 2011 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "expr.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "hashtab.h"
33 #include "langhooks.h"
34 #include "basic-block.h"
35 #include "tree-flow.h"
36 #include "cgraph.h"
37 #include "function.h"
38 #include "ggc.h"
39 #include "diagnostic-core.h"
40 #include "except.h"
41 #include "vec.h"
42 #include "timevar.h"
43 #include "output.h"
44 #include "pointer-set.h"
45 #include "lto-streamer.h"
46 #include "data-streamer.h"
47 #include "tree-streamer.h"
48 #include "gcov-io.h"
49
50 static void output_varpool (cgraph_node_set, varpool_node_set);
51 static void output_cgraph_opt_summary (cgraph_node_set set);
52 static void input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes);
53
54 /* Number of LDPR values known to GCC. */
55 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
56
/* All node orders are offset by ORDER_BASE.  */
58 static int order_base;
59
/* Cgraph streaming is organized as a set of records whose type
   is indicated by a tag.  */
62 enum LTO_cgraph_tags
63 {
64 /* Must leave 0 for the stopper. */
65
66 /* Cgraph node without body available. */
67 LTO_cgraph_unavail_node = 1,
68 /* Cgraph node with function body. */
69 LTO_cgraph_analyzed_node,
70 /* Cgraph edges. */
71 LTO_cgraph_edge,
72 LTO_cgraph_indirect_edge,
73 LTO_cgraph_last_tag
74 };
75
76 /* Create a new cgraph encoder. */
77
78 lto_cgraph_encoder_t
79 lto_cgraph_encoder_new (void)
80 {
81 lto_cgraph_encoder_t encoder = XCNEW (struct lto_cgraph_encoder_d);
82 encoder->map = pointer_map_create ();
83 encoder->nodes = NULL;
84 encoder->body = pointer_set_create ();
85 return encoder;
86 }
87
88
89 /* Delete ENCODER and its components. */
90
91 void
92 lto_cgraph_encoder_delete (lto_cgraph_encoder_t encoder)
93 {
94 VEC_free (cgraph_node_ptr, heap, encoder->nodes);
95 pointer_map_destroy (encoder->map);
96 pointer_set_destroy (encoder->body);
97 free (encoder);
98 }
99
100
101 /* Return the existing reference number of NODE in the cgraph encoder in
102 output block OB. Assign a new reference if this is the first time
103 NODE is encoded. */
104
105 int
106 lto_cgraph_encoder_encode (lto_cgraph_encoder_t encoder,
107 struct cgraph_node *node)
108 {
109 int ref;
110 void **slot;
111
112 slot = pointer_map_contains (encoder->map, node);
113 if (!slot)
114 {
115 ref = VEC_length (cgraph_node_ptr, encoder->nodes);
116 slot = pointer_map_insert (encoder->map, node);
117 *slot = (void *) (intptr_t) ref;
118 VEC_safe_push (cgraph_node_ptr, heap, encoder->nodes, node);
119 }
120 else
121 ref = (int) (intptr_t) *slot;
122
123 return ref;
124 }
125
126 #define LCC_NOT_FOUND (-1)
127
128 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
129 or LCC_NOT_FOUND if it is not there. */
130
131 int
132 lto_cgraph_encoder_lookup (lto_cgraph_encoder_t encoder,
133 struct cgraph_node *node)
134 {
135 void **slot = pointer_map_contains (encoder->map, node);
136 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
137 }
138
139
140 /* Return the cgraph node corresponding to REF using ENCODER. */
141
142 struct cgraph_node *
143 lto_cgraph_encoder_deref (lto_cgraph_encoder_t encoder, int ref)
144 {
145 if (ref == LCC_NOT_FOUND)
146 return NULL;
147
148 return VEC_index (cgraph_node_ptr, encoder->nodes, ref);
149 }
150
151
/* Return TRUE if we should encode the body of NODE (if any).  */

bool
lto_cgraph_encoder_encode_body_p (lto_cgraph_encoder_t encoder,
				  struct cgraph_node *node)
{
  return pointer_set_contains (encoder->body, node);
}
160
/* Mark NODE in ENCODER so that its function body will be streamed out.  */

static void
lto_set_cgraph_encoder_encode_body (lto_cgraph_encoder_t encoder,
				    struct cgraph_node *node)
{
  pointer_set_insert (encoder->body, node);
}
169
170 /* Create a new varpool encoder. */
171
172 lto_varpool_encoder_t
173 lto_varpool_encoder_new (void)
174 {
175 lto_varpool_encoder_t encoder = XCNEW (struct lto_varpool_encoder_d);
176 encoder->map = pointer_map_create ();
177 encoder->initializer = pointer_set_create ();
178 encoder->nodes = NULL;
179 return encoder;
180 }
181
182
183 /* Delete ENCODER and its components. */
184
185 void
186 lto_varpool_encoder_delete (lto_varpool_encoder_t encoder)
187 {
188 VEC_free (varpool_node_ptr, heap, encoder->nodes);
189 pointer_map_destroy (encoder->map);
190 pointer_set_destroy (encoder->initializer);
191 free (encoder);
192 }
193
194
195 /* Return the existing reference number of NODE in the varpool encoder in
196 output block OB. Assign a new reference if this is the first time
197 NODE is encoded. */
198
199 int
200 lto_varpool_encoder_encode (lto_varpool_encoder_t encoder,
201 struct varpool_node *node)
202 {
203 int ref;
204 void **slot;
205
206 slot = pointer_map_contains (encoder->map, node);
207 if (!slot)
208 {
209 ref = VEC_length (varpool_node_ptr, encoder->nodes);
210 slot = pointer_map_insert (encoder->map, node);
211 *slot = (void *) (intptr_t) ref;
212 VEC_safe_push (varpool_node_ptr, heap, encoder->nodes, node);
213 }
214 else
215 ref = (int) (intptr_t) *slot;
216
217 return ref;
218 }
219
220 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
221 or LCC_NOT_FOUND if it is not there. */
222
223 int
224 lto_varpool_encoder_lookup (lto_varpool_encoder_t encoder,
225 struct varpool_node *node)
226 {
227 void **slot = pointer_map_contains (encoder->map, node);
228 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
229 }
230
231
232 /* Return the varpool node corresponding to REF using ENCODER. */
233
234 struct varpool_node *
235 lto_varpool_encoder_deref (lto_varpool_encoder_t encoder, int ref)
236 {
237 if (ref == LCC_NOT_FOUND)
238 return NULL;
239
240 return VEC_index (varpool_node_ptr, encoder->nodes, ref);
241 }
242
243
/* Return TRUE if we should encode the initializer of NODE (if any).  */

bool
lto_varpool_encoder_encode_initializer_p (lto_varpool_encoder_t encoder,
					  struct varpool_node *node)
{
  return pointer_set_contains (encoder->initializer, node);
}
252
/* Mark NODE in ENCODER so that its initializer will be streamed out.  */

static void
lto_set_varpool_encoder_encode_initializer (lto_varpool_encoder_t encoder,
					    struct varpool_node *node)
{
  pointer_set_insert (encoder->initializer, node);
}
261
/* Output the cgraph EDGE to OB using ENCODER.

   Record layout: tag, caller ref, [callee ref for direct edges],
   profile count, then a bitpack.  The reader must unpack in exactly
   this order, so the sequence of writes below is part of the LTO
   wire format.  */

static void
lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
		 lto_cgraph_encoder_t encoder)
{
  unsigned int uid;
  intptr_t ref;
  struct bitpack_d bp;

  /* The tag tells the reader whether a callee reference follows.  */
  if (edge->indirect_unknown_callee)
    streamer_write_enum (ob->main_stream, LTO_cgraph_tags, LTO_cgraph_last_tag,
			 LTO_cgraph_indirect_edge);
  else
    streamer_write_enum (ob->main_stream, LTO_cgraph_tags, LTO_cgraph_last_tag,
			 LTO_cgraph_edge);

  ref = lto_cgraph_encoder_lookup (encoder, edge->caller);
  gcc_assert (ref != LCC_NOT_FOUND);
  streamer_write_hwi_stream (ob->main_stream, ref);

  /* Only direct edges know their callee.  */
  if (!edge->indirect_unknown_callee)
    {
      ref = lto_cgraph_encoder_lookup (encoder, edge->callee);
      gcc_assert (ref != LCC_NOT_FOUND);
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  streamer_write_hwi_stream (ob->main_stream, edge->count);

  bp = bitpack_create (ob->main_stream);
  /* If the caller's body is gone, the call statement uid was saved in
     lto_stmt_uid; otherwise read it from the call statement itself.  */
  uid = (!gimple_has_body_p (edge->caller->decl)
	 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt));
  bp_pack_enum (&bp, cgraph_inline_failed_enum,
		CIF_N_REASONS, edge->inline_failed);
  bp_pack_var_len_unsigned (&bp, uid);
  bp_pack_var_len_unsigned (&bp, edge->frequency);
  bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
  bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
  bp_pack_value (&bp, edge->can_throw_external, 1);
  if (edge->indirect_unknown_callee)
    {
      /* Stream the ECF flags of the unknown target one bit each.  */
      int flags = edge->indirect_info->ecf_flags;
      bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
      /* Flags that should not appear on indirect calls.  */
      gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
			     | ECF_MAY_BE_ALLOCA
			     | ECF_SIBCALL
			     | ECF_LEAF
			     | ECF_NOVOPS)));
    }
  streamer_write_bitpack (&bp);
}
320
/* Return true if LIST contains references from other partitions.
   A referring node is "foreign" when it is already marked as living in
   another partition, or when it is not a member of SET/VSET.  */

bool
referenced_from_other_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
				   varpool_node_set vset)
{
  int i;
  struct ipa_ref *ref;
  for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
    {
      if (ref->refering_type == IPA_REF_CGRAPH)
	{
	  /* Function reference: check the referring cgraph node.  */
	  if (ipa_ref_refering_node (ref)->in_other_partition
	      || !cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
	    return true;
	}
      else
	{
	  /* Variable reference: check the referring varpool node.  */
	  if (ipa_ref_refering_varpool_node (ref)->in_other_partition
	      || !varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
					 vset))
	    return true;
	}
    }
  return false;
}
347
348 /* Return true when node is reachable from other partition. */
349
350 bool
351 reachable_from_other_partition_p (struct cgraph_node *node, cgraph_node_set set)
352 {
353 struct cgraph_edge *e;
354 if (!node->analyzed)
355 return false;
356 if (node->global.inlined_to)
357 return false;
358 for (e = node->callers; e; e = e->next_caller)
359 if (e->caller->in_other_partition
360 || !cgraph_node_in_set_p (e->caller, set))
361 return true;
362 return false;
363 }
364
/* Return true if LIST contains references from this partition, i.e.
   from a referring node that is a member of SET (functions) or VSET
   (variables).  */

bool
referenced_from_this_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
				  varpool_node_set vset)
{
  int i;
  struct ipa_ref *ref;
  for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
    {
      if (ref->refering_type == IPA_REF_CGRAPH)
	{
	  if (cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
	    return true;
	}
      else
	{
	  if (varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
				     vset))
	    return true;
	}
    }
  return false;
}
389
/* Return true when NODE is called from this partition, i.e. when some
   caller of NODE is a member of SET.  */

bool
reachable_from_this_partition_p (struct cgraph_node *node, cgraph_node_set set)
{
  struct cgraph_edge *e;
  for (e = node->callers; e; e = e->next_caller)
    if (cgraph_node_in_set_p (e->caller, set))
      return true;
  return false;
}
401
402 /* Output the cgraph NODE to OB. ENCODER is used to find the
403 reference number of NODE->inlined_to. SET is the set of nodes we
404 are writing to the current file. If NODE is not in SET, then NODE
405 is a boundary of a cgraph_node_set and we pretend NODE just has a
406 decl and no callees. WRITTEN_DECLS is the set of FUNCTION_DECLs
407 that have had their callgraph node written so far. This is used to
408 determine if NODE is a clone of a previously written node. */
409
410 static void
411 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
412 lto_cgraph_encoder_t encoder, cgraph_node_set set,
413 varpool_node_set vset)
414 {
415 unsigned int tag;
416 struct bitpack_d bp;
417 bool boundary_p;
418 intptr_t ref;
419 bool in_other_partition = false;
420 struct cgraph_node *clone_of;
421
422 boundary_p = !cgraph_node_in_set_p (node, set);
423
424 if (node->analyzed && !boundary_p)
425 tag = LTO_cgraph_analyzed_node;
426 else
427 tag = LTO_cgraph_unavail_node;
428
429 streamer_write_enum (ob->main_stream, LTO_cgraph_tags, LTO_cgraph_last_tag,
430 tag);
431 streamer_write_hwi_stream (ob->main_stream, node->order);
432
433 /* In WPA mode, we only output part of the call-graph. Also, we
434 fake cgraph node attributes. There are two cases that we care.
435
436 Boundary nodes: There are nodes that are not part of SET but are
437 called from within SET. We artificially make them look like
438 externally visible nodes with no function body.
439
440 Cherry-picked nodes: These are nodes we pulled from other
441 translation units into SET during IPA-inlining. We make them as
442 local static nodes to prevent clashes with other local statics. */
443 if (boundary_p && node->analyzed)
444 {
445 /* Inline clones can not be part of boundary.
446 gcc_assert (!node->global.inlined_to);
447
448 FIXME: At the moment they can be, when partition contains an inline
449 clone that is clone of inline clone from outside partition. We can
450 reshape the clone tree and make other tree to be the root, but it
451 needs a bit extra work and will be promplty done by cgraph_remove_node
452 after reading back. */
453 in_other_partition = 1;
454 }
455
456 clone_of = node->clone_of;
457 while (clone_of
458 && (ref = lto_cgraph_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
459 if (clone_of->prev_sibling_clone)
460 clone_of = clone_of->prev_sibling_clone;
461 else
462 clone_of = clone_of->clone_of;
463
464 if (LTO_cgraph_analyzed_node)
465 gcc_assert (clone_of || !node->clone_of);
466 if (!clone_of)
467 streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
468 else
469 streamer_write_hwi_stream (ob->main_stream, ref);
470
471
472 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
473 streamer_write_hwi_stream (ob->main_stream, node->count);
474 streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
475
476 if (tag == LTO_cgraph_analyzed_node)
477 {
478 if (node->global.inlined_to)
479 {
480 ref = lto_cgraph_encoder_lookup (encoder, node->global.inlined_to);
481 gcc_assert (ref != LCC_NOT_FOUND);
482 }
483 else
484 ref = LCC_NOT_FOUND;
485
486 streamer_write_hwi_stream (ob->main_stream, ref);
487 }
488
489 if (node->same_comdat_group && !boundary_p)
490 {
491 ref = lto_cgraph_encoder_lookup (encoder, node->same_comdat_group);
492 gcc_assert (ref != LCC_NOT_FOUND);
493 }
494 else
495 ref = LCC_NOT_FOUND;
496 streamer_write_hwi_stream (ob->main_stream, ref);
497
498 bp = bitpack_create (ob->main_stream);
499 bp_pack_value (&bp, node->local.local, 1);
500 bp_pack_value (&bp, node->local.externally_visible, 1);
501 bp_pack_value (&bp, node->local.finalized, 1);
502 bp_pack_value (&bp, node->local.versionable, 1);
503 bp_pack_value (&bp, node->local.can_change_signature, 1);
504 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
505 bp_pack_value (&bp, node->needed, 1);
506 bp_pack_value (&bp, node->address_taken, 1);
507 bp_pack_value (&bp, node->abstract_and_needed, 1);
508 bp_pack_value (&bp, tag == LTO_cgraph_analyzed_node
509 && !DECL_EXTERNAL (node->decl)
510 && !DECL_COMDAT (node->decl)
511 && (reachable_from_other_partition_p (node, set)
512 || referenced_from_other_partition_p (&node->ref_list, set, vset)), 1);
513 bp_pack_value (&bp, node->lowered, 1);
514 bp_pack_value (&bp, in_other_partition, 1);
515 /* Real aliases in a boundary become non-aliases. However we still stream
516 alias info on weakrefs.
517 TODO: We lose a bit of information here - when we know that variable is
518 defined in other unit, we may use the info on aliases to resolve
519 symbol1 != symbol2 type tests that we can do only for locally defined objects
520 otherwise. */
521 bp_pack_value (&bp, node->alias && (!boundary_p || DECL_EXTERNAL (node->decl)), 1);
522 bp_pack_value (&bp, node->frequency, 2);
523 bp_pack_value (&bp, node->only_called_at_startup, 1);
524 bp_pack_value (&bp, node->only_called_at_exit, 1);
525 bp_pack_value (&bp, node->thunk.thunk_p && !boundary_p, 1);
526 bp_pack_enum (&bp, ld_plugin_symbol_resolution,
527 LDPR_NUM_KNOWN, node->resolution);
528 streamer_write_bitpack (&bp);
529
530 if (node->thunk.thunk_p && !boundary_p)
531 {
532 streamer_write_uhwi_stream
533 (ob->main_stream,
534 1 + (node->thunk.this_adjusting != 0) * 2
535 + (node->thunk.virtual_offset_p != 0) * 4);
536 streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
537 streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
538 }
539 if ((node->alias || node->thunk.thunk_p)
540 && (!boundary_p || (node->alias && DECL_EXTERNAL (node->decl))))
541 {
542 streamer_write_hwi_in_range (ob->main_stream, 0, 1,
543 node->thunk.alias != NULL);
544 if (node->thunk.alias != NULL)
545 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
546 node->thunk.alias);
547 }
548 }
549
/* Output the varpool NODE to OB.
   If NODE is not in SET, then NODE is a boundary.

   The order of writes and packed bits is the wire format; it must stay
   in sync with the varpool reader.  */

static void
lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
			 lto_varpool_encoder_t varpool_encoder,
			 cgraph_node_set set, varpool_node_set vset)
{
  bool boundary_p = !varpool_node_in_set_p (node, vset) && node->analyzed;
  struct bitpack_d bp;
  int ref;

  streamer_write_hwi_stream (ob->main_stream, node->order);
  lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, node->externally_visible, 1);
  bp_pack_value (&bp, node->force_output, 1);
  bp_pack_value (&bp, node->finalized, 1);
  bp_pack_value (&bp, node->alias, 1);
  /* Only a flag for the alias target here; the decl itself follows the
     bitpack below.  */
  bp_pack_value (&bp, node->alias_of != NULL, 1);
  gcc_assert (node->finalized || !node->analyzed);
  gcc_assert (node->needed);
  /* Constant pool initializers can be de-unified into individual ltrans units.
     FIXME: Alternatively at -Os we may want to avoid generating for them the local
     labels and share them across LTRANS partitions.  */
  if (DECL_IN_CONSTANT_POOL (node->decl)
      && !DECL_COMDAT (node->decl))
    {
      bp_pack_value (&bp, 0, 1);  /* used_from_other_parition.  */
      bp_pack_value (&bp, 0, 1);  /* in_other_partition.  */
    }
  else
    {
      bp_pack_value (&bp, node->analyzed
		     && referenced_from_other_partition_p (&node->ref_list,
							   set, vset), 1);
      bp_pack_value (&bp, boundary_p, 1);  /* in_other_partition.  */
    }
  streamer_write_bitpack (&bp);
  if (node->alias_of)
    lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->alias_of);
  /* Reference to the next member of the same comdat group, or
     LCC_NOT_FOUND for boundary nodes and nodes not in a group.  */
  if (node->same_comdat_group && !boundary_p)
    {
      ref = lto_varpool_encoder_lookup (varpool_encoder, node->same_comdat_group);
      gcc_assert (ref != LCC_NOT_FOUND);
    }
  else
    ref = LCC_NOT_FOUND;
  streamer_write_hwi_stream (ob->main_stream, ref);
  streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
		       LDPR_NUM_KNOWN, node->resolution);
}
602
/* Output the IPA reference REF to OB, resolving the referred node
   through ENCODER (functions) or VARPOOL_ENCODER (variables).  */

static void
lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
		lto_cgraph_encoder_t encoder,
		lto_varpool_encoder_t varpool_encoder)
{
  struct bitpack_d bp;
  /* Pack the referred-node kind (1 bit) and the use kind (2 bits).  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ref->refered_type, 1);
  bp_pack_value (&bp, ref->use, 2);
  streamer_write_bitpack (&bp);
  if (ref->refered_type == IPA_REF_CGRAPH)
    {
      int nref = lto_cgraph_encoder_lookup (encoder, ipa_ref_node (ref));
      gcc_assert (nref != LCC_NOT_FOUND);
      streamer_write_hwi_stream (ob->main_stream, nref);
    }
  else
    {
      int nref = lto_varpool_encoder_lookup (varpool_encoder,
					     ipa_ref_varpool_node (ref));
      gcc_assert (nref != LCC_NOT_FOUND);
      streamer_write_hwi_stream (ob->main_stream, nref);
    }
}
630
631 /* Stream out profile_summary to OB. */
632
633 static void
634 output_profile_summary (struct lto_simple_output_block *ob)
635 {
636 if (profile_info)
637 {
638 /* We do not output num, sum_all and run_max, they are not used by
639 GCC profile feedback and they are difficult to merge from multiple
640 units. */
641 gcc_assert (profile_info->runs);
642 streamer_write_uhwi_stream (ob->main_stream, profile_info->runs);
643 streamer_write_uhwi_stream (ob->main_stream, profile_info->sum_max);
644 }
645 else
646 streamer_write_uhwi_stream (ob->main_stream, 0);
647 }
648
649 /* Add NODE into encoder as well as nodes it is cloned from.
650 Do it in a way so clones appear first. */
651
652 static void
653 add_node_to (lto_cgraph_encoder_t encoder, struct cgraph_node *node,
654 bool include_body)
655 {
656 if (node->clone_of)
657 add_node_to (encoder, node->clone_of, include_body);
658 else if (include_body)
659 lto_set_cgraph_encoder_encode_body (encoder, node);
660 lto_cgraph_encoder_encode (encoder, node);
661 }
662
/* Add all references in LIST to encoders: referred functions go into
   ENCODER (without their bodies), referred variables into
   VARPOOL_ENCODER.  */

static void
add_references (lto_cgraph_encoder_t encoder,
		lto_varpool_encoder_t varpool_encoder,
		struct ipa_ref_list *list)
{
  int i;
  struct ipa_ref *ref;
  for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
    if (ref->refered_type == IPA_REF_CGRAPH)
      add_node_to (encoder, ipa_ref_node (ref), false);
    else
      {
	struct varpool_node *vnode = ipa_ref_varpool_node (ref);
	lto_varpool_encoder_encode (varpool_encoder, vnode);
      }
}
681
682 /* Output all callees or indirect outgoing edges. EDGE must be the first such
683 edge. */
684
685 static void
686 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
687 struct lto_simple_output_block *ob,
688 lto_cgraph_encoder_t encoder)
689 {
690 if (!edge)
691 return;
692
693 /* Output edges in backward direction, so the reconstructed callgraph match
694 and it is easy to associate call sites in the IPA pass summaries. */
695 while (edge->next_callee)
696 edge = edge->next_callee;
697 for (; edge; edge = edge->prev_callee)
698 lto_output_edge (ob, edge, encoder);
699 }
700
/* Output the IPA references of all cgraph nodes in SET and all varpool
   nodes in VSET into the LTO_section_refs section.  Each record is a
   nonzero reference count, the owner's encoder index, then the
   references; a zero count terminates the cgraph list and again the
   varpool list.  */

static void
output_refs (cgraph_node_set set, varpool_node_set vset,
	     lto_cgraph_encoder_t encoder,
	     lto_varpool_encoder_t varpool_encoder)
{
  cgraph_node_set_iterator csi;
  varpool_node_set_iterator vsi;
  struct lto_simple_output_block *ob;
  int count;
  struct ipa_ref *ref;
  int i;

  ob = lto_create_simple_output_block (LTO_section_refs);

  /* First the references owned by functions...  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      struct cgraph_node *node = csi_node (csi);

      count = ipa_ref_list_nreferences (&node->ref_list);
      if (count)
	{
	  streamer_write_uhwi_stream (ob->main_stream, count);
	  streamer_write_uhwi_stream (ob->main_stream,
				      lto_cgraph_encoder_lookup (encoder, node));
	  for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
	    lto_output_ref (ob, ref, encoder, varpool_encoder);
	}
    }

  /* Zero count terminates the function list.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  /* ...then the references owned by variables.  */
  for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
    {
      struct varpool_node *node = vsi_node (vsi);

      count = ipa_ref_list_nreferences (&node->ref_list);
      if (count)
	{
	  streamer_write_uhwi_stream (ob->main_stream, count);
	  streamer_write_uhwi_stream (ob->main_stream,
				      lto_varpool_encoder_lookup (varpool_encoder,
								  node));
	  for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
	    lto_output_ref (ob, ref, encoder, varpool_encoder);
	}
    }

  /* Zero count terminates the variable list.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  lto_destroy_simple_output_block (ob);
}
754
/* Find out all cgraph and varpool nodes we want to encode in current unit
   and insert them to encoders.

   The boundary consists of: the members of SET (with bodies) and VSET
   (with initializers), everything they reference, readonly variables
   whose value is known (to help folding), and out-of-set callees
   (without bodies).  */
void
compute_ltrans_boundary (struct lto_out_decl_state *state,
			 cgraph_node_set set, varpool_node_set vset)
{
  struct cgraph_node *node;
  cgraph_node_set_iterator csi;
  varpool_node_set_iterator vsi;
  struct cgraph_edge *edge;
  int i;
  lto_cgraph_encoder_t encoder;
  lto_varpool_encoder_t varpool_encoder;

  encoder = state->cgraph_node_encoder = lto_cgraph_encoder_new ();
  varpool_encoder = state->varpool_node_encoder = lto_varpool_encoder_new ();

  /* Go over all the nodes in SET and assign references.  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      add_node_to (encoder, node, true);
      add_references (encoder, varpool_encoder, &node->ref_list);
    }
  for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
    {
      struct varpool_node *vnode = vsi_node (vsi);
      gcc_assert (!vnode->alias || vnode->alias_of);
      lto_varpool_encoder_encode (varpool_encoder, vnode);
      lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
      add_references (encoder, varpool_encoder, &vnode->ref_list);
    }
  /* Pickle in also the initializer of all referenced readonly variables
     to help folding.  Constant pool variables are not shared, so we must
     pickle those too.

     Note: the loop bound is re-evaluated each iteration, so variables
     pulled in by add_references below are processed as well.  */
  for (i = 0; i < lto_varpool_encoder_size (varpool_encoder); i++)
    {
      struct varpool_node *vnode = lto_varpool_encoder_deref (varpool_encoder, i);
      if (DECL_INITIAL (vnode->decl)
	  && !lto_varpool_encoder_encode_initializer_p (varpool_encoder,
							vnode)
	  && const_value_known_p (vnode->decl))
	{
	  lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
	  add_references (encoder, varpool_encoder, &vnode->ref_list);
	}
    }

  /* Go over all the nodes again to include callees that are not in
     SET.  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      for (edge = node->callees; edge; edge = edge->next_callee)
	{
	  struct cgraph_node *callee = edge->callee;
	  if (!cgraph_node_in_set_p (callee, set))
	    {
	      /* We should have moved all the inlines.  */
	      gcc_assert (!callee->global.inlined_to);
	      /* Boundary callee: encode without body.  */
	      add_node_to (encoder, callee, false);
	    }
	}
    }
}
820
/* Output the part of the cgraph in SET: profile summary, all encoded
   nodes, then the outgoing edges of nodes in SET, followed by toplevel
   asms (once), the varpool section and the references section.  */

void
output_cgraph (cgraph_node_set set, varpool_node_set vset)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob;
  cgraph_node_set_iterator csi;
  int i, n_nodes;
  lto_cgraph_encoder_t encoder;
  lto_varpool_encoder_t varpool_encoder;
  /* Static so the asm nodes are emitted into the first output only.  */
  static bool asm_nodes_output = false;

  if (flag_wpa)
    output_cgraph_opt_summary (set);

  ob = lto_create_simple_output_block (LTO_section_cgraph);

  output_profile_summary (ob);

  /* An encoder for cgraph nodes should have been created by
     ipa_write_summaries_1.  */
  gcc_assert (ob->decl_state->cgraph_node_encoder);
  gcc_assert (ob->decl_state->varpool_node_encoder);
  encoder = ob->decl_state->cgraph_node_encoder;
  varpool_encoder = ob->decl_state->varpool_node_encoder;

  /* Write out the nodes.  We must first output a node and then its clones,
     otherwise at a time reading back the node there would be nothing to clone
     from.  */
  n_nodes = lto_cgraph_encoder_size (encoder);
  for (i = 0; i < n_nodes; i++)
    {
      node = lto_cgraph_encoder_deref (encoder, i);
      lto_output_node (ob, node, encoder, set, vset);
    }

  /* Go over the nodes in SET again to write edges.  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      output_outgoing_cgraph_edges (node->callees, ob, encoder);
      output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
    }

  /* Zero terminates the edge records.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  lto_destroy_simple_output_block (ob);

  /* Emit toplevel asms.
     When doing WPA we must output every asm just once.  Since we do not partition asm
     nodes at all, output them to first output.  This is kind of hack, but should work
     well.  */
  if (!asm_nodes_output)
    {
      asm_nodes_output = true;
      lto_output_toplevel_asms ();
    }

  output_varpool (set, vset);
  output_refs (set, vset, encoder, varpool_encoder);
}
883
/* Overwrite the information in NODE based on FILE_DATA, TAG, FLAGS,
   STACK_SIZE, SELF_TIME and SELF_SIZE.  This is called either to initialize
   NODE or to replace the values in it, for instance because the first
   time we saw it, the function body was not available but now it
   is.  BP is a bitpack with all the bitflags for NODE read from the
   stream.

   The unpack sequence below must mirror the pack sequence in
   lto_output_node exactly.  */

static void
input_overwrite_node (struct lto_file_decl_data *file_data,
		      struct cgraph_node *node,
		      enum LTO_cgraph_tags tag,
		      struct bitpack_d *bp)
{
  /* Remember the tag in AUX so re-reads of the same node can be
     detected and merged.  */
  node->aux = (void *) tag;
  node->local.lto_file_data = file_data;

  node->local.local = bp_unpack_value (bp, 1);
  node->local.externally_visible = bp_unpack_value (bp, 1);
  node->local.finalized = bp_unpack_value (bp, 1);
  node->local.versionable = bp_unpack_value (bp, 1);
  node->local.can_change_signature = bp_unpack_value (bp, 1);
  node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
  node->needed = bp_unpack_value (bp, 1);
  node->address_taken = bp_unpack_value (bp, 1);
  node->abstract_and_needed = bp_unpack_value (bp, 1);
  node->reachable_from_other_partition = bp_unpack_value (bp, 1);
  node->lowered = bp_unpack_value (bp, 1);
  /* A body is available only for nodes streamed as analyzed.  */
  node->analyzed = tag == LTO_cgraph_analyzed_node;
  node->in_other_partition = bp_unpack_value (bp, 1);
  if (node->in_other_partition
      /* Avoid updating decl when we are seeing just inline clone.
	 When inlining function that has functions already inlined into it,
	 we produce clones of inline clones.

	 WPA partitioning might put each clone into different unit and
	 we might end up streaming inline clone from other partition
	 to support clone we are interested in.  */
      && (!node->clone_of
	  || node->clone_of->decl != node->decl))
    {
      /* The definition lives elsewhere; treat the decl as external
	 here.  */
      DECL_EXTERNAL (node->decl) = 1;
      TREE_STATIC (node->decl) = 0;
    }
  node->alias = bp_unpack_value (bp, 1);
  node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
  node->only_called_at_startup = bp_unpack_value (bp, 1);
  node->only_called_at_exit = bp_unpack_value (bp, 1);
  node->thunk.thunk_p = bp_unpack_value (bp, 1);
  node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
				     LDPR_NUM_KNOWN);
}
935
/* Output the encoded varpool nodes for SET/VSET into the
   LTO_section_varpool section: the node count followed by one record
   per node.  */

static void
output_varpool (cgraph_node_set set, varpool_node_set vset)
{
  struct lto_simple_output_block *ob = lto_create_simple_output_block (LTO_section_varpool);
  lto_varpool_encoder_t varpool_encoder = ob->decl_state->varpool_node_encoder;
  int len = lto_varpool_encoder_size (varpool_encoder), i;

  streamer_write_uhwi_stream (ob->main_stream, len);

  /* Write out the nodes.  We must first output a node and then its clones,
     otherwise at a time reading back the node there would be nothing to clone
     from.  */
  for (i = 0; i < len; i++)
    {
      lto_output_varpool_node (ob, lto_varpool_encoder_deref (varpool_encoder, i),
			       varpool_encoder,
			       set, vset);
    }

  lto_destroy_simple_output_block (ob);
}
959
/* Read a cgraph node from input_block IB.  TAG is the node's tag just read.
   Return the node read or overwritten.  */

static struct cgraph_node *
input_node (struct lto_file_decl_data *file_data,
	    struct lto_input_block *ib,
	    enum LTO_cgraph_tags tag,
	    VEC(cgraph_node_ptr, heap) *nodes)
{
  tree fn_decl;
  struct cgraph_node *node;
  struct bitpack_d bp;
  unsigned decl_index;
  int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
  int clone_ref;
  int order;

  /* Orders are streamed relative to this unit's ORDER_BASE.  */
  order = streamer_read_hwi (ib) + order_base;
  clone_ref = streamer_read_hwi (ib);

  decl_index = streamer_read_uhwi (ib);
  fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);

  /* A valid CLONE_REF indexes a previously read node in NODES that this
     node is a clone of; otherwise look up or create the node anew.  */
  if (clone_ref != LCC_NOT_FOUND)
    {
      node = cgraph_clone_node (VEC_index (cgraph_node_ptr, nodes, clone_ref), fn_decl,
				0, CGRAPH_FREQ_BASE, false, NULL, false);
    }
  else
    node = cgraph_get_create_node (fn_decl);

  node->order = order;
  if (order >= cgraph_order)
    cgraph_order = order + 1;

  node->count = streamer_read_hwi (ib);
  node->count_materialization_scale = streamer_read_hwi (ib);

  /* Only analyzed nodes stream an inlined_to reference.  */
  if (tag == LTO_cgraph_analyzed_node)
    ref = streamer_read_hwi (ib);

  ref2 = streamer_read_hwi (ib);

  /* Make sure that we have not read this node before.  Nodes that
     have already been read will have their tag stored in the 'aux'
     field.  Since built-in functions can be referenced in multiple
     functions, they are expected to be read more than once.  */
  if (node->aux && !DECL_BUILT_IN (node->decl))
    internal_error ("bytecode stream: found multiple instances of cgraph "
		    "node %d", node->uid);

  bp = streamer_read_bitpack (ib);
  input_overwrite_node (file_data, node, tag, &bp);

  /* Store a reference for now, and fix up later to be a pointer.  */
  node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;

  /* Store a reference for now, and fix up later to be a pointer.  */
  node->same_comdat_group = (cgraph_node_ptr) (intptr_t) ref2;

  if (node->thunk.thunk_p)
    {
      /* TYPE packs the thunk flags: bit 1 is this_adjusting,
	 bit 2 is virtual_offset_p (see the writer side).  */
      int type = streamer_read_uhwi (ib);
      HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
      HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);

      node->thunk.fixed_offset = fixed_offset;
      node->thunk.this_adjusting = (type & 2);
      node->thunk.virtual_value = virtual_value;
      node->thunk.virtual_offset_p = (type & 4);
    }
  if (node->thunk.thunk_p || node->alias)
    {
      /* An optional alias target decl follows, guarded by a 0/1 flag.  */
      if (streamer_read_hwi_in_range (ib, "alias nonzero flag", 0, 1))
	{
	  decl_index = streamer_read_uhwi (ib);
	  node->thunk.alias = lto_file_decl_data_get_fn_decl (file_data,
							      decl_index);
	}
    }
  return node;
}
1042
/* Read a varpool node from input_block IB.  Return the node read or
   overwritten.  */

static struct varpool_node *
input_varpool_node (struct lto_file_decl_data *file_data,
		    struct lto_input_block *ib)
{
  int decl_index;
  tree var_decl;
  struct varpool_node *node;
  struct bitpack_d bp;
  int ref = LCC_NOT_FOUND;
  bool non_null_aliasof;
  int order;

  /* Orders are streamed relative to this unit's ORDER_BASE.  */
  order = streamer_read_hwi (ib) + order_base;
  decl_index = streamer_read_uhwi (ib);
  var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
  node = varpool_node (var_decl);
  node->order = order;
  if (order >= cgraph_order)
    cgraph_order = order + 1;
  node->lto_file_data = file_data;

  /* Unpack the flag bits in the same order they were packed.  */
  bp = streamer_read_bitpack (ib);
  node->externally_visible = bp_unpack_value (&bp, 1);
  node->force_output = bp_unpack_value (&bp, 1);
  node->finalized = bp_unpack_value (&bp, 1);
  node->alias = bp_unpack_value (&bp, 1);
  non_null_aliasof = bp_unpack_value (&bp, 1);
  node->analyzed = node->finalized;
  node->used_from_other_partition = bp_unpack_value (&bp, 1);
  node->in_other_partition = bp_unpack_value (&bp, 1);
  if (node->in_other_partition)
    {
      /* A node living in another partition is external in this one.  */
      DECL_EXTERNAL (node->decl) = 1;
      TREE_STATIC (node->decl) = 0;
    }
  if (node->finalized)
    varpool_mark_needed_node (node);
  if (non_null_aliasof)
    {
      /* The alias target decl only follows when the flag was set.  */
      decl_index = streamer_read_uhwi (ib);
      node->alias_of = lto_file_decl_data_get_var_decl (file_data, decl_index);
    }
  ref = streamer_read_hwi (ib);
  /* Store a reference for now, and fix up later to be a pointer.  */
  node->same_comdat_group = (struct varpool_node *) (intptr_t) ref;
  node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
					 LDPR_NUM_KNOWN);

  return node;
}
1096
/* Read an IPA reference from input_block IB and record it as originating
   from REFERING_NODE (a cgraph node) or REFERING_VARPOOL_NODE (a varpool
   node) -- the callers pass exactly one of them non-NULL.  The referred-to
   node is decoded as an index into NODES or VARPOOL_NODES depending on
   the reference type.  */

static void
input_ref (struct lto_input_block *ib,
	   struct cgraph_node *refering_node,
	   struct varpool_node *refering_varpool_node,
	   VEC(cgraph_node_ptr, heap) *nodes,
	   VEC(varpool_node_ptr, heap) *varpool_nodes)
{
  struct cgraph_node *node = NULL;
  struct varpool_node *varpool_node = NULL;
  struct bitpack_d bp;
  enum ipa_ref_type type;
  enum ipa_ref_use use;

  /* A bitpack with the reference type and use precedes the index.  */
  bp = streamer_read_bitpack (ib);
  type = (enum ipa_ref_type) bp_unpack_value (&bp, 1);
  use = (enum ipa_ref_use) bp_unpack_value (&bp, 2);
  if (type == IPA_REF_CGRAPH)
    node = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
  else
    varpool_node = VEC_index (varpool_node_ptr, varpool_nodes,
			      streamer_read_hwi (ib));
  ipa_record_reference (refering_node, refering_varpool_node,
			node, varpool_node, use, NULL);
}
1124
/* Read an edge from IB.  NODES points to a vector of previously read nodes for
   decoding caller and callee of the edge to be read.  If INDIRECT is true, the
   edge being read is indirect (in the sense that it has
   indirect_unknown_callee set).  */

static void
input_edge (struct lto_input_block *ib, VEC(cgraph_node_ptr, heap) *nodes,
	    bool indirect)
{
  struct cgraph_node *caller, *callee;
  struct cgraph_edge *edge;
  unsigned int stmt_id;
  gcov_type count;
  int freq;
  cgraph_inline_failed_t inline_failed;
  struct bitpack_d bp;
  int ecf_flags = 0;

  /* The caller (and, for direct edges, callee) are indices into NODES.  */
  caller = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
  if (caller == NULL || caller->decl == NULL_TREE)
    internal_error ("bytecode stream: no caller found while reading edge");

  if (!indirect)
    {
      callee = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
      if (callee == NULL || callee->decl == NULL_TREE)
	internal_error ("bytecode stream: no callee found while reading edge");
    }
  else
    callee = NULL;

  count = (gcov_type) streamer_read_hwi (ib);

  /* The remaining attributes share one bitpack; unpack in stream order.  */
  bp = streamer_read_bitpack (ib);
  inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_enum, CIF_N_REASONS);
  stmt_id = bp_unpack_var_len_unsigned (&bp);
  freq = (int) bp_unpack_var_len_unsigned (&bp);

  if (indirect)
    edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq);
  else
    edge = cgraph_create_edge (caller, callee, NULL, count, freq);

  edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
  edge->lto_stmt_uid = stmt_id;
  edge->inline_failed = inline_failed;
  edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
  edge->can_throw_external = bp_unpack_value (&bp, 1);
  if (indirect)
    {
      /* Indirect edges additionally carry the ECF flags of the unknown
	 callee, one bit per flag, in this fixed order.  */
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_CONST;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_PURE;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NORETURN;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_MALLOC;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NOTHROW;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_RETURNS_TWICE;
      edge->indirect_info->ecf_flags = ecf_flags;
    }
}
1190
1191
/* Read a cgraph from IB using the info in FILE_DATA.  Return the vector
   of nodes read, in stream order, for later index-based fixups.  */

static VEC(cgraph_node_ptr, heap) *
input_cgraph_1 (struct lto_file_decl_data *file_data,
		struct lto_input_block *ib)
{
  enum LTO_cgraph_tags tag;
  VEC(cgraph_node_ptr, heap) *nodes = NULL;
  struct cgraph_node *node;
  unsigned i;

  tag = streamer_read_enum (ib, LTO_cgraph_tags, LTO_cgraph_last_tag);
  order_base = cgraph_order;
  /* The section is a sequence of tagged records ended by a zero tag.  */
  while (tag)
    {
      if (tag == LTO_cgraph_edge)
	input_edge (ib, nodes, false);
      else if (tag == LTO_cgraph_indirect_edge)
	input_edge (ib, nodes, true);
      else
	{
	  node = input_node (file_data, ib, tag,nodes);
	  if (node == NULL || node->decl == NULL_TREE)
	    internal_error ("bytecode stream: found empty cgraph node");
	  VEC_safe_push (cgraph_node_ptr, heap, nodes, node);
	  lto_cgraph_encoder_encode (file_data->cgraph_node_encoder, node);
	}

      tag = streamer_read_enum (ib, LTO_cgraph_tags, LTO_cgraph_last_tag);
    }

  lto_input_toplevel_asms (file_data, order_base);

  /* AUX pointers should be all non-zero for nodes read from the stream.  */
#ifdef ENABLE_CHECKING
  FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
    gcc_assert (node->aux);
#endif
  FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
    {
      int ref = (int) (intptr_t) node->global.inlined_to;

      /* We share declaration of builtins, so we may read same node twice.  */
      if (!node->aux)
	continue;
      node->aux = NULL;

      /* Fixup inlined_to from reference to pointer.  */
      if (ref != LCC_NOT_FOUND)
	node->global.inlined_to = VEC_index (cgraph_node_ptr, nodes, ref);
      else
	node->global.inlined_to = NULL;

      ref = (int) (intptr_t) node->same_comdat_group;

      /* Fixup same_comdat_group from reference to pointer.  */
      if (ref != LCC_NOT_FOUND)
	node->same_comdat_group = VEC_index (cgraph_node_ptr, nodes, ref);
      else
	node->same_comdat_group = NULL;
    }
  /* Re-mark the streamed nodes; input_cgraph clears these AUX fields
     again once all files have been read.  */
  FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
    node->aux = (void *)1;
  return nodes;
}
1257
/* Read a varpool from IB using the info in FILE_DATA.  Return the vector
   of varpool nodes read, in stream order.  */

static VEC(varpool_node_ptr, heap) *
input_varpool_1 (struct lto_file_decl_data *file_data,
		 struct lto_input_block *ib)
{
  unsigned HOST_WIDE_INT len;
  VEC(varpool_node_ptr, heap) *varpool = NULL;
  int i;
  struct varpool_node *node;

  /* The section starts with the node count.  */
  len = streamer_read_uhwi (ib);
  while (len)
    {
      VEC_safe_push (varpool_node_ptr, heap, varpool,
		     input_varpool_node (file_data, ib));
      len--;
    }
#ifdef ENABLE_CHECKING
  /* Freshly read nodes must not be marked visited yet.  */
  FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
    gcc_assert (!node->aux);
#endif
  FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
    {
      int ref = (int) (intptr_t) node->same_comdat_group;
      /* We share declaration of builtins, so we may read same node twice.  */
      if (node->aux)
	continue;
      node->aux = (void *)1;

      /* Fixup same_comdat_group from reference to pointer.  */
      if (ref != LCC_NOT_FOUND)
	node->same_comdat_group = VEC_index (varpool_node_ptr, varpool, ref);
      else
	node->same_comdat_group = NULL;
    }
  /* Clear the AUX markers used during the fixup pass.  */
  FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
    node->aux = NULL;
  return varpool;
}
1298
1299 /* Input ipa_refs. */
1300
1301 static void
1302 input_refs (struct lto_input_block *ib,
1303 VEC(cgraph_node_ptr, heap) *nodes,
1304 VEC(varpool_node_ptr, heap) *varpool)
1305 {
1306 int count;
1307 int idx;
1308 while (true)
1309 {
1310 struct cgraph_node *node;
1311 count = streamer_read_uhwi (ib);
1312 if (!count)
1313 break;
1314 idx = streamer_read_uhwi (ib);
1315 node = VEC_index (cgraph_node_ptr, nodes, idx);
1316 while (count)
1317 {
1318 input_ref (ib, node, NULL, nodes, varpool);
1319 count--;
1320 }
1321 }
1322 while (true)
1323 {
1324 struct varpool_node *node;
1325 count = streamer_read_uhwi (ib);
1326 if (!count)
1327 break;
1328 node = VEC_index (varpool_node_ptr, varpool,
1329 streamer_read_uhwi (ib));
1330 while (count)
1331 {
1332 input_ref (ib, NULL, node, nodes, varpool);
1333 count--;
1334 }
1335 }
1336 }
1337
1338
1339 static struct gcov_ctr_summary lto_gcov_summary;
1340
1341 /* Input profile_info from IB. */
1342 static void
1343 input_profile_summary (struct lto_input_block *ib,
1344 struct lto_file_decl_data *file_data)
1345 {
1346 unsigned int runs = streamer_read_uhwi (ib);
1347 if (runs)
1348 {
1349 file_data->profile_info.runs = runs;
1350 file_data->profile_info.sum_max = streamer_read_uhwi (ib);
1351 }
1352
1353 }
1354
1355 /* Rescale profile summaries to the same number of runs in the whole unit. */
1356
1357 static void
1358 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1359 {
1360 struct lto_file_decl_data *file_data;
1361 unsigned int j;
1362 gcov_unsigned_t max_runs = 0;
1363 struct cgraph_node *node;
1364 struct cgraph_edge *edge;
1365
1366 /* Find unit with maximal number of runs. If we ever get serious about
1367 roundoff errors, we might also consider computing smallest common
1368 multiply. */
1369 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1370 if (max_runs < file_data->profile_info.runs)
1371 max_runs = file_data->profile_info.runs;
1372
1373 if (!max_runs)
1374 return;
1375
1376 /* Simple overflow check. We probably don't need to support that many train
1377 runs. Such a large value probably imply data corruption anyway. */
1378 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1379 {
1380 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1381 INT_MAX / REG_BR_PROB_BASE);
1382 return;
1383 }
1384
1385 profile_info = &lto_gcov_summary;
1386 lto_gcov_summary.runs = max_runs;
1387 lto_gcov_summary.sum_max = 0;
1388
1389 /* Rescale all units to the maximal number of runs.
1390 sum_max can not be easily merged, as we have no idea what files come from
1391 the same run. We do not use the info anyway, so leave it 0. */
1392 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1393 if (file_data->profile_info.runs)
1394 {
1395 int scale = ((REG_BR_PROB_BASE * max_runs
1396 + file_data->profile_info.runs / 2)
1397 / file_data->profile_info.runs);
1398 lto_gcov_summary.sum_max = MAX (lto_gcov_summary.sum_max,
1399 (file_data->profile_info.sum_max
1400 * scale
1401 + REG_BR_PROB_BASE / 2)
1402 / REG_BR_PROB_BASE);
1403 }
1404
1405 /* Watch roundoff errors. */
1406 if (lto_gcov_summary.sum_max < max_runs)
1407 lto_gcov_summary.sum_max = max_runs;
1408
1409 /* If merging already happent at WPA time, we are done. */
1410 if (flag_ltrans)
1411 return;
1412
1413 /* Now compute count_materialization_scale of each node.
1414 During LTRANS we already have values of count_materialization_scale
1415 computed, so just update them. */
1416 for (node = cgraph_nodes; node; node = node->next)
1417 if (node->local.lto_file_data
1418 && node->local.lto_file_data->profile_info.runs)
1419 {
1420 int scale;
1421
1422 scale =
1423 ((node->count_materialization_scale * max_runs
1424 + node->local.lto_file_data->profile_info.runs / 2)
1425 / node->local.lto_file_data->profile_info.runs);
1426 node->count_materialization_scale = scale;
1427 if (scale < 0)
1428 fatal_error ("Profile information in %s corrupted",
1429 file_data->file_name);
1430
1431 if (scale == REG_BR_PROB_BASE)
1432 continue;
1433 for (edge = node->callees; edge; edge = edge->next_callee)
1434 edge->count = ((edge->count * scale + REG_BR_PROB_BASE / 2)
1435 / REG_BR_PROB_BASE);
1436 node->count = ((node->count * scale + REG_BR_PROB_BASE / 2)
1437 / REG_BR_PROB_BASE);
1438 }
1439 }
1440
/* Input and merge the cgraph from each of the .o files passed to
   lto1.  For every file this reads, in order: the cgraph section
   (profile summary, nodes, edges), the varpool section, and the
   ipa-refs section; during LTRANS also the optimization summary.  */

void
input_cgraph (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;
  struct cgraph_node *node;

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib;
      VEC(cgraph_node_ptr, heap) *nodes;
      VEC(varpool_node_ptr, heap) *varpool;

      /* Cgraph section: profile summary, then nodes and edges.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_cgraph,
					  &data, &len);
      if (!ib)
	fatal_error ("cannot find LTO cgraph in %s", file_data->file_name);
      input_profile_summary (ib, file_data);
      file_data->cgraph_node_encoder = lto_cgraph_encoder_new ();
      nodes = input_cgraph_1 (file_data, ib);
      lto_destroy_simple_input_block (file_data, LTO_section_cgraph,
				      ib, data, len);

      /* Varpool section.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_varpool,
					  &data, &len);
      if (!ib)
	fatal_error ("cannot find LTO varpool in %s", file_data->file_name);
      varpool = input_varpool_1 (file_data, ib);
      lto_destroy_simple_input_block (file_data, LTO_section_varpool,
				      ib, data, len);

      /* Refs section; the references index into NODES and VARPOOL.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_refs,
					  &data, &len);
      if (!ib)
	fatal_error("cannot find LTO section refs in %s", file_data->file_name);
      input_refs (ib, nodes, varpool);
      lto_destroy_simple_input_block (file_data, LTO_section_refs,
				      ib, data, len);
      if (flag_ltrans)
	input_cgraph_opt_summary (nodes);
      VEC_free (cgraph_node_ptr, heap, nodes);
      VEC_free (varpool_node_ptr, heap, varpool);
    }

  merge_profile_summaries (file_data_vec);

  /* Clear out the aux field that was used to store enough state to
     tell which nodes should be overwritten.  */
  for (node = cgraph_nodes; node; node = node->next)
    {
      /* Some nodes may have been created by cgraph_node.  This
	 happens when the callgraph contains nested functions.  If the
	 node for the parent function was never emitted to the gimple
	 file, cgraph_node will create a node for it when setting the
	 context of the nested function.  */
      if (node->local.lto_file_data)
	node->aux = NULL;
    }
}
1506
1507 /* True when we need optimization summary for NODE. */
1508
1509 static int
1510 output_cgraph_opt_summary_p (struct cgraph_node *node,
1511 cgraph_node_set set ATTRIBUTE_UNUSED)
1512 {
1513 return (node->clone_of
1514 && (node->clone.tree_map
1515 || node->clone.args_to_skip
1516 || node->clone.combined_args_to_skip));
1517 }
1518
/* Output optimization summary for EDGE to OB.  Currently no per-edge
   data is streamed; this placeholder keeps the writer in sync with
   input_edge_opt_summary.  */
static void
output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
			 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
{
}
1525
/* Output optimization summary for NODE to OB: the args_to_skip and
   combined_args_to_skip bitmaps, the tree replace map, and per-edge
   summaries when NODE is in SET.  */

static void
output_node_opt_summary (struct output_block *ob,
			 struct cgraph_node *node,
			 cgraph_node_set set)
{
  unsigned int index;
  bitmap_iterator bi;
  struct ipa_replace_map *map;
  struct bitpack_d bp;
  int i;
  struct cgraph_edge *e;

  /* Each bitmap is streamed as its bit count followed by the set bit
     indices; a zero count means the bitmap is absent.  */
  if (node->clone.args_to_skip)
    {
      streamer_write_uhwi (ob, bitmap_count_bits (node->clone.args_to_skip));
      EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
	streamer_write_uhwi (ob, index);
    }
  else
    streamer_write_uhwi (ob, 0);
  if (node->clone.combined_args_to_skip)
    {
      streamer_write_uhwi (ob, bitmap_count_bits (node->clone.combined_args_to_skip));
      EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
	streamer_write_uhwi (ob, index);
    }
  else
    streamer_write_uhwi (ob, 0);
  streamer_write_uhwi (ob, VEC_length (ipa_replace_map_p,
				       node->clone.tree_map));
  FOR_EACH_VEC_ELT (ipa_replace_map_p, node->clone.tree_map, i, map)
    {
      int parm_num;
      tree parm;

      /* Identify the old tree by its position in DECL_ARGUMENTS; the
	 reader reconstructs the map from that parameter number.  */
      for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm;
	   parm = DECL_CHAIN (parm), parm_num++)
	if (map->old_tree == parm)
	  break;
      /* At the moment we assume all old trees to be PARM_DECLs, because we have no
         mechanism to store function local declarations into summaries.  */
      gcc_assert (parm);
      streamer_write_uhwi (ob, parm_num);
      stream_write_tree (ob, map->new_tree, true);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, map->replace_p, 1);
      bp_pack_value (&bp, map->ref_p, 1);
      streamer_write_bitpack (&bp);
    }

  if (cgraph_node_in_set_p (node, set))
    {
      for (e = node->callees; e; e = e->next_callee)
	output_edge_opt_summary (ob, e);
      for (e = node->indirect_calls; e; e = e->next_callee)
	output_edge_opt_summary (ob, e);
    }
}
1586
1587 /* Output optimization summaries stored in callgraph.
1588 At the moment it is the clone info structure. */
1589
1590 static void
1591 output_cgraph_opt_summary (cgraph_node_set set)
1592 {
1593 struct cgraph_node *node;
1594 int i, n_nodes;
1595 lto_cgraph_encoder_t encoder;
1596 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1597 unsigned count = 0;
1598
1599 ob->cgraph_node = NULL;
1600 encoder = ob->decl_state->cgraph_node_encoder;
1601 n_nodes = lto_cgraph_encoder_size (encoder);
1602 for (i = 0; i < n_nodes; i++)
1603 if (output_cgraph_opt_summary_p (lto_cgraph_encoder_deref (encoder, i),
1604 set))
1605 count++;
1606 streamer_write_uhwi (ob, count);
1607 for (i = 0; i < n_nodes; i++)
1608 {
1609 node = lto_cgraph_encoder_deref (encoder, i);
1610 if (output_cgraph_opt_summary_p (node, set))
1611 {
1612 streamer_write_uhwi (ob, i);
1613 output_node_opt_summary (ob, node, set);
1614 }
1615 }
1616 produce_asm (ob, NULL);
1617 destroy_output_block (ob);
1618 }
1619
/* Input optimisation summary of EDGE.  Currently no per-edge data is
   streamed; this placeholder mirrors output_edge_opt_summary.  */

static void
input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
			struct lto_input_block *ib_main ATTRIBUTE_UNUSED)
{
}
1627
1628 /* Input optimisation summary of NODE. */
1629
1630 static void
1631 input_node_opt_summary (struct cgraph_node *node,
1632 struct lto_input_block *ib_main,
1633 struct data_in *data_in)
1634 {
1635 int i;
1636 int count;
1637 int bit;
1638 struct bitpack_d bp;
1639 struct cgraph_edge *e;
1640
1641 count = streamer_read_uhwi (ib_main);
1642 if (count)
1643 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
1644 for (i = 0; i < count; i++)
1645 {
1646 bit = streamer_read_uhwi (ib_main);
1647 bitmap_set_bit (node->clone.args_to_skip, bit);
1648 }
1649 count = streamer_read_uhwi (ib_main);
1650 if (count)
1651 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
1652 for (i = 0; i < count; i++)
1653 {
1654 bit = streamer_read_uhwi (ib_main);
1655 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
1656 }
1657 count = streamer_read_uhwi (ib_main);
1658 for (i = 0; i < count; i++)
1659 {
1660 int parm_num;
1661 tree parm;
1662 struct ipa_replace_map *map = ggc_alloc_ipa_replace_map ();
1663
1664 VEC_safe_push (ipa_replace_map_p, gc, node->clone.tree_map, map);
1665 for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm_num;
1666 parm = DECL_CHAIN (parm))
1667 parm_num --;
1668 map->parm_num = streamer_read_uhwi (ib_main);
1669 map->old_tree = NULL;
1670 map->new_tree = stream_read_tree (ib_main, data_in);
1671 bp = streamer_read_bitpack (ib_main);
1672 map->replace_p = bp_unpack_value (&bp, 1);
1673 map->ref_p = bp_unpack_value (&bp, 1);
1674 }
1675 for (e = node->callees; e; e = e->next_callee)
1676 input_edge_opt_summary (e, ib_main);
1677 for (e = node->indirect_calls; e; e = e->next_callee)
1678 input_edge_opt_summary (e, ib_main);
1679 }
1680
/* Read section in file FILE_DATA of length LEN with data DATA.  The
   section holds per-node optimization summaries for nodes in NODES.  */

static void
input_cgraph_opt_section (struct lto_file_decl_data *file_data,
			  const char *data, size_t len, VEC (cgraph_node_ptr,
							     heap) * nodes)
{
  /* The section starts with a function header describing the sizes of
     the cfg, main and string sub-streams that follow it.  */
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int32_t cfg_offset = sizeof (struct lto_function_header);
  const int32_t main_offset = cfg_offset + header->cfg_size;
  const int32_t string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
			header->main_size);

  /* Wrap the string table in DATA_IN so trees can be read back.  */
  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, NULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      /* Each record is an encoder index into NODES followed by the
	 node's summary.  */
      int ref = streamer_read_uhwi (&ib_main);
      input_node_opt_summary (VEC_index (cgraph_node_ptr, nodes, ref),
			      &ib_main, data_in);
    }
  lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
1716
1717 /* Input optimization summary of cgraph. */
1718
1719 static void
1720 input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes)
1721 {
1722 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1723 struct lto_file_decl_data *file_data;
1724 unsigned int j = 0;
1725
1726 while ((file_data = file_data_vec[j++]))
1727 {
1728 size_t len;
1729 const char *data =
1730 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
1731 &len);
1732
1733 if (data)
1734 input_cgraph_opt_section (file_data, data, len, nodes);
1735 }
1736 }