gcc/lto-cgraph.c
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright 2009, 2010, 2011 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "expr.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "hashtab.h"
33 #include "langhooks.h"
34 #include "basic-block.h"
35 #include "tree-flow.h"
36 #include "cgraph.h"
37 #include "function.h"
38 #include "ggc.h"
39 #include "diagnostic-core.h"
40 #include "except.h"
41 #include "vec.h"
42 #include "timevar.h"
43 #include "output.h"
44 #include "pointer-set.h"
45 #include "lto-streamer.h"
46 #include "data-streamer.h"
47 #include "tree-streamer.h"
48 #include "gcov-io.h"
49
50 static void output_varpool (cgraph_node_set, varpool_node_set);
51 static void output_cgraph_opt_summary (cgraph_node_set set);
52 static void input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes);
53
54 /* Number of LDPR values known to GCC. */
55 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
56
57 /* All node orders are offset by ORDER_BASE. */
58 static int order_base;
59
60 /* Cgraph streaming is organized as a set of records whose type
61 is indicated by a tag. */
62 enum LTO_cgraph_tags
63 {
64 /* Must leave 0 for the stopper. */
65
66 /* Cgraph node without body available. */
67 LTO_cgraph_unavail_node = 1,
68 /* Cgraph node with function body. */
69 LTO_cgraph_analyzed_node,
70 /* Cgraph edges. */
71 LTO_cgraph_edge,
72 LTO_cgraph_indirect_edge,
73 LTO_cgraph_last_tag
74 };
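/* On disk, an LTO_section_cgraph section is a sequence of records, each
   introduced by one of the tags above and terminated by a zero tag:
   first the node records, written in encoder order so that the nodes a
   clone was cloned from precede the clone itself, then the edge records
   for the nodes in the set.  input_cgraph_1 below reads the stream back
   with the same structure.  */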
75
76 /* Create a new cgraph encoder. */
77
78 lto_cgraph_encoder_t
79 lto_cgraph_encoder_new (void)
80 {
81 lto_cgraph_encoder_t encoder = XCNEW (struct lto_cgraph_encoder_d);
82 encoder->map = pointer_map_create ();
83 encoder->nodes = NULL;
84 encoder->body = pointer_set_create ();
85 return encoder;
86 }
87
88
89 /* Delete ENCODER and its components. */
90
91 void
92 lto_cgraph_encoder_delete (lto_cgraph_encoder_t encoder)
93 {
94 VEC_free (cgraph_node_ptr, heap, encoder->nodes);
95 pointer_map_destroy (encoder->map);
96 pointer_set_destroy (encoder->body);
97 free (encoder);
98 }
99
100
101 /* Return the existing reference number of NODE in the cgraph encoder
102 ENCODER. Assign a new reference if this is the first time NODE is
103 encoded. */
104
105 int
106 lto_cgraph_encoder_encode (lto_cgraph_encoder_t encoder,
107 struct cgraph_node *node)
108 {
109 int ref;
110 void **slot;
111
112 slot = pointer_map_contains (encoder->map, node);
113 if (!slot)
114 {
115 ref = VEC_length (cgraph_node_ptr, encoder->nodes);
116 slot = pointer_map_insert (encoder->map, node);
117 *slot = (void *) (intptr_t) ref;
118 VEC_safe_push (cgraph_node_ptr, heap, encoder->nodes, node);
119 }
120 else
121 ref = (int) (intptr_t) *slot;
122
123 return ref;
124 }
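/* A minimal usage sketch of the encoder API (hypothetical caller; the
   real callers are compute_ltrans_boundary and the streaming routines
   below):

     lto_cgraph_encoder_t enc = lto_cgraph_encoder_new ();
     int ref = lto_cgraph_encoder_encode (enc, node);
     gcc_assert (lto_cgraph_encoder_lookup (enc, node) == ref);
     gcc_assert (lto_cgraph_encoder_deref (enc, ref) == node);
     lto_cgraph_encoder_delete (enc);

   References are dense indexes starting at 0, so they also give the
   order in which nodes are written to the section.  */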
125
126 #define LCC_NOT_FOUND (-1)
127
128 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
129 or LCC_NOT_FOUND if it is not there. */
130
131 int
132 lto_cgraph_encoder_lookup (lto_cgraph_encoder_t encoder,
133 struct cgraph_node *node)
134 {
135 void **slot = pointer_map_contains (encoder->map, node);
136 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
137 }
138
139
140 /* Return the cgraph node corresponding to REF using ENCODER. */
141
142 struct cgraph_node *
143 lto_cgraph_encoder_deref (lto_cgraph_encoder_t encoder, int ref)
144 {
145 if (ref == LCC_NOT_FOUND)
146 return NULL;
147
148 return VEC_index (cgraph_node_ptr, encoder->nodes, ref);
149 }
150
151
152 /* Return TRUE if we should encode the body of NODE (if any). */
153
154 bool
155 lto_cgraph_encoder_encode_body_p (lto_cgraph_encoder_t encoder,
156 struct cgraph_node *node)
157 {
158 return pointer_set_contains (encoder->body, node);
159 }
160
161 /* Record that we should encode the body of NODE (if any). */
162
163 static void
164 lto_set_cgraph_encoder_encode_body (lto_cgraph_encoder_t encoder,
165 struct cgraph_node *node)
166 {
167 pointer_set_insert (encoder->body, node);
168 }
169
170 /* Create a new varpool encoder. */
171
172 lto_varpool_encoder_t
173 lto_varpool_encoder_new (void)
174 {
175 lto_varpool_encoder_t encoder = XCNEW (struct lto_varpool_encoder_d);
176 encoder->map = pointer_map_create ();
177 encoder->initializer = pointer_set_create ();
178 encoder->nodes = NULL;
179 return encoder;
180 }
181
182
183 /* Delete ENCODER and its components. */
184
185 void
186 lto_varpool_encoder_delete (lto_varpool_encoder_t encoder)
187 {
188 VEC_free (varpool_node_ptr, heap, encoder->nodes);
189 pointer_map_destroy (encoder->map);
190 pointer_set_destroy (encoder->initializer);
191 free (encoder);
192 }
193
194
195 /* Return the existing reference number of NODE in the varpool encoder
196 ENCODER. Assign a new reference if this is the first time NODE is
197 encoded. */
198
199 int
200 lto_varpool_encoder_encode (lto_varpool_encoder_t encoder,
201 struct varpool_node *node)
202 {
203 int ref;
204 void **slot;
205
206 slot = pointer_map_contains (encoder->map, node);
207 if (!slot)
208 {
209 ref = VEC_length (varpool_node_ptr, encoder->nodes);
210 slot = pointer_map_insert (encoder->map, node);
211 *slot = (void *) (intptr_t) ref;
212 VEC_safe_push (varpool_node_ptr, heap, encoder->nodes, node);
213 }
214 else
215 ref = (int) (intptr_t) *slot;
216
217 return ref;
218 }
219
220 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
221 or LCC_NOT_FOUND if it is not there. */
222
223 int
224 lto_varpool_encoder_lookup (lto_varpool_encoder_t encoder,
225 struct varpool_node *node)
226 {
227 void **slot = pointer_map_contains (encoder->map, node);
228 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
229 }
230
231
232 /* Return the varpool node corresponding to REF using ENCODER. */
233
234 struct varpool_node *
235 lto_varpool_encoder_deref (lto_varpool_encoder_t encoder, int ref)
236 {
237 if (ref == LCC_NOT_FOUND)
238 return NULL;
239
240 return VEC_index (varpool_node_ptr, encoder->nodes, ref);
241 }
242
243
244 /* Return TRUE if we should encode the initializer of NODE (if any). */
245
246 bool
247 lto_varpool_encoder_encode_initializer_p (lto_varpool_encoder_t encoder,
248 struct varpool_node *node)
249 {
250 return pointer_set_contains (encoder->initializer, node);
251 }
252
253 /* Record that we should encode the initializer of NODE (if any). */
254
255 static void
256 lto_set_varpool_encoder_encode_initializer (lto_varpool_encoder_t encoder,
257 struct varpool_node *node)
258 {
259 pointer_set_insert (encoder->initializer, node);
260 }
261
262 /* Output the cgraph EDGE to OB using ENCODER. */
263
264 static void
265 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
266 lto_cgraph_encoder_t encoder)
267 {
268 unsigned int uid;
269 intptr_t ref;
270 struct bitpack_d bp;
271
272 if (edge->indirect_unknown_callee)
273 streamer_write_enum (ob->main_stream, LTO_cgraph_tags, LTO_cgraph_last_tag,
274 LTO_cgraph_indirect_edge);
275 else
276 streamer_write_enum (ob->main_stream, LTO_cgraph_tags, LTO_cgraph_last_tag,
277 LTO_cgraph_edge);
278
279 ref = lto_cgraph_encoder_lookup (encoder, edge->caller);
280 gcc_assert (ref != LCC_NOT_FOUND);
281 streamer_write_hwi_stream (ob->main_stream, ref);
282
283 if (!edge->indirect_unknown_callee)
284 {
285 ref = lto_cgraph_encoder_lookup (encoder, edge->callee);
286 gcc_assert (ref != LCC_NOT_FOUND);
287 streamer_write_hwi_stream (ob->main_stream, ref);
288 }
289
290 streamer_write_hwi_stream (ob->main_stream, edge->count);
291
292 bp = bitpack_create (ob->main_stream);
293 uid = (!gimple_has_body_p (edge->caller->symbol.decl)
294 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt));
295 bp_pack_enum (&bp, cgraph_inline_failed_enum,
296 CIF_N_REASONS, edge->inline_failed);
297 bp_pack_var_len_unsigned (&bp, uid);
298 bp_pack_var_len_unsigned (&bp, edge->frequency);
299 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
300 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
301 bp_pack_value (&bp, edge->can_throw_external, 1);
302 if (edge->indirect_unknown_callee)
303 {
304 int flags = edge->indirect_info->ecf_flags;
305 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
306 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
307 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
308 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
309 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
310 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
311 /* Flags that should not appear on indirect calls. */
312 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
313 | ECF_MAY_BE_ALLOCA
314 | ECF_SIBCALL
315 | ECF_LEAF
316 | ECF_NOVOPS)));
317 }
318 streamer_write_bitpack (&bp);
319 }
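/* The record written above (tag, caller reference, optional callee
   reference, count, then the bitpack holding inline_failed, the stmt
   uid, the frequency and the flag bits) must be mirrored exactly by
   input_edge when reading it back; any change here needs a matching
   change there.  */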
320
321 /* Return true if LIST contains references from other partitions. */
322
323 bool
324 referenced_from_other_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
325 varpool_node_set vset)
326 {
327 int i;
328 struct ipa_ref *ref;
329 for (i = 0; ipa_ref_list_referring_iterate (list, i, ref); i++)
330 {
331 if (symtab_function_p (ref->referring))
332 {
333 if (ipa_ref_referring_node (ref)->symbol.in_other_partition
334 || !cgraph_node_in_set_p (ipa_ref_referring_node (ref), set))
335 return true;
336 }
337 else
338 {
339 if (ipa_ref_referring_varpool_node (ref)->symbol.in_other_partition
340 || !varpool_node_in_set_p (ipa_ref_referring_varpool_node (ref),
341 vset))
342 return true;
343 }
344 }
345 return false;
346 }
347
348 /* Return true when NODE is reachable from another partition. */
349
350 bool
351 reachable_from_other_partition_p (struct cgraph_node *node, cgraph_node_set set)
352 {
353 struct cgraph_edge *e;
354 if (!node->analyzed)
355 return false;
356 if (node->global.inlined_to)
357 return false;
358 for (e = node->callers; e; e = e->next_caller)
359 if (e->caller->symbol.in_other_partition
360 || !cgraph_node_in_set_p (e->caller, set))
361 return true;
362 return false;
363 }
364
365 /* Return true if LIST contains references from this partition. */
366
367 bool
368 referenced_from_this_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
369 varpool_node_set vset)
370 {
371 int i;
372 struct ipa_ref *ref;
373 for (i = 0; ipa_ref_list_referring_iterate (list, i, ref); i++)
374 {
375 if (symtab_function_p (ref->referring))
376 {
377 if (cgraph_node_in_set_p (ipa_ref_referring_node (ref), set))
378 return true;
379 }
380 else
381 {
382 if (varpool_node_in_set_p (ipa_ref_referring_varpool_node (ref),
383 vset))
384 return true;
385 }
386 }
387 return false;
388 }
389
390 /* Return true when NODE is reachable from this partition. */
391
392 bool
393 reachable_from_this_partition_p (struct cgraph_node *node, cgraph_node_set set)
394 {
395 struct cgraph_edge *e;
396 for (e = node->callers; e; e = e->next_caller)
397 if (cgraph_node_in_set_p (e->caller, set))
398 return true;
399 return false;
400 }
401
402 /* Output the cgraph NODE to OB. ENCODER is used to find the
403 reference number of NODE->inlined_to. SET is the set of nodes we
404 are writing to the current file. If NODE is not in SET, then NODE
405 is a boundary of a cgraph_node_set and we pretend NODE just has a
406 decl and no callees. */
409
410 static void
411 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
412 lto_cgraph_encoder_t encoder, cgraph_node_set set,
413 varpool_node_set vset)
414 {
415 unsigned int tag;
416 struct bitpack_d bp;
417 bool boundary_p;
418 intptr_t ref;
419 bool in_other_partition = false;
420 struct cgraph_node *clone_of;
421
422 boundary_p = !cgraph_node_in_set_p (node, set);
423
424 if (node->analyzed && !boundary_p)
425 tag = LTO_cgraph_analyzed_node;
426 else
427 tag = LTO_cgraph_unavail_node;
428
429 streamer_write_enum (ob->main_stream, LTO_cgraph_tags, LTO_cgraph_last_tag,
430 tag);
431 streamer_write_hwi_stream (ob->main_stream, node->symbol.order);
432
433 /* In WPA mode, we only output part of the call-graph. Also, we
434 fake cgraph node attributes. There are two cases we care about.
435
436 Boundary nodes: These are nodes that are not part of SET but are
437 called from within SET. We artificially make them look like
438 externally visible nodes with no function body.
439
440 Cherry-picked nodes: These are nodes we pulled from other
441 translation units into SET during IPA-inlining. We make them
442 local static nodes to prevent clashes with other local statics. */
443 if (boundary_p && node->analyzed)
444 {
445 /* Inline clones cannot be part of a boundary.
446 gcc_assert (!node->global.inlined_to);
447
448 FIXME: At the moment they can be, when the partition contains an inline
449 clone that is a clone of an inline clone from outside the partition. We
450 could reshape the clone tree and make another node the root, but that
451 needs a bit of extra work and will be done promptly by cgraph_remove_node
452 after reading back. */
453 in_other_partition = 1;
454 }
455
456 clone_of = node->clone_of;
457 while (clone_of
458 && (ref = lto_cgraph_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
459 if (clone_of->prev_sibling_clone)
460 clone_of = clone_of->prev_sibling_clone;
461 else
462 clone_of = clone_of->clone_of;
463
464 if (tag == LTO_cgraph_analyzed_node)
465 gcc_assert (clone_of || !node->clone_of);
466 if (!clone_of)
467 streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
468 else
469 streamer_write_hwi_stream (ob->main_stream, ref);
470
471
472 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->symbol.decl);
473 streamer_write_hwi_stream (ob->main_stream, node->count);
474 streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
475
476 if (tag == LTO_cgraph_analyzed_node)
477 {
478 if (node->global.inlined_to)
479 {
480 ref = lto_cgraph_encoder_lookup (encoder, node->global.inlined_to);
481 gcc_assert (ref != LCC_NOT_FOUND);
482 }
483 else
484 ref = LCC_NOT_FOUND;
485
486 streamer_write_hwi_stream (ob->main_stream, ref);
487 }
488
489 if (node->symbol.same_comdat_group && !boundary_p)
490 {
491 ref = lto_cgraph_encoder_lookup (encoder,
492 cgraph (node->symbol.same_comdat_group));
493 gcc_assert (ref != LCC_NOT_FOUND);
494 }
495 else
496 ref = LCC_NOT_FOUND;
497 streamer_write_hwi_stream (ob->main_stream, ref);
498
499 bp = bitpack_create (ob->main_stream);
500 bp_pack_value (&bp, node->local.local, 1);
501 bp_pack_value (&bp, node->symbol.externally_visible, 1);
502 bp_pack_value (&bp, node->local.finalized, 1);
503 bp_pack_value (&bp, node->local.versionable, 1);
504 bp_pack_value (&bp, node->local.can_change_signature, 1);
505 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
506 bp_pack_value (&bp, node->symbol.force_output, 1);
507 bp_pack_value (&bp, node->symbol.address_taken, 1);
508 bp_pack_value (&bp, node->abstract_and_needed, 1);
509 bp_pack_value (&bp, tag == LTO_cgraph_analyzed_node
510 && !DECL_EXTERNAL (node->symbol.decl)
511 && !DECL_COMDAT (node->symbol.decl)
512 && (reachable_from_other_partition_p (node, set)
513 || referenced_from_other_partition_p (&node->symbol.ref_list,
514 set, vset)), 1);
515 bp_pack_value (&bp, node->lowered, 1);
516 bp_pack_value (&bp, in_other_partition, 1);
517 /* Real aliases in a boundary become non-aliases. However we still stream
518 alias info on weakrefs.
519 TODO: We lose a bit of information here - when we know that the variable is
520 defined in another unit, we may use the info on aliases to resolve
521 symbol1 != symbol2 type tests that we can otherwise do only for locally
522 defined objects. */
523 bp_pack_value (&bp, node->alias && (!boundary_p || DECL_EXTERNAL (node->symbol.decl)), 1);
524 bp_pack_value (&bp, node->frequency, 2);
525 bp_pack_value (&bp, node->only_called_at_startup, 1);
526 bp_pack_value (&bp, node->only_called_at_exit, 1);
527 bp_pack_value (&bp, node->tm_clone, 1);
528 bp_pack_value (&bp, node->thunk.thunk_p && !boundary_p, 1);
529 bp_pack_enum (&bp, ld_plugin_symbol_resolution,
530 LDPR_NUM_KNOWN, node->symbol.resolution);
531 streamer_write_bitpack (&bp);
532
533 if (node->thunk.thunk_p && !boundary_p)
534 {
535 streamer_write_uhwi_stream
536 (ob->main_stream,
537 1 + (node->thunk.this_adjusting != 0) * 2
538 + (node->thunk.virtual_offset_p != 0) * 4);
539 streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
540 streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
541 }
542 if ((node->alias || node->thunk.thunk_p)
543 && (!boundary_p || (node->alias && DECL_EXTERNAL (node->symbol.decl))))
544 {
545 streamer_write_hwi_in_range (ob->main_stream, 0, 1,
546 node->thunk.alias != NULL);
547 if (node->thunk.alias != NULL)
548 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
549 node->thunk.alias);
550 }
551 }
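/* The bit layout packed above must stay in sync with
   input_overwrite_node, and the trailing thunk/alias data with
   input_node: the reader unpacks the bits positionally, so adding,
   removing or reordering a field on one side without the other silently
   corrupts the stream.  */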
552
553 /* Output the varpool NODE to OB.
554 If NODE is not in VSET, then NODE is a boundary. */
555
556 static void
557 lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
558 lto_varpool_encoder_t varpool_encoder,
559 cgraph_node_set set, varpool_node_set vset)
560 {
561 bool boundary_p = !varpool_node_in_set_p (node, vset) && node->analyzed;
562 struct bitpack_d bp;
563 int ref;
564
565 streamer_write_hwi_stream (ob->main_stream, node->symbol.order);
566 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->symbol.decl);
567 bp = bitpack_create (ob->main_stream);
568 bp_pack_value (&bp, node->symbol.externally_visible, 1);
569 bp_pack_value (&bp, node->symbol.force_output, 1);
570 bp_pack_value (&bp, node->finalized, 1);
571 bp_pack_value (&bp, node->alias, 1);
572 bp_pack_value (&bp, node->alias_of != NULL, 1);
573 gcc_assert (node->finalized || !node->analyzed);
574 /* Constant pool initializers can be de-unified into individual ltrans units.
575 FIXME: Alternatively, at -Os we may want to avoid generating the local
576 labels for them and instead share them across LTRANS partitions. */
577 if (DECL_IN_CONSTANT_POOL (node->symbol.decl)
578 && !DECL_COMDAT (node->symbol.decl))
579 {
580 bp_pack_value (&bp, 0, 1); /* used_from_other_partition. */
581 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
582 }
583 else
584 {
585 bp_pack_value (&bp, node->analyzed
586 && referenced_from_other_partition_p (&node->symbol.ref_list,
587 set, vset), 1);
588 bp_pack_value (&bp, boundary_p, 1); /* in_other_partition. */
589 }
590 streamer_write_bitpack (&bp);
591 if (node->alias_of)
592 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->alias_of);
593 if (node->symbol.same_comdat_group && !boundary_p)
594 {
595 ref = lto_varpool_encoder_lookup (varpool_encoder,
596 varpool (node->symbol.same_comdat_group));
597 gcc_assert (ref != LCC_NOT_FOUND);
598 }
599 else
600 ref = LCC_NOT_FOUND;
601 streamer_write_hwi_stream (ob->main_stream, ref);
602 streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
603 LDPR_NUM_KNOWN, node->symbol.resolution);
604 }
605
606 /* Output the IPA reference REF to OB, using ENCODER and VARPOOL_ENCODER
607 to translate the referred node into a reference number. */
608
609 static void
610 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
611 lto_cgraph_encoder_t encoder,
612 lto_varpool_encoder_t varpool_encoder)
613 {
614 struct bitpack_d bp;
615 bp = bitpack_create (ob->main_stream);
616 bp_pack_value (&bp, symtab_function_p (ref->referred), 1);
617 bp_pack_value (&bp, ref->use, 2);
618 streamer_write_bitpack (&bp);
619 if (symtab_function_p (ref->referred))
620 {
621 int nref = lto_cgraph_encoder_lookup (encoder, ipa_ref_node (ref));
622 gcc_assert (nref != LCC_NOT_FOUND);
623 streamer_write_hwi_stream (ob->main_stream, nref);
624 }
625 else
626 {
627 int nref = lto_varpool_encoder_lookup (varpool_encoder,
628 ipa_ref_varpool_node (ref));
629 gcc_assert (nref != LCC_NOT_FOUND);
630 streamer_write_hwi_stream (ob->main_stream, nref);
631 }
632 }
633
634 /* Stream out profile_summary to OB. */
635
636 static void
637 output_profile_summary (struct lto_simple_output_block *ob)
638 {
639 if (profile_info)
640 {
641 /* We do not output num, sum_all and run_max, they are not used by
642 GCC profile feedback and they are difficult to merge from multiple
643 units. */
644 gcc_assert (profile_info->runs);
645 streamer_write_uhwi_stream (ob->main_stream, profile_info->runs);
646 streamer_write_uhwi_stream (ob->main_stream, profile_info->sum_max);
647 }
648 else
649 streamer_write_uhwi_stream (ob->main_stream, 0);
650 }
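/* The summary is just two unsigned HWIs: the number of runs (0 when no
   profile data is present) followed by sum_max; input_profile_summary
   reads them back in the same order.  */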
651
652 /* Add NODE into encoder as well as nodes it is cloned from.
653 Do it in a way so that the nodes being cloned from appear first. */
654
655 static void
656 add_node_to (lto_cgraph_encoder_t encoder, struct cgraph_node *node,
657 bool include_body)
658 {
659 if (node->clone_of)
660 add_node_to (encoder, node->clone_of, include_body);
661 else if (include_body)
662 lto_set_cgraph_encoder_encode_body (encoder, node);
663 lto_cgraph_encoder_encode (encoder, node);
664 }
665
666 /* Add all references in LIST to encoders. */
667
668 static void
669 add_references (lto_cgraph_encoder_t encoder,
670 lto_varpool_encoder_t varpool_encoder,
671 struct ipa_ref_list *list)
672 {
673 int i;
674 struct ipa_ref *ref;
675 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
676 if (symtab_function_p (ref->referred))
677 add_node_to (encoder, ipa_ref_node (ref), false);
678 else
679 {
680 struct varpool_node *vnode = ipa_ref_varpool_node (ref);
681 lto_varpool_encoder_encode (varpool_encoder, vnode);
682 }
683 }
684
685 /* Output all callees or indirect outgoing edges. EDGE must be the first such
686 edge. */
687
688 static void
689 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
690 struct lto_simple_output_block *ob,
691 lto_cgraph_encoder_t encoder)
692 {
693 if (!edge)
694 return;
695
696 /* Output edges in backward direction, so that the reconstructed callgraph
697 matches the original order and call sites are easy to associate with the IPA pass summaries. */
698 while (edge->next_callee)
699 edge = edge->next_callee;
700 for (; edge; edge = edge->prev_callee)
701 lto_output_edge (ob, edge, encoder);
702 }
703
704 /* Output the IPA references of the nodes in SET and VSET. */
705
706 static void
707 output_refs (cgraph_node_set set, varpool_node_set vset,
708 lto_cgraph_encoder_t encoder,
709 lto_varpool_encoder_t varpool_encoder)
710 {
711 cgraph_node_set_iterator csi;
712 varpool_node_set_iterator vsi;
713 struct lto_simple_output_block *ob;
714 int count;
715 struct ipa_ref *ref;
716 int i;
717
718 ob = lto_create_simple_output_block (LTO_section_refs);
719
720 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
721 {
722 struct cgraph_node *node = csi_node (csi);
723
724 count = ipa_ref_list_nreferences (&node->symbol.ref_list);
725 if (count)
726 {
727 streamer_write_uhwi_stream (ob->main_stream, count);
728 streamer_write_uhwi_stream (ob->main_stream,
729 lto_cgraph_encoder_lookup (encoder, node));
730 for (i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list,
731 i, ref); i++)
732 lto_output_ref (ob, ref, encoder, varpool_encoder);
733 }
734 }
735
736 streamer_write_uhwi_stream (ob->main_stream, 0);
737
738 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
739 {
740 struct varpool_node *node = vsi_node (vsi);
741
742 count = ipa_ref_list_nreferences (&node->symbol.ref_list);
743 if (count)
744 {
745 streamer_write_uhwi_stream (ob->main_stream, count);
746 streamer_write_uhwi_stream (ob->main_stream,
747 lto_varpool_encoder_lookup (varpool_encoder,
748 node));
749 for (i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list,
750 i, ref); i++)
751 lto_output_ref (ob, ref, encoder, varpool_encoder);
752 }
753 }
754
755 streamer_write_uhwi_stream (ob->main_stream, 0);
756
757 lto_destroy_simple_output_block (ob);
758 }
759
760 /* Find out all cgraph and varpool nodes we want to encode in the current unit
761 and insert them into the encoders. */
762 void
763 compute_ltrans_boundary (struct lto_out_decl_state *state,
764 cgraph_node_set set, varpool_node_set vset)
765 {
766 struct cgraph_node *node;
767 cgraph_node_set_iterator csi;
768 varpool_node_set_iterator vsi;
769 struct cgraph_edge *edge;
770 int i;
771 lto_cgraph_encoder_t encoder;
772 lto_varpool_encoder_t varpool_encoder;
773
774 encoder = state->cgraph_node_encoder = lto_cgraph_encoder_new ();
775 varpool_encoder = state->varpool_node_encoder = lto_varpool_encoder_new ();
776
777 /* Go over all the nodes in SET and assign references. */
778 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
779 {
780 node = csi_node (csi);
781 add_node_to (encoder, node, true);
782 add_references (encoder, varpool_encoder, &node->symbol.ref_list);
783 }
784 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
785 {
786 struct varpool_node *vnode = vsi_node (vsi);
787 gcc_assert (!vnode->alias || vnode->alias_of);
788 lto_varpool_encoder_encode (varpool_encoder, vnode);
789 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
790 add_references (encoder, varpool_encoder, &vnode->symbol.ref_list);
791 }
792 /* Also pickle the initializers of all referenced readonly variables
793 to help folding. Constant pool variables are not shared, so we must
794 pickle those too. */
795 for (i = 0; i < lto_varpool_encoder_size (varpool_encoder); i++)
796 {
797 struct varpool_node *vnode = lto_varpool_encoder_deref (varpool_encoder, i);
798 if (DECL_INITIAL (vnode->symbol.decl)
799 && !lto_varpool_encoder_encode_initializer_p (varpool_encoder,
800 vnode)
801 && const_value_known_p (vnode->symbol.decl))
802 {
803 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
804 add_references (encoder, varpool_encoder, &vnode->symbol.ref_list);
805 }
806 else if (vnode->alias || vnode->alias_of)
807 add_references (encoder, varpool_encoder, &vnode->symbol.ref_list);
808 }
809
810 /* Go over all the nodes again to include callees that are not in
811 SET. */
812 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
813 {
814 node = csi_node (csi);
815 for (edge = node->callees; edge; edge = edge->next_callee)
816 {
817 struct cgraph_node *callee = edge->callee;
818 if (!cgraph_node_in_set_p (callee, set))
819 {
820 /* We should have moved all the inlines. */
821 gcc_assert (!callee->global.inlined_to);
822 add_node_to (encoder, callee, false);
823 }
824 }
825 }
826 }
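/* A rough sketch of how this is meant to be driven when writing one
   partition (hypothetical driver code, assuming STATE is the decl state
   that is current when output_cgraph runs):

     struct lto_out_decl_state *state = lto_get_out_decl_state ();
     compute_ltrans_boundary (state, set, vset);
     ...
     output_cgraph (set, vset);

   compute_ltrans_boundary fills the cgraph and varpool encoders in
   STATE; output_cgraph then streams exactly the nodes recorded there.  */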
827
828 /* Output the part of the cgraph in SET. */
829
830 void
831 output_cgraph (cgraph_node_set set, varpool_node_set vset)
832 {
833 struct cgraph_node *node;
834 struct lto_simple_output_block *ob;
835 cgraph_node_set_iterator csi;
836 int i, n_nodes;
837 lto_cgraph_encoder_t encoder;
838 lto_varpool_encoder_t varpool_encoder;
839 static bool asm_nodes_output = false;
840
841 if (flag_wpa)
842 output_cgraph_opt_summary (set);
843
844 ob = lto_create_simple_output_block (LTO_section_cgraph);
845
846 output_profile_summary (ob);
847
848 /* An encoder for cgraph nodes should have been created by
849 ipa_write_summaries_1. */
850 gcc_assert (ob->decl_state->cgraph_node_encoder);
851 gcc_assert (ob->decl_state->varpool_node_encoder);
852 encoder = ob->decl_state->cgraph_node_encoder;
853 varpool_encoder = ob->decl_state->varpool_node_encoder;
854
855 /* Write out the nodes. We must output a node before its clones,
856 otherwise when reading a clone back there would be nothing to clone
857 from. */
858 n_nodes = lto_cgraph_encoder_size (encoder);
859 for (i = 0; i < n_nodes; i++)
860 {
861 node = lto_cgraph_encoder_deref (encoder, i);
862 lto_output_node (ob, node, encoder, set, vset);
863 }
864
865 /* Go over the nodes in SET again to write edges. */
866 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
867 {
868 node = csi_node (csi);
869 output_outgoing_cgraph_edges (node->callees, ob, encoder);
870 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
871 }
872
873 streamer_write_uhwi_stream (ob->main_stream, 0);
874
875 lto_destroy_simple_output_block (ob);
876
877 /* Emit toplevel asms.
878 When doing WPA we must output every asm just once. Since we do not partition asm
879 nodes at all, output them to the first output. This is a kind of hack, but it
880 should work well. */
881 if (!asm_nodes_output)
882 {
883 asm_nodes_output = true;
884 lto_output_toplevel_asms ();
885 }
886
887 output_varpool (set, vset);
888 output_refs (set, vset, encoder, varpool_encoder);
889 }
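/* Section layout produced above and consumed in the same order by
   input_cgraph below: the profile summary plus the node and edge records
   go into LTO_section_cgraph, toplevel asms into their own section (once
   per compilation), varpool nodes into LTO_section_varpool, and the IPA
   references into LTO_section_refs.  */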
890
891 /* Overwrite the information in NODE based on FILE_DATA, TAG and the
892 flags in BP. This is called either to initialize NODE or to replace
893 the values in it, for instance because the first time we saw it, the
894 function body was not available but now it is. BP is a bitpack with
895 all the bitflags for NODE read from the stream. */
897
898 static void
899 input_overwrite_node (struct lto_file_decl_data *file_data,
900 struct cgraph_node *node,
901 enum LTO_cgraph_tags tag,
902 struct bitpack_d *bp)
903 {
904 node->symbol.aux = (void *) tag;
905 node->symbol.lto_file_data = file_data;
906
907 node->local.local = bp_unpack_value (bp, 1);
908 node->symbol.externally_visible = bp_unpack_value (bp, 1);
909 node->local.finalized = bp_unpack_value (bp, 1);
910 node->local.versionable = bp_unpack_value (bp, 1);
911 node->local.can_change_signature = bp_unpack_value (bp, 1);
912 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
913 node->symbol.force_output = bp_unpack_value (bp, 1);
914 node->symbol.address_taken = bp_unpack_value (bp, 1);
915 node->abstract_and_needed = bp_unpack_value (bp, 1);
916 node->symbol.used_from_other_partition = bp_unpack_value (bp, 1);
917 node->lowered = bp_unpack_value (bp, 1);
918 node->analyzed = tag == LTO_cgraph_analyzed_node;
919 node->symbol.in_other_partition = bp_unpack_value (bp, 1);
920 if (node->symbol.in_other_partition
921 /* Avoid updating decl when we are seeing just inline clone.
922 When inlining function that has functions already inlined into it,
923 we produce clones of inline clones.
924
925 WPA partitioning might put each clone into different unit and
926 we might end up streaming inline clone from other partition
927 to support clone we are interested in. */
928 && (!node->clone_of
929 || node->clone_of->symbol.decl != node->symbol.decl))
930 {
931 DECL_EXTERNAL (node->symbol.decl) = 1;
932 TREE_STATIC (node->symbol.decl) = 0;
933 }
934 node->alias = bp_unpack_value (bp, 1);
935 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
936 node->only_called_at_startup = bp_unpack_value (bp, 1);
937 node->only_called_at_exit = bp_unpack_value (bp, 1);
938 node->tm_clone = bp_unpack_value (bp, 1);
939 node->thunk.thunk_p = bp_unpack_value (bp, 1);
940 node->symbol.resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
941 LDPR_NUM_KNOWN);
942 }
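/* See the note after lto_output_node: the unpack order above is the wire
   format, so it must track the pack order there bit for bit.  */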
943
944 /* Output the varpool nodes recorded in the varpool encoder. */
945
946 static void
947 output_varpool (cgraph_node_set set, varpool_node_set vset)
948 {
949 struct lto_simple_output_block *ob = lto_create_simple_output_block (LTO_section_varpool);
950 lto_varpool_encoder_t varpool_encoder = ob->decl_state->varpool_node_encoder;
951 int len = lto_varpool_encoder_size (varpool_encoder), i;
952
953 streamer_write_uhwi_stream (ob->main_stream, len);
954
955 /* Write out the nodes in the order recorded in the encoder. (Varpool
956 nodes have no clones, so unlike the cgraph there is no ordering
957 constraint beyond that.) */
958 for (i = 0; i < len; i++)
959 {
960 lto_output_varpool_node (ob, lto_varpool_encoder_deref (varpool_encoder, i),
961 varpool_encoder,
962 set, vset);
963 }
964
965 lto_destroy_simple_output_block (ob);
966 }
967
968 /* Read a node from input_block IB. TAG is the node's tag just read.
969 Return the node read or overwritten. */
970
971 static struct cgraph_node *
972 input_node (struct lto_file_decl_data *file_data,
973 struct lto_input_block *ib,
974 enum LTO_cgraph_tags tag,
975 VEC(cgraph_node_ptr, heap) *nodes)
976 {
977 tree fn_decl;
978 struct cgraph_node *node;
979 struct bitpack_d bp;
980 unsigned decl_index;
981 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
982 int clone_ref;
983 int order;
984
985 order = streamer_read_hwi (ib) + order_base;
986 clone_ref = streamer_read_hwi (ib);
987
988 decl_index = streamer_read_uhwi (ib);
989 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
990
991 if (clone_ref != LCC_NOT_FOUND)
992 {
993 node = cgraph_clone_node (VEC_index (cgraph_node_ptr, nodes, clone_ref), fn_decl,
994 0, CGRAPH_FREQ_BASE, false, NULL, false);
995 }
996 else
997 node = cgraph_get_create_node (fn_decl);
998
999 node->symbol.order = order;
1000 if (order >= symtab_order)
1001 symtab_order = order + 1;
1002
1003 node->count = streamer_read_hwi (ib);
1004 node->count_materialization_scale = streamer_read_hwi (ib);
1005
1006 if (tag == LTO_cgraph_analyzed_node)
1007 ref = streamer_read_hwi (ib);
1008
1009 ref2 = streamer_read_hwi (ib);
1010
1011 /* Make sure that we have not read this node before. Nodes that
1012 have already been read will have their tag stored in the 'aux'
1013 field. Since built-in functions can be referenced in multiple
1014 functions, they are expected to be read more than once. */
1015 if (node->symbol.aux && !DECL_BUILT_IN (node->symbol.decl))
1016 internal_error ("bytecode stream: found multiple instances of cgraph "
1017 "node %d", node->uid);
1018
1019 bp = streamer_read_bitpack (ib);
1020 input_overwrite_node (file_data, node, tag, &bp);
1021
1022 /* Store a reference for now, and fix up later to be a pointer. */
1023 node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
1024
1025 /* Store a reference for now, and fix up later to be a pointer. */
1026 node->symbol.same_comdat_group = (symtab_node) (intptr_t) ref2;
1027
1028 if (node->thunk.thunk_p)
1029 {
1030 int type = streamer_read_uhwi (ib);
1031 HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
1032 HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);
1033
1034 node->thunk.fixed_offset = fixed_offset;
1035 node->thunk.this_adjusting = (type & 2);
1036 node->thunk.virtual_value = virtual_value;
1037 node->thunk.virtual_offset_p = (type & 4);
1038 }
1039 if (node->thunk.thunk_p || node->alias)
1040 {
1041 if (streamer_read_hwi_in_range (ib, "alias nonzero flag", 0, 1))
1042 {
1043 decl_index = streamer_read_uhwi (ib);
1044 node->thunk.alias = lto_file_decl_data_get_fn_decl (file_data,
1045 decl_index);
1046 }
1047 }
1048 return node;
1049 }
1050
1051 /* Read a varpool node from input_block IB using data in FILE_DATA.
1052 Return the node read or overwritten. */
1053
1054 static struct varpool_node *
1055 input_varpool_node (struct lto_file_decl_data *file_data,
1056 struct lto_input_block *ib)
1057 {
1058 int decl_index;
1059 tree var_decl;
1060 struct varpool_node *node;
1061 struct bitpack_d bp;
1062 int ref = LCC_NOT_FOUND;
1063 bool non_null_aliasof;
1064 int order;
1065
1066 order = streamer_read_hwi (ib) + order_base;
1067 decl_index = streamer_read_uhwi (ib);
1068 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1069 node = varpool_node (var_decl);
1070 node->symbol.order = order;
1071 if (order >= symtab_order)
1072 symtab_order = order + 1;
1073 node->symbol.lto_file_data = file_data;
1074
1075 bp = streamer_read_bitpack (ib);
1076 node->symbol.externally_visible = bp_unpack_value (&bp, 1);
1077 node->symbol.force_output = bp_unpack_value (&bp, 1);
1078 node->finalized = bp_unpack_value (&bp, 1);
1079 node->alias = bp_unpack_value (&bp, 1);
1080 non_null_aliasof = bp_unpack_value (&bp, 1);
1081 node->symbol.used_from_other_partition = bp_unpack_value (&bp, 1);
1082 node->symbol.in_other_partition = bp_unpack_value (&bp, 1);
1083 node->analyzed = (node->finalized && (!node->alias || !node->symbol.in_other_partition));
1084 if (node->symbol.in_other_partition)
1085 {
1086 DECL_EXTERNAL (node->symbol.decl) = 1;
1087 TREE_STATIC (node->symbol.decl) = 0;
1088 }
1089 if (non_null_aliasof)
1090 {
1091 decl_index = streamer_read_uhwi (ib);
1092 node->alias_of = lto_file_decl_data_get_var_decl (file_data, decl_index);
1093 }
1094 ref = streamer_read_hwi (ib);
1095 /* Store a reference for now, and fix up later to be a pointer. */
1096 node->symbol.same_comdat_group = (symtab_node) (intptr_t) ref;
1097 node->symbol.resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1098 LDPR_NUM_KNOWN);
1099
1100 return node;
1101 }
1102
1103 /* Read an IPA reference for REFERRING_NODE from input_block IB and
1104 record it, resolving the referred node via NODES or VARPOOL_NODES_VEC. */
1105
1106 static void
1107 input_ref (struct lto_input_block *ib,
1108 symtab_node referring_node,
1109 VEC(cgraph_node_ptr, heap) *nodes,
1110 VEC(varpool_node_ptr, heap) *varpool_nodes_vec)
1111 {
1112 struct cgraph_node *node = NULL;
1113 struct varpool_node *varpool_node = NULL;
1114 struct bitpack_d bp;
1115 int type;
1116 enum ipa_ref_use use;
1117
1118 bp = streamer_read_bitpack (ib);
1119 type = bp_unpack_value (&bp, 1);
1120 use = (enum ipa_ref_use) bp_unpack_value (&bp, 2);
1121 if (type)
1122 node = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
1123 else
1124 varpool_node = VEC_index (varpool_node_ptr, varpool_nodes_vec,
1125 streamer_read_hwi (ib));
1126 ipa_record_reference (referring_node,
1127 node ? (symtab_node) node : (symtab_node) varpool_node, use, NULL);
1128 }
1129
1130 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1131 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1132 edge being read is indirect (in the sense that it has
1133 indirect_unknown_callee set). */
1134
1135 static void
1136 input_edge (struct lto_input_block *ib, VEC(cgraph_node_ptr, heap) *nodes,
1137 bool indirect)
1138 {
1139 struct cgraph_node *caller, *callee;
1140 struct cgraph_edge *edge;
1141 unsigned int stmt_id;
1142 gcov_type count;
1143 int freq;
1144 cgraph_inline_failed_t inline_failed;
1145 struct bitpack_d bp;
1146 int ecf_flags = 0;
1147
1148 caller = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
1149 if (caller == NULL || caller->symbol.decl == NULL_TREE)
1150 internal_error ("bytecode stream: no caller found while reading edge");
1151
1152 if (!indirect)
1153 {
1154 callee = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
1155 if (callee == NULL || callee->symbol.decl == NULL_TREE)
1156 internal_error ("bytecode stream: no callee found while reading edge");
1157 }
1158 else
1159 callee = NULL;
1160
1161 count = (gcov_type) streamer_read_hwi (ib);
1162
1163 bp = streamer_read_bitpack (ib);
1164 inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_enum, CIF_N_REASONS);
1165 stmt_id = bp_unpack_var_len_unsigned (&bp);
1166 freq = (int) bp_unpack_var_len_unsigned (&bp);
1167
1168 if (indirect)
1169 edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq);
1170 else
1171 edge = cgraph_create_edge (caller, callee, NULL, count, freq);
1172
1173 edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
1174 edge->lto_stmt_uid = stmt_id;
1175 edge->inline_failed = inline_failed;
1176 edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
1177 edge->can_throw_external = bp_unpack_value (&bp, 1);
1178 if (indirect)
1179 {
1180 if (bp_unpack_value (&bp, 1))
1181 ecf_flags |= ECF_CONST;
1182 if (bp_unpack_value (&bp, 1))
1183 ecf_flags |= ECF_PURE;
1184 if (bp_unpack_value (&bp, 1))
1185 ecf_flags |= ECF_NORETURN;
1186 if (bp_unpack_value (&bp, 1))
1187 ecf_flags |= ECF_MALLOC;
1188 if (bp_unpack_value (&bp, 1))
1189 ecf_flags |= ECF_NOTHROW;
1190 if (bp_unpack_value (&bp, 1))
1191 ecf_flags |= ECF_RETURNS_TWICE;
1192 edge->indirect_info->ecf_flags = ecf_flags;
1193 }
1194 }
1195
1196
1197 /* Read a cgraph from IB using the info in FILE_DATA. */
1198
1199 static VEC(cgraph_node_ptr, heap) *
1200 input_cgraph_1 (struct lto_file_decl_data *file_data,
1201 struct lto_input_block *ib)
1202 {
1203 enum LTO_cgraph_tags tag;
1204 VEC(cgraph_node_ptr, heap) *nodes = NULL;
1205 struct cgraph_node *node;
1206 unsigned i;
1207
1208 tag = streamer_read_enum (ib, LTO_cgraph_tags, LTO_cgraph_last_tag);
1209 order_base = symtab_order;
1210 while (tag)
1211 {
1212 if (tag == LTO_cgraph_edge)
1213 input_edge (ib, nodes, false);
1214 else if (tag == LTO_cgraph_indirect_edge)
1215 input_edge (ib, nodes, true);
1216 else
1217 {
1218 node = input_node (file_data, ib, tag,nodes);
1219 if (node == NULL || node->symbol.decl == NULL_TREE)
1220 internal_error ("bytecode stream: found empty cgraph node");
1221 VEC_safe_push (cgraph_node_ptr, heap, nodes, node);
1222 lto_cgraph_encoder_encode (file_data->cgraph_node_encoder, node);
1223 }
1224
1225 tag = streamer_read_enum (ib, LTO_cgraph_tags, LTO_cgraph_last_tag);
1226 }
1227
1228 lto_input_toplevel_asms (file_data, order_base);
1229
1230 /* AUX pointers should be all non-zero for nodes read from the stream. */
1231 #ifdef ENABLE_CHECKING
1232 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1233 gcc_assert (node->symbol.aux);
1234 #endif
1235 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1236 {
1237 int ref = (int) (intptr_t) node->global.inlined_to;
1238
1239 /* We share declarations of builtins, so we may read the same node twice. */
1240 if (!node->symbol.aux)
1241 continue;
1242 node->symbol.aux = NULL;
1243
1244 /* Fixup inlined_to from reference to pointer. */
1245 if (ref != LCC_NOT_FOUND)
1246 node->global.inlined_to = VEC_index (cgraph_node_ptr, nodes, ref);
1247 else
1248 node->global.inlined_to = NULL;
1249
1250 ref = (int) (intptr_t) node->symbol.same_comdat_group;
1251
1252 /* Fixup same_comdat_group from reference to pointer. */
1253 if (ref != LCC_NOT_FOUND)
1254 node->symbol.same_comdat_group = (symtab_node)VEC_index (cgraph_node_ptr, nodes, ref);
1255 else
1256 node->symbol.same_comdat_group = NULL;
1257 }
1258 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1259 node->symbol.aux = (void *)1;
1260 return nodes;
1261 }
1262
1263 /* Read a varpool from IB using the info in FILE_DATA. */
1264
1265 static VEC(varpool_node_ptr, heap) *
1266 input_varpool_1 (struct lto_file_decl_data *file_data,
1267 struct lto_input_block *ib)
1268 {
1269 unsigned HOST_WIDE_INT len;
1270 VEC(varpool_node_ptr, heap) *varpool = NULL;
1271 int i;
1272 struct varpool_node *node;
1273
1274 len = streamer_read_uhwi (ib);
1275 while (len)
1276 {
1277 VEC_safe_push (varpool_node_ptr, heap, varpool,
1278 input_varpool_node (file_data, ib));
1279 len--;
1280 }
1281 #ifdef ENABLE_CHECKING
1282 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1283 gcc_assert (!node->symbol.aux);
1284 #endif
1285 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1286 {
1287 int ref = (int) (intptr_t) node->symbol.same_comdat_group;
1288 /* We share declarations of builtins, so we may read the same node twice. */
1289 if (node->symbol.aux)
1290 continue;
1291 node->symbol.aux = (void *)1;
1292
1293 /* Fixup same_comdat_group from reference to pointer. */
1294 if (ref != LCC_NOT_FOUND)
1295 node->symbol.same_comdat_group = (symtab_node)VEC_index (varpool_node_ptr, varpool, ref);
1296 else
1297 node->symbol.same_comdat_group = NULL;
1298 }
1299 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1300 node->symbol.aux = NULL;
1301 return varpool;
1302 }
1303
1304 /* Input ipa_refs. */
1305
1306 static void
1307 input_refs (struct lto_input_block *ib,
1308 VEC(cgraph_node_ptr, heap) *nodes,
1309 VEC(varpool_node_ptr, heap) *varpool)
1310 {
1311 int count;
1312 int idx;
1313 while (true)
1314 {
1315 struct cgraph_node *node;
1316 count = streamer_read_uhwi (ib);
1317 if (!count)
1318 break;
1319 idx = streamer_read_uhwi (ib);
1320 node = VEC_index (cgraph_node_ptr, nodes, idx);
1321 while (count)
1322 {
1323 input_ref (ib, (symtab_node) node, nodes, varpool);
1324 count--;
1325 }
1326 }
1327 while (true)
1328 {
1329 struct varpool_node *node;
1330 count = streamer_read_uhwi (ib);
1331 if (!count)
1332 break;
1333 node = VEC_index (varpool_node_ptr, varpool,
1334 streamer_read_uhwi (ib));
1335 while (count)
1336 {
1337 input_ref (ib, (symtab_node) node, nodes, varpool);
1338 count--;
1339 }
1340 }
1341 }
1342
1343
1344 static struct gcov_ctr_summary lto_gcov_summary;
1345
1346 /* Input profile_info from IB. */
1347 static void
1348 input_profile_summary (struct lto_input_block *ib,
1349 struct lto_file_decl_data *file_data)
1350 {
1351 unsigned int runs = streamer_read_uhwi (ib);
1352 if (runs)
1353 {
1354 file_data->profile_info.runs = runs;
1355 file_data->profile_info.sum_max = streamer_read_uhwi (ib);
1356 }
1357
1358 }
1359
1360 /* Rescale profile summaries to the same number of runs in the whole unit. */
1361
1362 static void
1363 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1364 {
1365 struct lto_file_decl_data *file_data;
1366 unsigned int j;
1367 gcov_unsigned_t max_runs = 0;
1368 struct cgraph_node *node;
1369 struct cgraph_edge *edge;
1370
1371 /* Find the unit with the maximal number of runs. If we ever get serious
1372 about roundoff errors, we might also consider computing the least
1373 common multiple. */
1374 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1375 if (max_runs < file_data->profile_info.runs)
1376 max_runs = file_data->profile_info.runs;
1377
1378 if (!max_runs)
1379 return;
1380
1381 /* Simple overflow check. We probably don't need to support that many train
1382 runs. Such a large value probably implies data corruption anyway. */
1383 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1384 {
1385 sorry ("At most %i profile runs are supported. Perhaps the profile is corrupted?",
1386 INT_MAX / REG_BR_PROB_BASE);
1387 return;
1388 }
1389
1390 profile_info = &lto_gcov_summary;
1391 lto_gcov_summary.runs = max_runs;
1392 lto_gcov_summary.sum_max = 0;
1393
1394 /* Rescale all units to the maximal number of runs.
1395 sum_max can not be easily merged, as we have no idea what files come from
1396 the same run. We do not use the info anyway, so leave it 0. */
1397 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1398 if (file_data->profile_info.runs)
1399 {
1400 int scale = ((REG_BR_PROB_BASE * max_runs
1401 + file_data->profile_info.runs / 2)
1402 / file_data->profile_info.runs);
1403 lto_gcov_summary.sum_max = MAX (lto_gcov_summary.sum_max,
1404 (file_data->profile_info.sum_max
1405 * scale
1406 + REG_BR_PROB_BASE / 2)
1407 / REG_BR_PROB_BASE);
1408 }
1409
1410 /* Watch roundoff errors. */
1411 if (lto_gcov_summary.sum_max < max_runs)
1412 lto_gcov_summary.sum_max = max_runs;
1413
1414 /* If merging already happened at WPA time, we are done. */
1415 if (flag_ltrans)
1416 return;
1417
1418 /* Now compute count_materialization_scale of each node.
1419 During LTRANS we already have values of count_materialization_scale
1420 computed, so just update them. */
1421 FOR_EACH_FUNCTION (node)
1422 if (node->symbol.lto_file_data
1423 && node->symbol.lto_file_data->profile_info.runs)
1424 {
1425 int scale;
1426
1427 scale =
1428 ((node->count_materialization_scale * max_runs
1429 + node->symbol.lto_file_data->profile_info.runs / 2)
1430 / node->symbol.lto_file_data->profile_info.runs);
1431 node->count_materialization_scale = scale;
1432 if (scale < 0)
1433 fatal_error ("Profile information in %s corrupted",
1434 node->symbol.lto_file_data->file_name);
1435
1436 if (scale == REG_BR_PROB_BASE)
1437 continue;
1438 for (edge = node->callees; edge; edge = edge->next_callee)
1439 edge->count = ((edge->count * scale + REG_BR_PROB_BASE / 2)
1440 / REG_BR_PROB_BASE);
1441 node->count = ((node->count * scale + REG_BR_PROB_BASE / 2)
1442 / REG_BR_PROB_BASE);
1443 }
1444 }
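/* Worked example of the rescaling above, assuming REG_BR_PROB_BASE is
   10000 (its usual value): with max_runs == 10 and a unit trained with
   profile_info.runs == 4, scale == (10000 * 10 + 2) / 4 == 25000, so that
   unit's counts are multiplied by scale / REG_BR_PROB_BASE == 2.5,
   bringing all units onto the same 10-run scale before merging.  */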
1445
1446 /* Input and merge the cgraph from each of the .o files passed to
1447 lto1. */
1448
1449 void
1450 input_cgraph (void)
1451 {
1452 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1453 struct lto_file_decl_data *file_data;
1454 unsigned int j = 0;
1455 struct cgraph_node *node;
1456
1457 cgraph_state = CGRAPH_STATE_IPA_SSA;
1458
1459 while ((file_data = file_data_vec[j++]))
1460 {
1461 const char *data;
1462 size_t len;
1463 struct lto_input_block *ib;
1464 VEC(cgraph_node_ptr, heap) *nodes;
1465 VEC(varpool_node_ptr, heap) *varpool;
1466
1467 ib = lto_create_simple_input_block (file_data, LTO_section_cgraph,
1468 &data, &len);
1469 if (!ib)
1470 fatal_error ("cannot find LTO cgraph in %s", file_data->file_name);
1471 input_profile_summary (ib, file_data);
1472 file_data->cgraph_node_encoder = lto_cgraph_encoder_new ();
1473 nodes = input_cgraph_1 (file_data, ib);
1474 lto_destroy_simple_input_block (file_data, LTO_section_cgraph,
1475 ib, data, len);
1476
1477 ib = lto_create_simple_input_block (file_data, LTO_section_varpool,
1478 &data, &len);
1479 if (!ib)
1480 fatal_error ("cannot find LTO varpool in %s", file_data->file_name);
1481 varpool = input_varpool_1 (file_data, ib);
1482 lto_destroy_simple_input_block (file_data, LTO_section_varpool,
1483 ib, data, len);
1484
1485 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1486 &data, &len);
1487 if (!ib)
1488 fatal_error("cannot find LTO section refs in %s", file_data->file_name);
1489 input_refs (ib, nodes, varpool);
1490 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1491 ib, data, len);
1492 if (flag_ltrans)
1493 input_cgraph_opt_summary (nodes);
1494 VEC_free (cgraph_node_ptr, heap, nodes);
1495 VEC_free (varpool_node_ptr, heap, varpool);
1496 }
1497
1498 merge_profile_summaries (file_data_vec);
1499
1500 /* Clear out the aux field that was used to store enough state to
1501 tell which nodes should be overwritten. */
1502 FOR_EACH_FUNCTION (node)
1503 {
1504 /* Some nodes may have been created by cgraph_node. This
1505 happens when the callgraph contains nested functions. If the
1506 node for the parent function was never emitted to the gimple
1507 file, cgraph_node will create a node for it when setting the
1508 context of the nested function. */
1509 if (node->symbol.lto_file_data)
1510 node->symbol.aux = NULL;
1511 }
1512 }
1513
1514 /* True when we need optimization summary for NODE. */
1515
1516 static int
1517 output_cgraph_opt_summary_p (struct cgraph_node *node,
1518 cgraph_node_set set ATTRIBUTE_UNUSED)
1519 {
1520 return (node->clone_of
1521 && (node->clone.tree_map
1522 || node->clone.args_to_skip
1523 || node->clone.combined_args_to_skip));
1524 }
1525
1526 /* Output optimization summary for EDGE to OB. */
1527 static void
1528 output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
1529 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
1530 {
1531 }
1532
1533 /* Output optimization summary for NODE to OB. */
1534
1535 static void
1536 output_node_opt_summary (struct output_block *ob,
1537 struct cgraph_node *node,
1538 cgraph_node_set set)
1539 {
1540 unsigned int index;
1541 bitmap_iterator bi;
1542 struct ipa_replace_map *map;
1543 struct bitpack_d bp;
1544 int i;
1545 struct cgraph_edge *e;
1546
1547 if (node->clone.args_to_skip)
1548 {
1549 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.args_to_skip));
1550 EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
1551 streamer_write_uhwi (ob, index);
1552 }
1553 else
1554 streamer_write_uhwi (ob, 0);
1555 if (node->clone.combined_args_to_skip)
1556 {
1557 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.combined_args_to_skip));
1558 EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
1559 streamer_write_uhwi (ob, index);
1560 }
1561 else
1562 streamer_write_uhwi (ob, 0);
1563 streamer_write_uhwi (ob, VEC_length (ipa_replace_map_p,
1564 node->clone.tree_map));
1565 FOR_EACH_VEC_ELT (ipa_replace_map_p, node->clone.tree_map, i, map)
1566 {
1567 int parm_num;
1568 tree parm;
1569
1570 for (parm_num = 0, parm = DECL_ARGUMENTS (node->symbol.decl); parm;
1571 parm = DECL_CHAIN (parm), parm_num++)
1572 if (map->old_tree == parm)
1573 break;
1574 /* At the moment we assume all old trees to be PARM_DECLs, because we have no
1575 mechanism to store function local declarations into summaries. */
1576 gcc_assert (parm);
1577 streamer_write_uhwi (ob, parm_num);
1578 stream_write_tree (ob, map->new_tree, true);
1579 bp = bitpack_create (ob->main_stream);
1580 bp_pack_value (&bp, map->replace_p, 1);
1581 bp_pack_value (&bp, map->ref_p, 1);
1582 streamer_write_bitpack (&bp);
1583 }
1584
1585 if (cgraph_node_in_set_p (node, set))
1586 {
1587 for (e = node->callees; e; e = e->next_callee)
1588 output_edge_opt_summary (ob, e);
1589 for (e = node->indirect_calls; e; e = e->next_callee)
1590 output_edge_opt_summary (ob, e);
1591 }
1592 }
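/* The per-node record written above is therefore: the args_to_skip
   bitmap (count of set bits followed by the bit indexes), the
   combined_args_to_skip bitmap in the same form, and the tree_map
   entries (parameter number, replacement tree, and a bitpack with
   replace_p and ref_p).  input_node_opt_summary reads it back field by
   field.  */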
1593
1594 /* Output optimization summaries stored in callgraph.
1595 At the moment it is the clone info structure. */
1596
1597 static void
1598 output_cgraph_opt_summary (cgraph_node_set set)
1599 {
1600 struct cgraph_node *node;
1601 int i, n_nodes;
1602 lto_cgraph_encoder_t encoder;
1603 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1604 unsigned count = 0;
1605
1606 ob->cgraph_node = NULL;
1607 encoder = ob->decl_state->cgraph_node_encoder;
1608 n_nodes = lto_cgraph_encoder_size (encoder);
1609 for (i = 0; i < n_nodes; i++)
1610 if (output_cgraph_opt_summary_p (lto_cgraph_encoder_deref (encoder, i),
1611 set))
1612 count++;
1613 streamer_write_uhwi (ob, count);
1614 for (i = 0; i < n_nodes; i++)
1615 {
1616 node = lto_cgraph_encoder_deref (encoder, i);
1617 if (output_cgraph_opt_summary_p (node, set))
1618 {
1619 streamer_write_uhwi (ob, i);
1620 output_node_opt_summary (ob, node, set);
1621 }
1622 }
1623 produce_asm (ob, NULL);
1624 destroy_output_block (ob);
1625 }
1626
1627 /* Input optimization summary of EDGE. */
1628
1629 static void
1630 input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
1631 struct lto_input_block *ib_main ATTRIBUTE_UNUSED)
1632 {
1633 }
1634
1635 /* Input optimization summary of NODE. */
1636
1637 static void
1638 input_node_opt_summary (struct cgraph_node *node,
1639 struct lto_input_block *ib_main,
1640 struct data_in *data_in)
1641 {
1642 int i;
1643 int count;
1644 int bit;
1645 struct bitpack_d bp;
1646 struct cgraph_edge *e;
1647
1648 count = streamer_read_uhwi (ib_main);
1649 if (count)
1650 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
1651 for (i = 0; i < count; i++)
1652 {
1653 bit = streamer_read_uhwi (ib_main);
1654 bitmap_set_bit (node->clone.args_to_skip, bit);
1655 }
1656 count = streamer_read_uhwi (ib_main);
1657 if (count)
1658 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
1659 for (i = 0; i < count; i++)
1660 {
1661 bit = streamer_read_uhwi (ib_main);
1662 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
1663 }
1664 count = streamer_read_uhwi (ib_main);
1665 for (i = 0; i < count; i++)
1666 {
1667 int parm_num;
1668 tree parm;
1669 struct ipa_replace_map *map = ggc_alloc_ipa_replace_map ();
1670
1671 VEC_safe_push (ipa_replace_map_p, gc, node->clone.tree_map, map);
1672 for (parm_num = 0, parm = DECL_ARGUMENTS (node->symbol.decl); parm_num;
1673 parm = DECL_CHAIN (parm))
1674 parm_num --;
1675 map->parm_num = streamer_read_uhwi (ib_main);
1676 map->old_tree = NULL;
1677 map->new_tree = stream_read_tree (ib_main, data_in);
1678 bp = streamer_read_bitpack (ib_main);
1679 map->replace_p = bp_unpack_value (&bp, 1);
1680 map->ref_p = bp_unpack_value (&bp, 1);
1681 }
1682 for (e = node->callees; e; e = e->next_callee)
1683 input_edge_opt_summary (e, ib_main);
1684 for (e = node->indirect_calls; e; e = e->next_callee)
1685 input_edge_opt_summary (e, ib_main);
1686 }
1687
1688 /* Read section in file FILE_DATA of length LEN with data DATA. */
1689
1690 static void
1691 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
1692 const char *data, size_t len, VEC (cgraph_node_ptr,
1693 heap) * nodes)
1694 {
1695 const struct lto_function_header *header =
1696 (const struct lto_function_header *) data;
1697 const int cfg_offset = sizeof (struct lto_function_header);
1698 const int main_offset = cfg_offset + header->cfg_size;
1699 const int string_offset = main_offset + header->main_size;
1700 struct data_in *data_in;
1701 struct lto_input_block ib_main;
1702 unsigned int i;
1703 unsigned int count;
1704
1705 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
1706 header->main_size);
1707
1708 data_in =
1709 lto_data_in_create (file_data, (const char *) data + string_offset,
1710 header->string_size, NULL);
1711 count = streamer_read_uhwi (&ib_main);
1712
1713 for (i = 0; i < count; i++)
1714 {
1715 int ref = streamer_read_uhwi (&ib_main);
1716 input_node_opt_summary (VEC_index (cgraph_node_ptr, nodes, ref),
1717 &ib_main, data_in);
1718 }
1719 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
1720 len);
1721 lto_data_in_delete (data_in);
1722 }
1723
1724 /* Input optimization summary of cgraph. */
1725
1726 static void
1727 input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes)
1728 {
1729 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1730 struct lto_file_decl_data *file_data;
1731 unsigned int j = 0;
1732
1733 while ((file_data = file_data_vec[j++]))
1734 {
1735 size_t len;
1736 const char *data =
1737 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
1738 &len);
1739
1740 if (data)
1741 input_cgraph_opt_section (file_data, data, len, nodes);
1742 }
1743 }