[gcc.git] / gcc / lto-cgraph.c
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright 2009, 2010, 2011 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "expr.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "hashtab.h"
33 #include "langhooks.h"
34 #include "basic-block.h"
35 #include "tree-flow.h"
36 #include "cgraph.h"
37 #include "function.h"
38 #include "ggc.h"
39 #include "diagnostic-core.h"
40 #include "except.h"
41 #include "vec.h"
42 #include "timevar.h"
43 #include "output.h"
44 #include "pointer-set.h"
45 #include "lto-streamer.h"
46 #include "data-streamer.h"
47 #include "tree-streamer.h"
48 #include "gcov-io.h"
49
50 static void output_varpool (cgraph_node_set, varpool_node_set);
51 static void output_cgraph_opt_summary (cgraph_node_set set);
52 static void input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes);
53
54 /* Number of LDPR values known to GCC. */
55 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
56
57 /* All node orders are offset by ORDER_BASE. */
58 static int order_base;
59
60 /* Cgraph streaming is organized as a set of records whose type
61 is indicated by a tag. */
62 enum LTO_cgraph_tags
63 {
64 /* Must leave 0 for the stopper. */
65
66 /* Cgraph node without body available. */
67 LTO_cgraph_unavail_node = 1,
68 /* Cgraph node with function body. */
69 LTO_cgraph_analyzed_node,
70 /* Cgraph edges. */
71 LTO_cgraph_edge,
72 LTO_cgraph_indirect_edge,
73 LTO_cgraph_last_tag
74 };
75
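/* A rough sketch of the resulting section layout (see output_cgraph and
   input_cgraph_1 below):

       <profile summary>
       <tag> <node record>    one record per encoded cgraph node
       <tag> <edge record>    one record per call graph edge
       0                      the stopper

   Node records precede the edge records that refer to them, and a node
   precedes its clones so that there is something to clone from when
   reading the section back.  */
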
76 /* Create a new cgraph encoder. */
77
78 lto_cgraph_encoder_t
79 lto_cgraph_encoder_new (void)
80 {
81 lto_cgraph_encoder_t encoder = XCNEW (struct lto_cgraph_encoder_d);
82 encoder->map = pointer_map_create ();
83 encoder->nodes = NULL;
84 encoder->body = pointer_set_create ();
85 return encoder;
86 }
87
88
89 /* Delete ENCODER and its components. */
90
91 void
92 lto_cgraph_encoder_delete (lto_cgraph_encoder_t encoder)
93 {
94 VEC_free (cgraph_node_ptr, heap, encoder->nodes);
95 pointer_map_destroy (encoder->map);
96 pointer_set_destroy (encoder->body);
97 free (encoder);
98 }
99
100
101 /* Return the existing reference number of NODE in the cgraph encoder
102 ENCODER. Assign a new reference if this is the first time
103 NODE is encoded. */
104
105 int
106 lto_cgraph_encoder_encode (lto_cgraph_encoder_t encoder,
107 struct cgraph_node *node)
108 {
109 int ref;
110 void **slot;
111
112 slot = pointer_map_contains (encoder->map, node);
113 if (!slot)
114 {
115 ref = VEC_length (cgraph_node_ptr, encoder->nodes);
116 slot = pointer_map_insert (encoder->map, node);
117 *slot = (void *) (intptr_t) ref;
118 VEC_safe_push (cgraph_node_ptr, heap, encoder->nodes, node);
119 }
120 else
121 ref = (int) (intptr_t) *slot;
122
123 return ref;
124 }
125
126 #define LCC_NOT_FOUND (-1)
127
128 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
129 or LCC_NOT_FOUND if it is not there. */
130
131 int
132 lto_cgraph_encoder_lookup (lto_cgraph_encoder_t encoder,
133 struct cgraph_node *node)
134 {
135 void **slot = pointer_map_contains (encoder->map, node);
136 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
137 }
138
139
140 /* Return the cgraph node corresponding to REF using ENCODER. */
141
142 struct cgraph_node *
143 lto_cgraph_encoder_deref (lto_cgraph_encoder_t encoder, int ref)
144 {
145 if (ref == LCC_NOT_FOUND)
146 return NULL;
147
148 return VEC_index (cgraph_node_ptr, encoder->nodes, ref);
149 }
150
151
152 /* Return TRUE if we should encode the body of NODE (if any). */
153
154 bool
155 lto_cgraph_encoder_encode_body_p (lto_cgraph_encoder_t encoder,
156 struct cgraph_node *node)
157 {
158 return pointer_set_contains (encoder->body, node);
159 }
160
161 /* Specify that we should encode the body of NODE. */
162
163 static void
164 lto_set_cgraph_encoder_encode_body (lto_cgraph_encoder_t encoder,
165 struct cgraph_node *node)
166 {
167 pointer_set_insert (encoder->body, node);
168 }
169
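/* A minimal usage sketch of the encoder API above, assuming an existing
   encoder ENC and cgraph node N (illustrative only):

     int r = lto_cgraph_encoder_encode (enc, n);     assigns a dense index
     gcc_assert (lto_cgraph_encoder_lookup (enc, n) == r);
     gcc_assert (lto_cgraph_encoder_deref (enc, r) == n);

   The same index is later streamed wherever the node has to be referred
   to, e.g. for callers and callees in lto_output_edge.  */
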
170 /* Create a new varpool encoder. */
171
172 lto_varpool_encoder_t
173 lto_varpool_encoder_new (void)
174 {
175 lto_varpool_encoder_t encoder = XCNEW (struct lto_varpool_encoder_d);
176 encoder->map = pointer_map_create ();
177 encoder->initializer = pointer_set_create ();
178 encoder->nodes = NULL;
179 return encoder;
180 }
181
182
183 /* Delete ENCODER and its components. */
184
185 void
186 lto_varpool_encoder_delete (lto_varpool_encoder_t encoder)
187 {
188 VEC_free (varpool_node_ptr, heap, encoder->nodes);
189 pointer_map_destroy (encoder->map);
190 pointer_set_destroy (encoder->initializer);
191 free (encoder);
192 }
193
194
195 /* Return the existing reference number of NODE in the varpool encoder
196 ENCODER. Assign a new reference if this is the first time
197 NODE is encoded. */
198
199 int
200 lto_varpool_encoder_encode (lto_varpool_encoder_t encoder,
201 struct varpool_node *node)
202 {
203 int ref;
204 void **slot;
205
206 slot = pointer_map_contains (encoder->map, node);
207 if (!slot)
208 {
209 ref = VEC_length (varpool_node_ptr, encoder->nodes);
210 slot = pointer_map_insert (encoder->map, node);
211 *slot = (void *) (intptr_t) ref;
212 VEC_safe_push (varpool_node_ptr, heap, encoder->nodes, node);
213 }
214 else
215 ref = (int) (intptr_t) *slot;
216
217 return ref;
218 }
219
220 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
221 or LCC_NOT_FOUND if it is not there. */
222
223 int
224 lto_varpool_encoder_lookup (lto_varpool_encoder_t encoder,
225 struct varpool_node *node)
226 {
227 void **slot = pointer_map_contains (encoder->map, node);
228 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
229 }
230
231
232 /* Return the varpool node corresponding to REF using ENCODER. */
233
234 struct varpool_node *
235 lto_varpool_encoder_deref (lto_varpool_encoder_t encoder, int ref)
236 {
237 if (ref == LCC_NOT_FOUND)
238 return NULL;
239
240 return VEC_index (varpool_node_ptr, encoder->nodes, ref);
241 }
242
243
244 /* Return TRUE if we should encode initializer of NODE (if any). */
245
246 bool
247 lto_varpool_encoder_encode_initializer_p (lto_varpool_encoder_t encoder,
248 struct varpool_node *node)
249 {
250 return pointer_set_contains (encoder->initializer, node);
251 }
252
253 /* Specify that we should encode the initializer of NODE. */
254
255 static void
256 lto_set_varpool_encoder_encode_initializer (lto_varpool_encoder_t encoder,
257 struct varpool_node *node)
258 {
259 pointer_set_insert (encoder->initializer, node);
260 }
261
262 /* Output the cgraph EDGE to OB using ENCODER. */
263
264 static void
265 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
266 lto_cgraph_encoder_t encoder)
267 {
268 unsigned int uid;
269 intptr_t ref;
270 struct bitpack_d bp;
271
272 if (edge->indirect_unknown_callee)
273 streamer_write_enum (ob->main_stream, LTO_cgraph_tags, LTO_cgraph_last_tag,
274 LTO_cgraph_indirect_edge);
275 else
276 streamer_write_enum (ob->main_stream, LTO_cgraph_tags, LTO_cgraph_last_tag,
277 LTO_cgraph_edge);
278
279 ref = lto_cgraph_encoder_lookup (encoder, edge->caller);
280 gcc_assert (ref != LCC_NOT_FOUND);
281 streamer_write_hwi_stream (ob->main_stream, ref);
282
283 if (!edge->indirect_unknown_callee)
284 {
285 ref = lto_cgraph_encoder_lookup (encoder, edge->callee);
286 gcc_assert (ref != LCC_NOT_FOUND);
287 streamer_write_hwi_stream (ob->main_stream, ref);
288 }
289
290 streamer_write_hwi_stream (ob->main_stream, edge->count);
291
292 bp = bitpack_create (ob->main_stream);
293 uid = (!gimple_has_body_p (edge->caller->symbol.decl)
294 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt));
295 bp_pack_enum (&bp, cgraph_inline_failed_enum,
296 CIF_N_REASONS, edge->inline_failed);
297 bp_pack_var_len_unsigned (&bp, uid);
298 bp_pack_var_len_unsigned (&bp, edge->frequency);
299 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
300 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
301 bp_pack_value (&bp, edge->can_throw_external, 1);
302 if (edge->indirect_unknown_callee)
303 {
304 int flags = edge->indirect_info->ecf_flags;
305 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
306 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
307 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
308 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
309 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
310 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
311 /* Flags that should not appear on indirect calls. */
312 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
313 | ECF_MAY_BE_ALLOCA
314 | ECF_SIBCALL
315 | ECF_LEAF
316 | ECF_NOVOPS)));
317 }
318 streamer_write_bitpack (&bp);
319 }
320
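/* Sketch of the edge record just produced, as consumed by input_edge:

     <LTO_cgraph_edge | LTO_cgraph_indirect_edge>
     <caller index> [<callee index> unless indirect]
     <count>
     <bitpack: inline_failed, stmt uid, frequency,
               indirect_inlining_edge, call_stmt_cannot_inline_p,
               can_throw_external [, ECF_* bits for indirect edges]>  */
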
321 /* Return true if LIST contains references from other partitions. */
322
323 bool
324 referenced_from_other_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
325 varpool_node_set vset)
326 {
327 int i;
328 struct ipa_ref *ref;
329 for (i = 0; ipa_ref_list_referring_iterate (list, i, ref); i++)
330 {
331 if (symtab_function_p (ref->referring))
332 {
333 if (ipa_ref_referring_node (ref)->symbol.in_other_partition
334 || !cgraph_node_in_set_p (ipa_ref_referring_node (ref), set))
335 return true;
336 }
337 else
338 {
339 if (ipa_ref_referring_varpool_node (ref)->symbol.in_other_partition
340 || !varpool_node_in_set_p (ipa_ref_referring_varpool_node (ref),
341 vset))
342 return true;
343 }
344 }
345 return false;
346 }
347
348 /* Return true when NODE is reachable from another partition. */
349
350 bool
351 reachable_from_other_partition_p (struct cgraph_node *node, cgraph_node_set set)
352 {
353 struct cgraph_edge *e;
354 if (!node->analyzed)
355 return false;
356 if (node->global.inlined_to)
357 return false;
358 for (e = node->callers; e; e = e->next_caller)
359 if (e->caller->symbol.in_other_partition
360 || !cgraph_node_in_set_p (e->caller, set))
361 return true;
362 return false;
363 }
364
365 /* Return true if LIST contains references from this partition. */
366
367 bool
368 referenced_from_this_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
369 varpool_node_set vset)
370 {
371 int i;
372 struct ipa_ref *ref;
373 for (i = 0; ipa_ref_list_referring_iterate (list, i, ref); i++)
374 {
375 if (symtab_function_p (ref->referring))
376 {
377 if (cgraph_node_in_set_p (ipa_ref_referring_node (ref), set))
378 return true;
379 }
380 else
381 {
382 if (varpool_node_in_set_p (ipa_ref_referring_varpool_node (ref),
383 vset))
384 return true;
385 }
386 }
387 return false;
388 }
389
390 /* Return true when NODE is reachable from this partition. */
391
392 bool
393 reachable_from_this_partition_p (struct cgraph_node *node, cgraph_node_set set)
394 {
395 struct cgraph_edge *e;
396 for (e = node->callers; e; e = e->next_caller)
397 if (cgraph_node_in_set_p (e->caller, set))
398 return true;
399 return false;
400 }
401
402 /* Output the cgraph NODE to OB. ENCODER is used to find the
403 reference number of NODE->inlined_to. SET is the set of nodes we
404 are writing to the current file. If NODE is not in SET, then NODE
405 is a boundary of a cgraph_node_set and we pretend NODE just has a
406 decl and no callees. WRITTEN_DECLS is the set of FUNCTION_DECLs
407 that have had their callgraph node written so far. This is used to
408 determine if NODE is a clone of a previously written node. */
409
410 static void
411 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
412 lto_cgraph_encoder_t encoder, cgraph_node_set set,
413 varpool_node_set vset)
414 {
415 unsigned int tag;
416 struct bitpack_d bp;
417 bool boundary_p;
418 intptr_t ref;
419 bool in_other_partition = false;
420 struct cgraph_node *clone_of;
421
422 boundary_p = !cgraph_node_in_set_p (node, set);
423
424 if (node->analyzed && !boundary_p)
425 tag = LTO_cgraph_analyzed_node;
426 else
427 tag = LTO_cgraph_unavail_node;
428
429 streamer_write_enum (ob->main_stream, LTO_cgraph_tags, LTO_cgraph_last_tag,
430 tag);
431 streamer_write_hwi_stream (ob->main_stream, node->symbol.order);
432
433 /* In WPA mode, we only output part of the call-graph. Also, we
434 fake cgraph node attributes. There are two cases we care about.
435
436 Boundary nodes: There are nodes that are not part of SET but are
437 called from within SET. We artificially make them look like
438 externally visible nodes with no function body.
439
440 Cherry-picked nodes: These are nodes we pulled from other
441 translation units into SET during IPA-inlining. We make them
442 local static nodes to prevent clashes with other local statics. */
443 if (boundary_p && node->analyzed)
444 {
445 /* Inline clones can not be part of the boundary.
446 gcc_assert (!node->global.inlined_to);
447
448 FIXME: At the moment they can be, when the partition contains an inline
449 clone that is a clone of an inline clone from outside the partition. We
450 could reshape the clone tree and make another node the root, but it
451 needs a bit of extra work and will be promptly done by cgraph_remove_node
452 after reading back. */
453 in_other_partition = 1;
454 }
455
456 clone_of = node->clone_of;
457 while (clone_of
458 && (ref = lto_cgraph_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
459 if (clone_of->prev_sibling_clone)
460 clone_of = clone_of->prev_sibling_clone;
461 else
462 clone_of = clone_of->clone_of;
463
464 if (tag == LTO_cgraph_analyzed_node)
465 gcc_assert (clone_of || !node->clone_of);
466 if (!clone_of)
467 streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
468 else
469 streamer_write_hwi_stream (ob->main_stream, ref);
470
471
472 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->symbol.decl);
473 streamer_write_hwi_stream (ob->main_stream, node->count);
474 streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
475
476 if (tag == LTO_cgraph_analyzed_node)
477 {
478 if (node->global.inlined_to)
479 {
480 ref = lto_cgraph_encoder_lookup (encoder, node->global.inlined_to);
481 gcc_assert (ref != LCC_NOT_FOUND);
482 }
483 else
484 ref = LCC_NOT_FOUND;
485
486 streamer_write_hwi_stream (ob->main_stream, ref);
487 }
488
489 if (node->symbol.same_comdat_group && !boundary_p)
490 {
491 ref = lto_cgraph_encoder_lookup (encoder,
492 cgraph (node->symbol.same_comdat_group));
493 gcc_assert (ref != LCC_NOT_FOUND);
494 }
495 else
496 ref = LCC_NOT_FOUND;
497 streamer_write_hwi_stream (ob->main_stream, ref);
498
499 bp = bitpack_create (ob->main_stream);
500 bp_pack_value (&bp, node->local.local, 1);
501 bp_pack_value (&bp, node->symbol.externally_visible, 1);
502 bp_pack_value (&bp, node->local.finalized, 1);
503 bp_pack_value (&bp, node->local.versionable, 1);
504 bp_pack_value (&bp, node->local.can_change_signature, 1);
505 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
506 bp_pack_value (&bp, node->symbol.force_output, 1);
507 bp_pack_value (&bp, node->symbol.address_taken, 1);
508 bp_pack_value (&bp, node->abstract_and_needed, 1);
509 bp_pack_value (&bp, tag == LTO_cgraph_analyzed_node
510 && !DECL_EXTERNAL (node->symbol.decl)
511 && !DECL_COMDAT (node->symbol.decl)
512 && (reachable_from_other_partition_p (node, set)
513 || referenced_from_other_partition_p (&node->symbol.ref_list,
514 set, vset)), 1);
515 bp_pack_value (&bp, node->lowered, 1);
516 bp_pack_value (&bp, in_other_partition, 1);
517 /* Real aliases in a boundary become non-aliases. However we still stream
518 alias info on weakrefs.
519 TODO: We lose a bit of information here - when we know that a variable is
520 defined in another unit, we could use the info on aliases to resolve
521 symbol1 != symbol2 type tests that we otherwise can do only for locally
522 defined objects. */
523 bp_pack_value (&bp, node->alias && (!boundary_p || DECL_EXTERNAL (node->symbol.decl)), 1);
524 bp_pack_value (&bp, node->frequency, 2);
525 bp_pack_value (&bp, node->only_called_at_startup, 1);
526 bp_pack_value (&bp, node->only_called_at_exit, 1);
527 bp_pack_value (&bp, node->tm_clone, 1);
528 bp_pack_value (&bp, node->thunk.thunk_p && !boundary_p, 1);
529 bp_pack_enum (&bp, ld_plugin_symbol_resolution,
530 LDPR_NUM_KNOWN, node->symbol.resolution);
531 streamer_write_bitpack (&bp);
532
533 if (node->thunk.thunk_p && !boundary_p)
534 {
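/* The three thunk properties are packed into a single word: the low
   bit is always set (so the value is never zero), bit 1 encodes
   this_adjusting and bit 2 encodes virtual_offset_p; input_node
   recovers them with "type & 2" and "type & 4". For example, a
   this-adjusting thunk with a virtual offset streams 1 + 2 + 4 = 7. */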
535 streamer_write_uhwi_stream
536 (ob->main_stream,
537 1 + (node->thunk.this_adjusting != 0) * 2
538 + (node->thunk.virtual_offset_p != 0) * 4);
539 streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
540 streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
541 }
542 if ((node->alias || node->thunk.thunk_p)
543 && (!boundary_p || (node->alias && DECL_EXTERNAL (node->symbol.decl))))
544 {
545 streamer_write_hwi_in_range (ob->main_stream, 0, 1,
546 node->thunk.alias != NULL);
547 if (node->thunk.alias != NULL)
548 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
549 node->thunk.alias);
550 }
551 }
552
553 /* Output the varpool NODE to OB.
554 If NODE is not in VSET, then NODE is a boundary. */
555
556 static void
557 lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
558 lto_varpool_encoder_t varpool_encoder,
559 cgraph_node_set set, varpool_node_set vset)
560 {
561 bool boundary_p = !varpool_node_in_set_p (node, vset) && node->analyzed;
562 struct bitpack_d bp;
563 int ref;
564
565 streamer_write_hwi_stream (ob->main_stream, node->symbol.order);
566 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->symbol.decl);
567 bp = bitpack_create (ob->main_stream);
568 bp_pack_value (&bp, node->symbol.externally_visible, 1);
569 bp_pack_value (&bp, node->symbol.force_output, 1);
570 bp_pack_value (&bp, node->finalized, 1);
571 bp_pack_value (&bp, node->alias, 1);
572 bp_pack_value (&bp, node->alias_of != NULL, 1);
573 gcc_assert (node->finalized || !node->analyzed);
574 gcc_assert (node->needed);
575 /* Constant pool initializers can be de-unified into individual ltrans units.
576 FIXME: Alternatively, at -Os we may want to avoid generating the local
577 labels for them and share them across LTRANS partitions. */
578 if (DECL_IN_CONSTANT_POOL (node->symbol.decl)
579 && !DECL_COMDAT (node->symbol.decl))
580 {
581 bp_pack_value (&bp, 0, 1); /* used_from_other_partition. */
582 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
583 }
584 else
585 {
586 bp_pack_value (&bp, node->analyzed
587 && referenced_from_other_partition_p (&node->symbol.ref_list,
588 set, vset), 1);
589 bp_pack_value (&bp, boundary_p, 1); /* in_other_partition. */
590 }
591 streamer_write_bitpack (&bp);
592 if (node->alias_of)
593 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->alias_of);
594 if (node->symbol.same_comdat_group && !boundary_p)
595 {
596 ref = lto_varpool_encoder_lookup (varpool_encoder,
597 varpool (node->symbol.same_comdat_group));
598 gcc_assert (ref != LCC_NOT_FOUND);
599 }
600 else
601 ref = LCC_NOT_FOUND;
602 streamer_write_hwi_stream (ob->main_stream, ref);
603 streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
604 LDPR_NUM_KNOWN, node->symbol.resolution);
605 }
606
607 /* Output the reference REF to OB, using ENCODER and VARPOOL_ENCODER to
608 look up the referred cgraph or varpool node. */
609
610 static void
611 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
612 lto_cgraph_encoder_t encoder,
613 lto_varpool_encoder_t varpool_encoder)
614 {
615 struct bitpack_d bp;
616 bp = bitpack_create (ob->main_stream);
617 bp_pack_value (&bp, symtab_function_p (ref->referred), 1);
618 bp_pack_value (&bp, ref->use, 2);
619 streamer_write_bitpack (&bp);
620 if (symtab_function_p (ref->referred))
621 {
622 int nref = lto_cgraph_encoder_lookup (encoder, ipa_ref_node (ref));
623 gcc_assert (nref != LCC_NOT_FOUND);
624 streamer_write_hwi_stream (ob->main_stream, nref);
625 }
626 else
627 {
628 int nref = lto_varpool_encoder_lookup (varpool_encoder,
629 ipa_ref_varpool_node (ref));
630 gcc_assert (nref != LCC_NOT_FOUND);
631 streamer_write_hwi_stream (ob->main_stream, nref);
632 }
633 }
634
635 /* Stream out profile_summary to OB. */
636
637 static void
638 output_profile_summary (struct lto_simple_output_block *ob)
639 {
640 if (profile_info)
641 {
642 /* We do not output num, sum_all and run_max, as they are not used by
643 GCC profile feedback and they are difficult to merge from multiple
644 units. */
645 gcc_assert (profile_info->runs);
646 streamer_write_uhwi_stream (ob->main_stream, profile_info->runs);
647 streamer_write_uhwi_stream (ob->main_stream, profile_info->sum_max);
648 }
649 else
650 streamer_write_uhwi_stream (ob->main_stream, 0);
651 }
652
653 /* Add NODE into ENCODER as well as the nodes it is cloned from.
654 Do it in a way so that the nodes a clone was cloned from appear first. */
655
656 static void
657 add_node_to (lto_cgraph_encoder_t encoder, struct cgraph_node *node,
658 bool include_body)
659 {
660 if (node->clone_of)
661 add_node_to (encoder, node->clone_of, include_body);
662 else if (include_body)
663 lto_set_cgraph_encoder_encode_body (encoder, node);
664 lto_cgraph_encoder_encode (encoder, node);
665 }
666
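/* For example, for a clone C2 whose clone_of is C1, itself a clone of
   function F, the recursion above encodes F first, then C1, then C2,
   so each clone is always preceded by the node it was cloned from.  */
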
667 /* Add all references in LIST to encoders. */
668
669 static void
670 add_references (lto_cgraph_encoder_t encoder,
671 lto_varpool_encoder_t varpool_encoder,
672 struct ipa_ref_list *list)
673 {
674 int i;
675 struct ipa_ref *ref;
676 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
677 if (symtab_function_p (ref->referred))
678 add_node_to (encoder, ipa_ref_node (ref), false);
679 else
680 {
681 struct varpool_node *vnode = ipa_ref_varpool_node (ref);
682 lto_varpool_encoder_encode (varpool_encoder, vnode);
683 }
684 }
685
686 /* Output all callees or indirect outgoing edges. EDGE must be the first such
687 edge. */
688
689 static void
690 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
691 struct lto_simple_output_block *ob,
692 lto_cgraph_encoder_t encoder)
693 {
694 if (!edge)
695 return;
696
697 /* Output edges in backward direction, so the reconstructed callgraph matches
698 and it is easy to associate call sites with the IPA pass summaries. */
699 while (edge->next_callee)
700 edge = edge->next_callee;
701 for (; edge; edge = edge->prev_callee)
702 lto_output_edge (ob, edge, encoder);
703 }
704
705 /* Output the references of the nodes in SET and VSET to the refs section. */
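/* A rough sketch of the refs section format, as read back by input_refs:

     for each cgraph node with references:
       <number of references> <index of the referring node> <refs...>
     0
     for each varpool node with references:
       <number of references> <index of the referring node> <refs...>
     0

   where each <ref> is a small bitpack (function-or-variable bit plus the
   use kind) followed by the index of the referred node.  */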
706
707 static void
708 output_refs (cgraph_node_set set, varpool_node_set vset,
709 lto_cgraph_encoder_t encoder,
710 lto_varpool_encoder_t varpool_encoder)
711 {
712 cgraph_node_set_iterator csi;
713 varpool_node_set_iterator vsi;
714 struct lto_simple_output_block *ob;
715 int count;
716 struct ipa_ref *ref;
717 int i;
718
719 ob = lto_create_simple_output_block (LTO_section_refs);
720
721 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
722 {
723 struct cgraph_node *node = csi_node (csi);
724
725 count = ipa_ref_list_nreferences (&node->symbol.ref_list);
726 if (count)
727 {
728 streamer_write_uhwi_stream (ob->main_stream, count);
729 streamer_write_uhwi_stream (ob->main_stream,
730 lto_cgraph_encoder_lookup (encoder, node));
731 for (i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list,
732 i, ref); i++)
733 lto_output_ref (ob, ref, encoder, varpool_encoder);
734 }
735 }
736
737 streamer_write_uhwi_stream (ob->main_stream, 0);
738
739 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
740 {
741 struct varpool_node *node = vsi_node (vsi);
742
743 count = ipa_ref_list_nreferences (&node->symbol.ref_list);
744 if (count)
745 {
746 streamer_write_uhwi_stream (ob->main_stream, count);
747 streamer_write_uhwi_stream (ob->main_stream,
748 lto_varpool_encoder_lookup (varpool_encoder,
749 node));
750 for (i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list,
751 i, ref); i++)
752 lto_output_ref (ob, ref, encoder, varpool_encoder);
753 }
754 }
755
756 streamer_write_uhwi_stream (ob->main_stream, 0);
757
758 lto_destroy_simple_output_block (ob);
759 }
760
761 /* Find out all cgraph and varpool nodes we want to encode in the current unit
762 and insert them into the encoders. */
763 void
764 compute_ltrans_boundary (struct lto_out_decl_state *state,
765 cgraph_node_set set, varpool_node_set vset)
766 {
767 struct cgraph_node *node;
768 cgraph_node_set_iterator csi;
769 varpool_node_set_iterator vsi;
770 struct cgraph_edge *edge;
771 int i;
772 lto_cgraph_encoder_t encoder;
773 lto_varpool_encoder_t varpool_encoder;
774
775 encoder = state->cgraph_node_encoder = lto_cgraph_encoder_new ();
776 varpool_encoder = state->varpool_node_encoder = lto_varpool_encoder_new ();
777
778 /* Go over all the nodes in SET and assign references. */
779 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
780 {
781 node = csi_node (csi);
782 add_node_to (encoder, node, true);
783 add_references (encoder, varpool_encoder, &node->symbol.ref_list);
784 }
785 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
786 {
787 struct varpool_node *vnode = vsi_node (vsi);
788 gcc_assert (!vnode->alias || vnode->alias_of);
789 lto_varpool_encoder_encode (varpool_encoder, vnode);
790 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
791 add_references (encoder, varpool_encoder, &vnode->symbol.ref_list);
792 }
793 /* Also pickle in the initializers of all referenced read-only variables
794 to help folding. Constant pool variables are not shared, so we must
795 pickle those too. */
796 for (i = 0; i < lto_varpool_encoder_size (varpool_encoder); i++)
797 {
798 struct varpool_node *vnode = lto_varpool_encoder_deref (varpool_encoder, i);
799 if (DECL_INITIAL (vnode->symbol.decl)
800 && !lto_varpool_encoder_encode_initializer_p (varpool_encoder,
801 vnode)
802 && const_value_known_p (vnode->symbol.decl))
803 {
804 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
805 add_references (encoder, varpool_encoder, &vnode->symbol.ref_list);
806 }
807 else if (vnode->alias || vnode->alias_of)
808 add_references (encoder, varpool_encoder, &vnode->symbol.ref_list);
809 }
810
811 /* Go over all the nodes again to include callees that are not in
812 SET. */
813 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
814 {
815 node = csi_node (csi);
816 for (edge = node->callees; edge; edge = edge->next_callee)
817 {
818 struct cgraph_node *callee = edge->callee;
819 if (!cgraph_node_in_set_p (callee, set))
820 {
821 /* We should have moved all the inlines. */
822 gcc_assert (!callee->global.inlined_to);
823 add_node_to (encoder, callee, false);
824 }
825 }
826 }
827 }
828
829 /* Output the part of the cgraph in SET. */
830
831 void
832 output_cgraph (cgraph_node_set set, varpool_node_set vset)
833 {
834 struct cgraph_node *node;
835 struct lto_simple_output_block *ob;
836 cgraph_node_set_iterator csi;
837 int i, n_nodes;
838 lto_cgraph_encoder_t encoder;
839 lto_varpool_encoder_t varpool_encoder;
840 static bool asm_nodes_output = false;
841
842 if (flag_wpa)
843 output_cgraph_opt_summary (set);
844
845 ob = lto_create_simple_output_block (LTO_section_cgraph);
846
847 output_profile_summary (ob);
848
849 /* An encoder for cgraph nodes should have been created by
850 ipa_write_summaries_1. */
851 gcc_assert (ob->decl_state->cgraph_node_encoder);
852 gcc_assert (ob->decl_state->varpool_node_encoder);
853 encoder = ob->decl_state->cgraph_node_encoder;
854 varpool_encoder = ob->decl_state->varpool_node_encoder;
855
856 /* Write out the nodes. We must first output a node and then its clones,
857 otherwise at the time the node is read back there would be nothing to clone
858 from. */
859 n_nodes = lto_cgraph_encoder_size (encoder);
860 for (i = 0; i < n_nodes; i++)
861 {
862 node = lto_cgraph_encoder_deref (encoder, i);
863 lto_output_node (ob, node, encoder, set, vset);
864 }
865
866 /* Go over the nodes in SET again to write edges. */
867 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
868 {
869 node = csi_node (csi);
870 output_outgoing_cgraph_edges (node->callees, ob, encoder);
871 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
872 }
873
874 streamer_write_uhwi_stream (ob->main_stream, 0);
875
876 lto_destroy_simple_output_block (ob);
877
878 /* Emit toplevel asms.
879 When doing WPA we must output every asm just once. Since we do not partition asm
880 nodes at all, output them to the first output. This is kind of a hack, but should
881 work well. */
882 if (!asm_nodes_output)
883 {
884 asm_nodes_output = true;
885 lto_output_toplevel_asms ();
886 }
887
888 output_varpool (set, vset);
889 output_refs (set, vset, encoder, varpool_encoder);
890 }
891
892 /* Overwrite the information in NODE based on FILE_DATA, TAG and the
893 bitflags in BP. This is called either to initialize
894 NODE or to replace the values in it, for instance because the first
895 time we saw it, the function body was not available but now it
896 is. BP is a bitpack with all the bitflags for NODE read from the
897 stream. */
898
899 static void
900 input_overwrite_node (struct lto_file_decl_data *file_data,
901 struct cgraph_node *node,
902 enum LTO_cgraph_tags tag,
903 struct bitpack_d *bp)
904 {
905 node->symbol.aux = (void *) tag;
906 node->symbol.lto_file_data = file_data;
907
908 node->local.local = bp_unpack_value (bp, 1);
909 node->symbol.externally_visible = bp_unpack_value (bp, 1);
910 node->local.finalized = bp_unpack_value (bp, 1);
911 node->local.versionable = bp_unpack_value (bp, 1);
912 node->local.can_change_signature = bp_unpack_value (bp, 1);
913 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
914 node->symbol.force_output = bp_unpack_value (bp, 1);
915 node->symbol.address_taken = bp_unpack_value (bp, 1);
916 node->abstract_and_needed = bp_unpack_value (bp, 1);
917 node->symbol.used_from_other_partition = bp_unpack_value (bp, 1);
918 node->lowered = bp_unpack_value (bp, 1);
919 node->analyzed = tag == LTO_cgraph_analyzed_node;
920 node->symbol.in_other_partition = bp_unpack_value (bp, 1);
921 if (node->symbol.in_other_partition
922 /* Avoid updating decl when we are seeing just inline clone.
923 When inlining function that has functions already inlined into it,
924 we produce clones of inline clones.
925
926 WPA partitioning might put each clone into different unit and
927 we might end up streaming inline clone from other partition
928 to support clone we are interested in. */
929 && (!node->clone_of
930 || node->clone_of->symbol.decl != node->symbol.decl))
931 {
932 DECL_EXTERNAL (node->symbol.decl) = 1;
933 TREE_STATIC (node->symbol.decl) = 0;
934 }
935 node->alias = bp_unpack_value (bp, 1);
936 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
937 node->only_called_at_startup = bp_unpack_value (bp, 1);
938 node->only_called_at_exit = bp_unpack_value (bp, 1);
939 node->tm_clone = bp_unpack_value (bp, 1);
940 node->thunk.thunk_p = bp_unpack_value (bp, 1);
941 node->symbol.resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
942 LDPR_NUM_KNOWN);
943 }
944
945 /* Output the part of the varpool corresponding to SET and VSET. */
946
947 static void
948 output_varpool (cgraph_node_set set, varpool_node_set vset)
949 {
950 struct lto_simple_output_block *ob = lto_create_simple_output_block (LTO_section_varpool);
951 lto_varpool_encoder_t varpool_encoder = ob->decl_state->varpool_node_encoder;
952 int len = lto_varpool_encoder_size (varpool_encoder), i;
953
954 streamer_write_uhwi_stream (ob->main_stream, len);
955
956 /* Write out the nodes. We must first output a node and then its clones,
957 otherwise at the time the node is read back there would be nothing to clone
958 from. */
959 for (i = 0; i < len; i++)
960 {
961 lto_output_varpool_node (ob, lto_varpool_encoder_deref (varpool_encoder, i),
962 varpool_encoder,
963 set, vset);
964 }
965
966 lto_destroy_simple_output_block (ob);
967 }
968
969 /* Read a node from input_block IB. TAG is the node's tag just read.
970 Return the node read or overwritten. */
971
972 static struct cgraph_node *
973 input_node (struct lto_file_decl_data *file_data,
974 struct lto_input_block *ib,
975 enum LTO_cgraph_tags tag,
976 VEC(cgraph_node_ptr, heap) *nodes)
977 {
978 tree fn_decl;
979 struct cgraph_node *node;
980 struct bitpack_d bp;
981 unsigned decl_index;
982 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
983 int clone_ref;
984 int order;
985
986 order = streamer_read_hwi (ib) + order_base;
987 clone_ref = streamer_read_hwi (ib);
988
989 decl_index = streamer_read_uhwi (ib);
990 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
991
992 if (clone_ref != LCC_NOT_FOUND)
993 {
994 node = cgraph_clone_node (VEC_index (cgraph_node_ptr, nodes, clone_ref), fn_decl,
995 0, CGRAPH_FREQ_BASE, false, NULL, false);
996 }
997 else
998 node = cgraph_get_create_node (fn_decl);
999
1000 node->symbol.order = order;
1001 if (order >= symtab_order)
1002 symtab_order = order + 1;
1003
1004 node->count = streamer_read_hwi (ib);
1005 node->count_materialization_scale = streamer_read_hwi (ib);
1006
1007 if (tag == LTO_cgraph_analyzed_node)
1008 ref = streamer_read_hwi (ib);
1009
1010 ref2 = streamer_read_hwi (ib);
1011
1012 /* Make sure that we have not read this node before. Nodes that
1013 have already been read will have their tag stored in the 'aux'
1014 field. Since built-in functions can be referenced in multiple
1015 functions, they are expected to be read more than once. */
1016 if (node->symbol.aux && !DECL_BUILT_IN (node->symbol.decl))
1017 internal_error ("bytecode stream: found multiple instances of cgraph "
1018 "node %d", node->uid);
1019
1020 bp = streamer_read_bitpack (ib);
1021 input_overwrite_node (file_data, node, tag, &bp);
1022
1023 /* Store a reference for now, and fix up later to be a pointer. */
1024 node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
1025
1026 /* Store a reference for now, and fix up later to be a pointer. */
1027 node->symbol.same_comdat_group = (symtab_node) (intptr_t) ref2;
1028
1029 if (node->thunk.thunk_p)
1030 {
1031 int type = streamer_read_uhwi (ib);
1032 HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
1033 HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);
1034
1035 node->thunk.fixed_offset = fixed_offset;
1036 node->thunk.this_adjusting = (type & 2);
1037 node->thunk.virtual_value = virtual_value;
1038 node->thunk.virtual_offset_p = (type & 4);
1039 }
1040 if (node->thunk.thunk_p || node->alias)
1041 {
1042 if (streamer_read_hwi_in_range (ib, "alias nonzero flag", 0, 1))
1043 {
1044 decl_index = streamer_read_uhwi (ib);
1045 node->thunk.alias = lto_file_decl_data_get_fn_decl (file_data,
1046 decl_index);
1047 }
1048 }
1049 return node;
1050 }
1051
1052 /* Read a varpool node from input_block IB.
1053 Return the node read or overwritten. */
1054
1055 static struct varpool_node *
1056 input_varpool_node (struct lto_file_decl_data *file_data,
1057 struct lto_input_block *ib)
1058 {
1059 int decl_index;
1060 tree var_decl;
1061 struct varpool_node *node;
1062 struct bitpack_d bp;
1063 int ref = LCC_NOT_FOUND;
1064 bool non_null_aliasof;
1065 int order;
1066
1067 order = streamer_read_hwi (ib) + order_base;
1068 decl_index = streamer_read_uhwi (ib);
1069 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1070 node = varpool_node (var_decl);
1071 node->symbol.order = order;
1072 if (order >= symtab_order)
1073 symtab_order = order + 1;
1074 node->symbol.lto_file_data = file_data;
1075
1076 bp = streamer_read_bitpack (ib);
1077 node->symbol.externally_visible = bp_unpack_value (&bp, 1);
1078 node->symbol.force_output = bp_unpack_value (&bp, 1);
1079 node->finalized = bp_unpack_value (&bp, 1);
1080 node->alias = bp_unpack_value (&bp, 1);
1081 non_null_aliasof = bp_unpack_value (&bp, 1);
1082 node->analyzed = node->finalized;
1083 node->symbol.used_from_other_partition = bp_unpack_value (&bp, 1);
1084 node->symbol.in_other_partition = bp_unpack_value (&bp, 1);
1085 if (node->symbol.in_other_partition)
1086 {
1087 DECL_EXTERNAL (node->symbol.decl) = 1;
1088 TREE_STATIC (node->symbol.decl) = 0;
1089 }
1090 if (node->finalized)
1091 varpool_mark_needed_node (node);
1092 if (non_null_aliasof)
1093 {
1094 decl_index = streamer_read_uhwi (ib);
1095 node->alias_of = lto_file_decl_data_get_var_decl (file_data, decl_index);
1096 }
1097 ref = streamer_read_hwi (ib);
1098 /* Store a reference for now, and fix up later to be a pointer. */
1099 node->symbol.same_comdat_group = (symtab_node) (intptr_t) ref;
1100 node->symbol.resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1101 LDPR_NUM_KNOWN);
1102
1103 return node;
1104 }
1105
1106 /* Read a reference from input_block IB and record it for REFERRING_NODE,
1107 resolving the referred node from NODES or VARPOOL_NODES_VEC. */
1108
1109 static void
1110 input_ref (struct lto_input_block *ib,
1111 symtab_node referring_node,
1112 VEC(cgraph_node_ptr, heap) *nodes,
1113 VEC(varpool_node_ptr, heap) *varpool_nodes_vec)
1114 {
1115 struct cgraph_node *node = NULL;
1116 struct varpool_node *varpool_node = NULL;
1117 struct bitpack_d bp;
1118 int type;
1119 enum ipa_ref_use use;
1120
1121 bp = streamer_read_bitpack (ib);
1122 type = bp_unpack_value (&bp, 1);
1123 use = (enum ipa_ref_use) bp_unpack_value (&bp, 2);
1124 if (type)
1125 node = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
1126 else
1127 varpool_node = VEC_index (varpool_node_ptr, varpool_nodes_vec,
1128 streamer_read_hwi (ib));
1129 ipa_record_reference (referring_node,
1130 node ? (symtab_node) node : (symtab_node) varpool_node, use, NULL);
1131 }
1132
1133 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1134 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1135 edge being read is indirect (in the sense that it has
1136 indirect_unknown_callee set). */
1137
1138 static void
1139 input_edge (struct lto_input_block *ib, VEC(cgraph_node_ptr, heap) *nodes,
1140 bool indirect)
1141 {
1142 struct cgraph_node *caller, *callee;
1143 struct cgraph_edge *edge;
1144 unsigned int stmt_id;
1145 gcov_type count;
1146 int freq;
1147 cgraph_inline_failed_t inline_failed;
1148 struct bitpack_d bp;
1149 int ecf_flags = 0;
1150
1151 caller = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
1152 if (caller == NULL || caller->symbol.decl == NULL_TREE)
1153 internal_error ("bytecode stream: no caller found while reading edge");
1154
1155 if (!indirect)
1156 {
1157 callee = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
1158 if (callee == NULL || callee->symbol.decl == NULL_TREE)
1159 internal_error ("bytecode stream: no callee found while reading edge");
1160 }
1161 else
1162 callee = NULL;
1163
1164 count = (gcov_type) streamer_read_hwi (ib);
1165
1166 bp = streamer_read_bitpack (ib);
1167 inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_enum, CIF_N_REASONS);
1168 stmt_id = bp_unpack_var_len_unsigned (&bp);
1169 freq = (int) bp_unpack_var_len_unsigned (&bp);
1170
1171 if (indirect)
1172 edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq);
1173 else
1174 edge = cgraph_create_edge (caller, callee, NULL, count, freq);
1175
1176 edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
1177 edge->lto_stmt_uid = stmt_id;
1178 edge->inline_failed = inline_failed;
1179 edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
1180 edge->can_throw_external = bp_unpack_value (&bp, 1);
1181 if (indirect)
1182 {
1183 if (bp_unpack_value (&bp, 1))
1184 ecf_flags |= ECF_CONST;
1185 if (bp_unpack_value (&bp, 1))
1186 ecf_flags |= ECF_PURE;
1187 if (bp_unpack_value (&bp, 1))
1188 ecf_flags |= ECF_NORETURN;
1189 if (bp_unpack_value (&bp, 1))
1190 ecf_flags |= ECF_MALLOC;
1191 if (bp_unpack_value (&bp, 1))
1192 ecf_flags |= ECF_NOTHROW;
1193 if (bp_unpack_value (&bp, 1))
1194 ecf_flags |= ECF_RETURNS_TWICE;
1195 edge->indirect_info->ecf_flags = ecf_flags;
1196 }
1197 }
1198
1199
1200 /* Read a cgraph from IB using the info in FILE_DATA. */
1201
1202 static VEC(cgraph_node_ptr, heap) *
1203 input_cgraph_1 (struct lto_file_decl_data *file_data,
1204 struct lto_input_block *ib)
1205 {
1206 enum LTO_cgraph_tags tag;
1207 VEC(cgraph_node_ptr, heap) *nodes = NULL;
1208 struct cgraph_node *node;
1209 unsigned i;
1210
1211 tag = streamer_read_enum (ib, LTO_cgraph_tags, LTO_cgraph_last_tag);
1212 order_base = symtab_order;
1213 while (tag)
1214 {
1215 if (tag == LTO_cgraph_edge)
1216 input_edge (ib, nodes, false);
1217 else if (tag == LTO_cgraph_indirect_edge)
1218 input_edge (ib, nodes, true);
1219 else
1220 {
1221 node = input_node (file_data, ib, tag, nodes);
1222 if (node == NULL || node->symbol.decl == NULL_TREE)
1223 internal_error ("bytecode stream: found empty cgraph node");
1224 VEC_safe_push (cgraph_node_ptr, heap, nodes, node);
1225 lto_cgraph_encoder_encode (file_data->cgraph_node_encoder, node);
1226 }
1227
1228 tag = streamer_read_enum (ib, LTO_cgraph_tags, LTO_cgraph_last_tag);
1229 }
1230
1231 lto_input_toplevel_asms (file_data, order_base);
1232
1233 /* AUX pointers should be all non-zero for nodes read from the stream. */
1234 #ifdef ENABLE_CHECKING
1235 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1236 gcc_assert (node->symbol.aux);
1237 #endif
1238 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1239 {
1240 int ref = (int) (intptr_t) node->global.inlined_to;
1241
1242 /* We share declarations of builtins, so we may read the same node twice. */
1243 if (!node->symbol.aux)
1244 continue;
1245 node->symbol.aux = NULL;
1246
1247 /* Fixup inlined_to from reference to pointer. */
1248 if (ref != LCC_NOT_FOUND)
1249 node->global.inlined_to = VEC_index (cgraph_node_ptr, nodes, ref);
1250 else
1251 node->global.inlined_to = NULL;
1252
1253 ref = (int) (intptr_t) node->symbol.same_comdat_group;
1254
1255 /* Fixup same_comdat_group from reference to pointer. */
1256 if (ref != LCC_NOT_FOUND)
1257 node->symbol.same_comdat_group = (symtab_node)VEC_index (cgraph_node_ptr, nodes, ref);
1258 else
1259 node->symbol.same_comdat_group = NULL;
1260 }
1261 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1262 node->symbol.aux = (void *)1;
1263 return nodes;
1264 }
1265
1266 /* Read a varpool from IB using the info in FILE_DATA. */
1267
1268 static VEC(varpool_node_ptr, heap) *
1269 input_varpool_1 (struct lto_file_decl_data *file_data,
1270 struct lto_input_block *ib)
1271 {
1272 unsigned HOST_WIDE_INT len;
1273 VEC(varpool_node_ptr, heap) *varpool = NULL;
1274 int i;
1275 struct varpool_node *node;
1276
1277 len = streamer_read_uhwi (ib);
1278 while (len)
1279 {
1280 VEC_safe_push (varpool_node_ptr, heap, varpool,
1281 input_varpool_node (file_data, ib));
1282 len--;
1283 }
1284 #ifdef ENABLE_CHECKING
1285 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1286 gcc_assert (!node->symbol.aux);
1287 #endif
1288 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1289 {
1290 int ref = (int) (intptr_t) node->symbol.same_comdat_group;
1291 /* We share declarations of builtins, so we may read the same node twice. */
1292 if (node->symbol.aux)
1293 continue;
1294 node->symbol.aux = (void *)1;
1295
1296 /* Fixup same_comdat_group from reference to pointer. */
1297 if (ref != LCC_NOT_FOUND)
1298 node->symbol.same_comdat_group = (symtab_node)VEC_index (varpool_node_ptr, varpool, ref);
1299 else
1300 node->symbol.same_comdat_group = NULL;
1301 }
1302 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1303 node->symbol.aux = NULL;
1304 return varpool;
1305 }
1306
1307 /* Input ipa_refs. */
1308
1309 static void
1310 input_refs (struct lto_input_block *ib,
1311 VEC(cgraph_node_ptr, heap) *nodes,
1312 VEC(varpool_node_ptr, heap) *varpool)
1313 {
1314 int count;
1315 int idx;
1316 while (true)
1317 {
1318 struct cgraph_node *node;
1319 count = streamer_read_uhwi (ib);
1320 if (!count)
1321 break;
1322 idx = streamer_read_uhwi (ib);
1323 node = VEC_index (cgraph_node_ptr, nodes, idx);
1324 while (count)
1325 {
1326 input_ref (ib, (symtab_node) node, nodes, varpool);
1327 count--;
1328 }
1329 }
1330 while (true)
1331 {
1332 struct varpool_node *node;
1333 count = streamer_read_uhwi (ib);
1334 if (!count)
1335 break;
1336 node = VEC_index (varpool_node_ptr, varpool,
1337 streamer_read_uhwi (ib));
1338 while (count)
1339 {
1340 input_ref (ib, (symtab_node) node, nodes, varpool);
1341 count--;
1342 }
1343 }
1344 }
1345
1346
1347 static struct gcov_ctr_summary lto_gcov_summary;
1348
1349 /* Input profile_info from IB. */
1350 static void
1351 input_profile_summary (struct lto_input_block *ib,
1352 struct lto_file_decl_data *file_data)
1353 {
1354 unsigned int runs = streamer_read_uhwi (ib);
1355 if (runs)
1356 {
1357 file_data->profile_info.runs = runs;
1358 file_data->profile_info.sum_max = streamer_read_uhwi (ib);
1359 }
1360
1361 }
1362
1363 /* Rescale profile summaries to the same number of runs in the whole unit. */
1364
1365 static void
1366 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1367 {
1368 struct lto_file_decl_data *file_data;
1369 unsigned int j;
1370 gcov_unsigned_t max_runs = 0;
1371 struct cgraph_node *node;
1372 struct cgraph_edge *edge;
1373
1374 /* Find the unit with the maximal number of runs. If we ever get serious about
1375 roundoff errors, we might also consider computing the least common
1376 multiple. */
1377 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1378 if (max_runs < file_data->profile_info.runs)
1379 max_runs = file_data->profile_info.runs;
1380
1381 if (!max_runs)
1382 return;
1383
1384 /* Simple overflow check. We probably don't need to support that many train
1385 runs. Such a large value probably implies data corruption anyway. */
1386 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1387 {
1388 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1389 INT_MAX / REG_BR_PROB_BASE);
1390 return;
1391 }
1392
1393 profile_info = &lto_gcov_summary;
1394 lto_gcov_summary.runs = max_runs;
1395 lto_gcov_summary.sum_max = 0;
1396
1397 /* Rescale all units to the maximal number of runs.
1398 sum_max cannot be easily merged, as we have no idea what files come from
1399 the same run. We do not use the info anyway, so leave it 0. */
1400 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1401 if (file_data->profile_info.runs)
1402 {
1403 int scale = ((REG_BR_PROB_BASE * max_runs
1404 + file_data->profile_info.runs / 2)
1405 / file_data->profile_info.runs);
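/* For example, assuming REG_BR_PROB_BASE is 10000, a unit with 3 runs
   in a link where max_runs is 10 gets scale = (10000 * 10 + 1) / 3
   = 33333, so a sum_max of 6 is rescaled below to
   (6 * 33333 + 5000) / 10000 = 20, i.e. roughly 6 * 10 / 3.  */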
1406 lto_gcov_summary.sum_max = MAX (lto_gcov_summary.sum_max,
1407 (file_data->profile_info.sum_max
1408 * scale
1409 + REG_BR_PROB_BASE / 2)
1410 / REG_BR_PROB_BASE);
1411 }
1412
1413 /* Watch roundoff errors. */
1414 if (lto_gcov_summary.sum_max < max_runs)
1415 lto_gcov_summary.sum_max = max_runs;
1416
1417 /* If merging already happened at WPA time, we are done. */
1418 if (flag_ltrans)
1419 return;
1420
1421 /* Now compute count_materialization_scale of each node.
1422 During LTRANS we already have values of count_materialization_scale
1423 computed, so just update them. */
1424 FOR_EACH_FUNCTION (node)
1425 if (node->symbol.lto_file_data
1426 && node->symbol.lto_file_data->profile_info.runs)
1427 {
1428 int scale;
1429
1430 scale =
1431 ((node->count_materialization_scale * max_runs
1432 + node->symbol.lto_file_data->profile_info.runs / 2)
1433 / node->symbol.lto_file_data->profile_info.runs);
1434 node->count_materialization_scale = scale;
1435 if (scale < 0)
1436 fatal_error ("Profile information in %s corrupted",
1437 file_data->file_name);
1438
1439 if (scale == REG_BR_PROB_BASE)
1440 continue;
1441 for (edge = node->callees; edge; edge = edge->next_callee)
1442 edge->count = ((edge->count * scale + REG_BR_PROB_BASE / 2)
1443 / REG_BR_PROB_BASE);
1444 node->count = ((node->count * scale + REG_BR_PROB_BASE / 2)
1445 / REG_BR_PROB_BASE);
1446 }
1447 }
1448
1449 /* Input and merge the cgraph from each of the .o files passed to
1450 lto1. */
1451
1452 void
1453 input_cgraph (void)
1454 {
1455 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1456 struct lto_file_decl_data *file_data;
1457 unsigned int j = 0;
1458 struct cgraph_node *node;
1459
1460 while ((file_data = file_data_vec[j++]))
1461 {
1462 const char *data;
1463 size_t len;
1464 struct lto_input_block *ib;
1465 VEC(cgraph_node_ptr, heap) *nodes;
1466 VEC(varpool_node_ptr, heap) *varpool;
1467
1468 ib = lto_create_simple_input_block (file_data, LTO_section_cgraph,
1469 &data, &len);
1470 if (!ib)
1471 fatal_error ("cannot find LTO cgraph in %s", file_data->file_name);
1472 input_profile_summary (ib, file_data);
1473 file_data->cgraph_node_encoder = lto_cgraph_encoder_new ();
1474 nodes = input_cgraph_1 (file_data, ib);
1475 lto_destroy_simple_input_block (file_data, LTO_section_cgraph,
1476 ib, data, len);
1477
1478 ib = lto_create_simple_input_block (file_data, LTO_section_varpool,
1479 &data, &len);
1480 if (!ib)
1481 fatal_error ("cannot find LTO varpool in %s", file_data->file_name);
1482 varpool = input_varpool_1 (file_data, ib);
1483 lto_destroy_simple_input_block (file_data, LTO_section_varpool,
1484 ib, data, len);
1485
1486 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1487 &data, &len);
1488 if (!ib)
1489 fatal_error("cannot find LTO section refs in %s", file_data->file_name);
1490 input_refs (ib, nodes, varpool);
1491 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1492 ib, data, len);
1493 if (flag_ltrans)
1494 input_cgraph_opt_summary (nodes);
1495 VEC_free (cgraph_node_ptr, heap, nodes);
1496 VEC_free (varpool_node_ptr, heap, varpool);
1497 }
1498
1499 merge_profile_summaries (file_data_vec);
1500
1501 /* Clear out the aux field that was used to store enough state to
1502 tell which nodes should be overwritten. */
1503 FOR_EACH_FUNCTION (node)
1504 {
1505 /* Some nodes may have been created by cgraph_node. This
1506 happens when the callgraph contains nested functions. If the
1507 node for the parent function was never emitted to the gimple
1508 file, cgraph_node will create a node for it when setting the
1509 context of the nested function. */
1510 if (node->symbol.lto_file_data)
1511 node->symbol.aux = NULL;
1512 }
1513 }
1514
1515 /* True when we need optimization summary for NODE. */
1516
1517 static int
1518 output_cgraph_opt_summary_p (struct cgraph_node *node,
1519 cgraph_node_set set ATTRIBUTE_UNUSED)
1520 {
1521 return (node->clone_of
1522 && (node->clone.tree_map
1523 || node->clone.args_to_skip
1524 || node->clone.combined_args_to_skip));
1525 }
1526
1527 /* Output optimization summary for EDGE to OB. */
1528 static void
1529 output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
1530 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
1531 {
1532 }
1533
1534 /* Output optimization summary for NODE to OB. */
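/* Sketch of the per-node record written here and read back by
   input_node_opt_summary:

     <number of args_to_skip bits> <bit indices...>
     <number of combined_args_to_skip bits> <bit indices...>
     <number of tree_map entries>
       for each entry: <parameter number> <new tree>
                       <bitpack: replace_p, ref_p>
     followed by the (currently empty) per-edge summaries.  */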
1535
1536 static void
1537 output_node_opt_summary (struct output_block *ob,
1538 struct cgraph_node *node,
1539 cgraph_node_set set)
1540 {
1541 unsigned int index;
1542 bitmap_iterator bi;
1543 struct ipa_replace_map *map;
1544 struct bitpack_d bp;
1545 int i;
1546 struct cgraph_edge *e;
1547
1548 if (node->clone.args_to_skip)
1549 {
1550 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.args_to_skip));
1551 EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
1552 streamer_write_uhwi (ob, index);
1553 }
1554 else
1555 streamer_write_uhwi (ob, 0);
1556 if (node->clone.combined_args_to_skip)
1557 {
1558 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.combined_args_to_skip));
1559 EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
1560 streamer_write_uhwi (ob, index);
1561 }
1562 else
1563 streamer_write_uhwi (ob, 0);
1564 streamer_write_uhwi (ob, VEC_length (ipa_replace_map_p,
1565 node->clone.tree_map));
1566 FOR_EACH_VEC_ELT (ipa_replace_map_p, node->clone.tree_map, i, map)
1567 {
1568 int parm_num;
1569 tree parm;
1570
1571 for (parm_num = 0, parm = DECL_ARGUMENTS (node->symbol.decl); parm;
1572 parm = DECL_CHAIN (parm), parm_num++)
1573 if (map->old_tree == parm)
1574 break;
1575 /* At the moment we assume all old trees are PARM_DECLs, because we have no
1576 mechanism to store function-local declarations into summaries. */
1577 gcc_assert (parm);
1578 streamer_write_uhwi (ob, parm_num);
1579 stream_write_tree (ob, map->new_tree, true);
1580 bp = bitpack_create (ob->main_stream);
1581 bp_pack_value (&bp, map->replace_p, 1);
1582 bp_pack_value (&bp, map->ref_p, 1);
1583 streamer_write_bitpack (&bp);
1584 }
1585
1586 if (cgraph_node_in_set_p (node, set))
1587 {
1588 for (e = node->callees; e; e = e->next_callee)
1589 output_edge_opt_summary (ob, e);
1590 for (e = node->indirect_calls; e; e = e->next_callee)
1591 output_edge_opt_summary (ob, e);
1592 }
1593 }
1594
1595 /* Output the optimization summaries stored in the callgraph.
1596 At the moment this is the clone info structure. */
1597
1598 static void
1599 output_cgraph_opt_summary (cgraph_node_set set)
1600 {
1601 struct cgraph_node *node;
1602 int i, n_nodes;
1603 lto_cgraph_encoder_t encoder;
1604 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1605 unsigned count = 0;
1606
1607 ob->cgraph_node = NULL;
1608 encoder = ob->decl_state->cgraph_node_encoder;
1609 n_nodes = lto_cgraph_encoder_size (encoder);
1610 for (i = 0; i < n_nodes; i++)
1611 if (output_cgraph_opt_summary_p (lto_cgraph_encoder_deref (encoder, i),
1612 set))
1613 count++;
1614 streamer_write_uhwi (ob, count);
1615 for (i = 0; i < n_nodes; i++)
1616 {
1617 node = lto_cgraph_encoder_deref (encoder, i);
1618 if (output_cgraph_opt_summary_p (node, set))
1619 {
1620 streamer_write_uhwi (ob, i);
1621 output_node_opt_summary (ob, node, set);
1622 }
1623 }
1624 produce_asm (ob, NULL);
1625 destroy_output_block (ob);
1626 }
1627
1628 /* Input optimisation summary of EDGE. */
1629
1630 static void
1631 input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
1632 struct lto_input_block *ib_main ATTRIBUTE_UNUSED)
1633 {
1634 }
1635
1636 /* Input optimisation summary of NODE. */
1637
1638 static void
1639 input_node_opt_summary (struct cgraph_node *node,
1640 struct lto_input_block *ib_main,
1641 struct data_in *data_in)
1642 {
1643 int i;
1644 int count;
1645 int bit;
1646 struct bitpack_d bp;
1647 struct cgraph_edge *e;
1648
1649 count = streamer_read_uhwi (ib_main);
1650 if (count)
1651 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
1652 for (i = 0; i < count; i++)
1653 {
1654 bit = streamer_read_uhwi (ib_main);
1655 bitmap_set_bit (node->clone.args_to_skip, bit);
1656 }
1657 count = streamer_read_uhwi (ib_main);
1658 if (count)
1659 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
1660 for (i = 0; i < count; i++)
1661 {
1662 bit = streamer_read_uhwi (ib_main);
1663 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
1664 }
1665 count = streamer_read_uhwi (ib_main);
1666 for (i = 0; i < count; i++)
1667 {
1668 int parm_num;
1669 tree parm;
1670 struct ipa_replace_map *map = ggc_alloc_ipa_replace_map ();
1671
1672 VEC_safe_push (ipa_replace_map_p, gc, node->clone.tree_map, map);
1673 for (parm_num = 0, parm = DECL_ARGUMENTS (node->symbol.decl); parm_num;
1674 parm = DECL_CHAIN (parm))
1675 parm_num --;
1676 map->parm_num = streamer_read_uhwi (ib_main);
1677 map->old_tree = NULL;
1678 map->new_tree = stream_read_tree (ib_main, data_in);
1679 bp = streamer_read_bitpack (ib_main);
1680 map->replace_p = bp_unpack_value (&bp, 1);
1681 map->ref_p = bp_unpack_value (&bp, 1);
1682 }
1683 for (e = node->callees; e; e = e->next_callee)
1684 input_edge_opt_summary (e, ib_main);
1685 for (e = node->indirect_calls; e; e = e->next_callee)
1686 input_edge_opt_summary (e, ib_main);
1687 }
1688
1689 /* Read section in file FILE_DATA of length LEN with data DATA. */
1690
1691 static void
1692 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
1693 const char *data, size_t len, VEC (cgraph_node_ptr,
1694 heap) * nodes)
1695 {
1696 const struct lto_function_header *header =
1697 (const struct lto_function_header *) data;
1698 const int cfg_offset = sizeof (struct lto_function_header);
1699 const int main_offset = cfg_offset + header->cfg_size;
1700 const int string_offset = main_offset + header->main_size;
1701 struct data_in *data_in;
1702 struct lto_input_block ib_main;
1703 unsigned int i;
1704 unsigned int count;
1705
1706 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
1707 header->main_size);
1708
1709 data_in =
1710 lto_data_in_create (file_data, (const char *) data + string_offset,
1711 header->string_size, NULL);
1712 count = streamer_read_uhwi (&ib_main);
1713
1714 for (i = 0; i < count; i++)
1715 {
1716 int ref = streamer_read_uhwi (&ib_main);
1717 input_node_opt_summary (VEC_index (cgraph_node_ptr, nodes, ref),
1718 &ib_main, data_in);
1719 }
1720 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
1721 len);
1722 lto_data_in_delete (data_in);
1723 }
1724
1725 /* Input optimization summary of cgraph. */
1726
1727 static void
1728 input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes)
1729 {
1730 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1731 struct lto_file_decl_data *file_data;
1732 unsigned int j = 0;
1733
1734 while ((file_data = file_data_vec[j++]))
1735 {
1736 size_t len;
1737 const char *data =
1738 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
1739 &len);
1740
1741 if (data)
1742 input_cgraph_opt_section (file_data, data, len, nodes);
1743 }
1744 }