gcc/lto-cgraph.c
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright 2009, 2010, 2011 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "expr.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "hashtab.h"
33 #include "langhooks.h"
34 #include "basic-block.h"
35 #include "tree-flow.h"
36 #include "cgraph.h"
37 #include "function.h"
38 #include "ggc.h"
39 #include "diagnostic-core.h"
40 #include "except.h"
41 #include "vec.h"
42 #include "timevar.h"
43 #include "output.h"
44 #include "pointer-set.h"
45 #include "lto-streamer.h"
46 #include "data-streamer.h"
47 #include "tree-streamer.h"
48 #include "gcov-io.h"
49
50 static void output_varpool (cgraph_node_set, varpool_node_set);
51 static void output_cgraph_opt_summary (cgraph_node_set set);
52 static void input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes);
53
54 /* Number of LDPR values known to GCC. */
55 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
56
57 /* All node orders are offset by ORDER_BASE.  */
58 static int order_base;
59
60 /* Cgraph streaming is organized as a set of records whose type
61 is indicated by a tag.  */
62 enum LTO_cgraph_tags
63 {
64 /* Must leave 0 for the stopper. */
65
66 /* Cgraph node without body available. */
67 LTO_cgraph_unavail_node = 1,
68 /* Cgraph node with function body. */
69 LTO_cgraph_analyzed_node,
70 /* Cgraph edges. */
71 LTO_cgraph_edge,
72 LTO_cgraph_indirect_edge,
73 LTO_cgraph_last_tag
74 };
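
/* For orientation, a rough (informal) sketch of the cgraph section as
   produced by output_cgraph and consumed by input_cgraph below:

     profile summary            (see output_profile_summary)
     node and edge records, each introduced by one of the tags above
     0                          (the stopper tag)

   The refs and varpool sections are streamed separately; see output_refs
   and output_varpool.  */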
75
76 /* Create a new cgraph encoder. */
77
78 lto_cgraph_encoder_t
79 lto_cgraph_encoder_new (void)
80 {
81 lto_cgraph_encoder_t encoder = XCNEW (struct lto_cgraph_encoder_d);
82 encoder->map = pointer_map_create ();
83 encoder->nodes = NULL;
84 encoder->body = pointer_set_create ();
85 return encoder;
86 }
87
88
89 /* Delete ENCODER and its components. */
90
91 void
92 lto_cgraph_encoder_delete (lto_cgraph_encoder_t encoder)
93 {
94 VEC_free (cgraph_node_ptr, heap, encoder->nodes);
95 pointer_map_destroy (encoder->map);
96 pointer_set_destroy (encoder->body);
97 free (encoder);
98 }
99
100
101 /* Return the existing reference number of NODE in the cgraph encoder
102 ENCODER.  Assign a new reference if this is the first time
103 NODE is encoded.  */
104
105 int
106 lto_cgraph_encoder_encode (lto_cgraph_encoder_t encoder,
107 struct cgraph_node *node)
108 {
109 int ref;
110 void **slot;
111
112 slot = pointer_map_contains (encoder->map, node);
113 if (!slot)
114 {
115 ref = VEC_length (cgraph_node_ptr, encoder->nodes);
116 slot = pointer_map_insert (encoder->map, node);
117 *slot = (void *) (intptr_t) ref;
118 VEC_safe_push (cgraph_node_ptr, heap, encoder->nodes, node);
119 }
120 else
121 ref = (int) (intptr_t) *slot;
122
123 return ref;
124 }
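
/* Note (an informal invariant implied by the code above): the reference
   returned by lto_cgraph_encoder_encode is simply NODE's index in
   ENCODER->nodes, assigned in encoding order, so

     lto_cgraph_encoder_deref (encoder,
                               lto_cgraph_encoder_encode (encoder, node))
       == node

   for every encoded node.  The varpool encoder below behaves the same way.  */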
125
126 #define LCC_NOT_FOUND (-1)
127
128 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
129 or LCC_NOT_FOUND if it is not there. */
130
131 int
132 lto_cgraph_encoder_lookup (lto_cgraph_encoder_t encoder,
133 struct cgraph_node *node)
134 {
135 void **slot = pointer_map_contains (encoder->map, node);
136 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
137 }
138
139
140 /* Return the cgraph node corresponding to REF using ENCODER. */
141
142 struct cgraph_node *
143 lto_cgraph_encoder_deref (lto_cgraph_encoder_t encoder, int ref)
144 {
145 if (ref == LCC_NOT_FOUND)
146 return NULL;
147
148 return VEC_index (cgraph_node_ptr, encoder->nodes, ref);
149 }
150
151
152 /* Return TRUE if we should encode the body of NODE (if any).  */
153
154 bool
155 lto_cgraph_encoder_encode_body_p (lto_cgraph_encoder_t encoder,
156 struct cgraph_node *node)
157 {
158 return pointer_set_contains (encoder->body, node);
159 }
160
161 /* Specify that we should encode the body of NODE (if any).  */
162
163 static void
164 lto_set_cgraph_encoder_encode_body (lto_cgraph_encoder_t encoder,
165 struct cgraph_node *node)
166 {
167 pointer_set_insert (encoder->body, node);
168 }
169
170 /* Create a new varpool encoder. */
171
172 lto_varpool_encoder_t
173 lto_varpool_encoder_new (void)
174 {
175 lto_varpool_encoder_t encoder = XCNEW (struct lto_varpool_encoder_d);
176 encoder->map = pointer_map_create ();
177 encoder->initializer = pointer_set_create ();
178 encoder->nodes = NULL;
179 return encoder;
180 }
181
182
183 /* Delete ENCODER and its components. */
184
185 void
186 lto_varpool_encoder_delete (lto_varpool_encoder_t encoder)
187 {
188 VEC_free (varpool_node_ptr, heap, encoder->nodes);
189 pointer_map_destroy (encoder->map);
190 pointer_set_destroy (encoder->initializer);
191 free (encoder);
192 }
193
194
195 /* Return the existing reference number of NODE in the varpool encoder
196 ENCODER.  Assign a new reference if this is the first time
197 NODE is encoded.  */
198
199 int
200 lto_varpool_encoder_encode (lto_varpool_encoder_t encoder,
201 struct varpool_node *node)
202 {
203 int ref;
204 void **slot;
205
206 slot = pointer_map_contains (encoder->map, node);
207 if (!slot)
208 {
209 ref = VEC_length (varpool_node_ptr, encoder->nodes);
210 slot = pointer_map_insert (encoder->map, node);
211 *slot = (void *) (intptr_t) ref;
212 VEC_safe_push (varpool_node_ptr, heap, encoder->nodes, node);
213 }
214 else
215 ref = (int) (intptr_t) *slot;
216
217 return ref;
218 }
219
220 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
221 or LCC_NOT_FOUND if it is not there. */
222
223 int
224 lto_varpool_encoder_lookup (lto_varpool_encoder_t encoder,
225 struct varpool_node *node)
226 {
227 void **slot = pointer_map_contains (encoder->map, node);
228 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
229 }
230
231
232 /* Return the varpool node corresponding to REF using ENCODER. */
233
234 struct varpool_node *
235 lto_varpool_encoder_deref (lto_varpool_encoder_t encoder, int ref)
236 {
237 if (ref == LCC_NOT_FOUND)
238 return NULL;
239
240 return VEC_index (varpool_node_ptr, encoder->nodes, ref);
241 }
242
243
244 /* Return TRUE if we should encode initializer of NODE (if any). */
245
246 bool
247 lto_varpool_encoder_encode_initializer_p (lto_varpool_encoder_t encoder,
248 struct varpool_node *node)
249 {
250 return pointer_set_contains (encoder->initializer, node);
251 }
252
253 /* Specify that we should encode the initializer of NODE (if any).  */
254
255 static void
256 lto_set_varpool_encoder_encode_initializer (lto_varpool_encoder_t encoder,
257 struct varpool_node *node)
258 {
259 pointer_set_insert (encoder->initializer, node);
260 }
261
262 /* Output the cgraph EDGE to OB using ENCODER. */
263
264 static void
265 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
266 lto_cgraph_encoder_t encoder)
267 {
268 unsigned int uid;
269 intptr_t ref;
270 struct bitpack_d bp;
271
272 if (edge->indirect_unknown_callee)
273 streamer_write_enum (ob->main_stream, LTO_cgraph_tags, LTO_cgraph_last_tag,
274 LTO_cgraph_indirect_edge);
275 else
276 streamer_write_enum (ob->main_stream, LTO_cgraph_tags, LTO_cgraph_last_tag,
277 LTO_cgraph_edge);
278
279 ref = lto_cgraph_encoder_lookup (encoder, edge->caller);
280 gcc_assert (ref != LCC_NOT_FOUND);
281 streamer_write_hwi_stream (ob->main_stream, ref);
282
283 if (!edge->indirect_unknown_callee)
284 {
285 ref = lto_cgraph_encoder_lookup (encoder, edge->callee);
286 gcc_assert (ref != LCC_NOT_FOUND);
287 streamer_write_hwi_stream (ob->main_stream, ref);
288 }
289
290 streamer_write_hwi_stream (ob->main_stream, edge->count);
291
292 bp = bitpack_create (ob->main_stream);
293 uid = (!gimple_has_body_p (edge->caller->symbol.decl)
294 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt));
295 bp_pack_enum (&bp, cgraph_inline_failed_enum,
296 CIF_N_REASONS, edge->inline_failed);
297 bp_pack_var_len_unsigned (&bp, uid);
298 bp_pack_var_len_unsigned (&bp, edge->frequency);
299 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
300 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
301 bp_pack_value (&bp, edge->can_throw_external, 1);
302 if (edge->indirect_unknown_callee)
303 {
304 int flags = edge->indirect_info->ecf_flags;
305 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
306 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
307 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
308 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
309 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
310 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
311 /* Flags that should not appear on indirect calls. */
312 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
313 | ECF_MAY_BE_ALLOCA
314 | ECF_SIBCALL
315 | ECF_LEAF
316 | ECF_NOVOPS)));
317 }
318 streamer_write_bitpack (&bp);
319 }
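
/* For reference, an edge record as written above (and read back in the same
   order by input_edge) consists of: the tag, the caller's encoder reference,
   the callee's reference (direct edges only), the execution count, and a
   bitpack holding inline_failed, the call statement uid, the frequency, a
   few flags and, for indirect edges, the relevant ECF_* bits.  */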
320
321 /* Return true if LIST contains references from other partitions.  */
322
323 bool
324 referenced_from_other_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
325 varpool_node_set vset)
326 {
327 int i;
328 struct ipa_ref *ref;
329 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
330 {
331 if (ref->refering_type == IPA_REF_CGRAPH)
332 {
333 if (ipa_ref_refering_node (ref)->symbol.in_other_partition
334 || !cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
335 return true;
336 }
337 else
338 {
339 if (ipa_ref_refering_varpool_node (ref)->symbol.in_other_partition
340 || !varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
341 vset))
342 return true;
343 }
344 }
345 return false;
346 }
347
348 /* Return true when NODE is reachable from another partition.  */
349
350 bool
351 reachable_from_other_partition_p (struct cgraph_node *node, cgraph_node_set set)
352 {
353 struct cgraph_edge *e;
354 if (!node->analyzed)
355 return false;
356 if (node->global.inlined_to)
357 return false;
358 for (e = node->callers; e; e = e->next_caller)
359 if (e->caller->symbol.in_other_partition
360 || !cgraph_node_in_set_p (e->caller, set))
361 return true;
362 return false;
363 }
364
365 /* Return true if LIST contains references from this partition.  */
366
367 bool
368 referenced_from_this_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
369 varpool_node_set vset)
370 {
371 int i;
372 struct ipa_ref *ref;
373 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
374 {
375 if (ref->refering_type == IPA_REF_CGRAPH)
376 {
377 if (cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
378 return true;
379 }
380 else
381 {
382 if (varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
383 vset))
384 return true;
385 }
386 }
387 return false;
388 }
389
390 /* Return true when NODE is reachable from this partition.  */
391
392 bool
393 reachable_from_this_partition_p (struct cgraph_node *node, cgraph_node_set set)
394 {
395 struct cgraph_edge *e;
396 for (e = node->callers; e; e = e->next_caller)
397 if (cgraph_node_in_set_p (e->caller, set))
398 return true;
399 return false;
400 }
401
402 /* Output the cgraph NODE to OB.  ENCODER is used to find the
403 reference number of NODE->inlined_to.  SET is the set of nodes we
404 are writing to the current file.  If NODE is not in SET, then NODE
405 is a boundary of a cgraph_node_set and we pretend NODE just has a
406 decl and no callees.  VSET is the varpool node set used together with
407 SET to decide whether NODE is referenced from other partitions.  */
409
410 static void
411 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
412 lto_cgraph_encoder_t encoder, cgraph_node_set set,
413 varpool_node_set vset)
414 {
415 unsigned int tag;
416 struct bitpack_d bp;
417 bool boundary_p;
418 intptr_t ref;
419 bool in_other_partition = false;
420 struct cgraph_node *clone_of;
421
422 boundary_p = !cgraph_node_in_set_p (node, set);
423
424 if (node->analyzed && !boundary_p)
425 tag = LTO_cgraph_analyzed_node;
426 else
427 tag = LTO_cgraph_unavail_node;
428
429 streamer_write_enum (ob->main_stream, LTO_cgraph_tags, LTO_cgraph_last_tag,
430 tag);
431 streamer_write_hwi_stream (ob->main_stream, node->symbol.order);
432
433 /* In WPA mode, we only output part of the call-graph.  Also, we
434 fake cgraph node attributes.  There are two cases we care about.
435
436 Boundary nodes: There are nodes that are not part of SET but are
437 called from within SET.  We artificially make them look like
438 externally visible nodes with no function body.
439
440 Cherry-picked nodes: These are nodes we pulled from other
441 translation units into SET during IPA-inlining.  We mark them as
442 local static nodes to prevent clashes with other local statics.  */
443 if (boundary_p && node->analyzed)
444 {
445 /* Inline clones cannot be part of the boundary.
446 gcc_assert (!node->global.inlined_to);
447
448 FIXME: At the moment they can be, when a partition contains an inline
449 clone that is a clone of an inline clone from outside the partition.  We can
450 reshape the clone tree and make another node the root, but it
451 needs a bit of extra work and will be promptly done by cgraph_remove_node
452 after reading back.  */
453 in_other_partition = 1;
454 }
455
456 clone_of = node->clone_of;
457 while (clone_of
458 && (ref = lto_cgraph_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
459 if (clone_of->prev_sibling_clone)
460 clone_of = clone_of->prev_sibling_clone;
461 else
462 clone_of = clone_of->clone_of;
463
464 if (tag == LTO_cgraph_analyzed_node)
465 gcc_assert (clone_of || !node->clone_of);
466 if (!clone_of)
467 streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
468 else
469 streamer_write_hwi_stream (ob->main_stream, ref);
470
471
472 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->symbol.decl);
473 streamer_write_hwi_stream (ob->main_stream, node->count);
474 streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
475
476 if (tag == LTO_cgraph_analyzed_node)
477 {
478 if (node->global.inlined_to)
479 {
480 ref = lto_cgraph_encoder_lookup (encoder, node->global.inlined_to);
481 gcc_assert (ref != LCC_NOT_FOUND);
482 }
483 else
484 ref = LCC_NOT_FOUND;
485
486 streamer_write_hwi_stream (ob->main_stream, ref);
487 }
488
489 if (node->symbol.same_comdat_group && !boundary_p)
490 {
491 ref = lto_cgraph_encoder_lookup (encoder,
492 cgraph (node->symbol.same_comdat_group));
493 gcc_assert (ref != LCC_NOT_FOUND);
494 }
495 else
496 ref = LCC_NOT_FOUND;
497 streamer_write_hwi_stream (ob->main_stream, ref);
498
499 bp = bitpack_create (ob->main_stream);
500 bp_pack_value (&bp, node->local.local, 1);
501 bp_pack_value (&bp, node->symbol.externally_visible, 1);
502 bp_pack_value (&bp, node->local.finalized, 1);
503 bp_pack_value (&bp, node->local.versionable, 1);
504 bp_pack_value (&bp, node->local.can_change_signature, 1);
505 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
506 bp_pack_value (&bp, node->needed, 1);
507 bp_pack_value (&bp, node->symbol.address_taken, 1);
508 bp_pack_value (&bp, node->abstract_and_needed, 1);
509 bp_pack_value (&bp, tag == LTO_cgraph_analyzed_node
510 && !DECL_EXTERNAL (node->symbol.decl)
511 && !DECL_COMDAT (node->symbol.decl)
512 && (reachable_from_other_partition_p (node, set)
513 || referenced_from_other_partition_p (&node->symbol.ref_list,
514 set, vset)), 1);
515 bp_pack_value (&bp, node->lowered, 1);
516 bp_pack_value (&bp, in_other_partition, 1);
517 /* Real aliases in a boundary become non-aliases.  However we still stream
518 alias info on weakrefs.
519 TODO: We lose a bit of information here - when we know that a variable is
520 defined in another unit, we may use the alias info to resolve
521 symbol1 != symbol2 type tests that otherwise can only be done for locally
522 defined objects.  */
523 bp_pack_value (&bp, node->alias && (!boundary_p || DECL_EXTERNAL (node->symbol.decl)), 1);
524 bp_pack_value (&bp, node->frequency, 2);
525 bp_pack_value (&bp, node->only_called_at_startup, 1);
526 bp_pack_value (&bp, node->only_called_at_exit, 1);
527 bp_pack_value (&bp, node->tm_clone, 1);
528 bp_pack_value (&bp, node->thunk.thunk_p && !boundary_p, 1);
529 bp_pack_enum (&bp, ld_plugin_symbol_resolution,
530 LDPR_NUM_KNOWN, node->symbol.resolution);
531 streamer_write_bitpack (&bp);
532
533 if (node->thunk.thunk_p && !boundary_p)
534 {
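      /* Pack the thunk flags into a single integer: bit 0 is always set as a
	 marker, bit 1 is this_adjusting and bit 2 is virtual_offset_p.
	 input_node decodes them with (type & 2) and (type & 4).  */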
535 streamer_write_uhwi_stream
536 (ob->main_stream,
537 1 + (node->thunk.this_adjusting != 0) * 2
538 + (node->thunk.virtual_offset_p != 0) * 4);
539 streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
540 streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
541 }
542 if ((node->alias || node->thunk.thunk_p)
543 && (!boundary_p || (node->alias && DECL_EXTERNAL (node->symbol.decl))))
544 {
545 streamer_write_hwi_in_range (ob->main_stream, 0, 1,
546 node->thunk.alias != NULL);
547 if (node->thunk.alias != NULL)
548 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
549 node->thunk.alias);
550 }
551 }
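
/* For reference, the node record written above (mirrored by input_node)
   is: tag, symbol order, clone-of reference, function decl index, count,
   count_materialization_scale, inlined_to reference (analyzed nodes only),
   same_comdat_group reference, a flag bitpack, and optional thunk and
   alias data.  */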
552
553 /* Output the varpool NODE to OB.
554 If NODE is not in VSET, then NODE is a boundary.  */
555
556 static void
557 lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
558 lto_varpool_encoder_t varpool_encoder,
559 cgraph_node_set set, varpool_node_set vset)
560 {
561 bool boundary_p = !varpool_node_in_set_p (node, vset) && node->analyzed;
562 struct bitpack_d bp;
563 int ref;
564
565 streamer_write_hwi_stream (ob->main_stream, node->symbol.order);
566 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->symbol.decl);
567 bp = bitpack_create (ob->main_stream);
568 bp_pack_value (&bp, node->symbol.externally_visible, 1);
569 bp_pack_value (&bp, node->force_output, 1);
570 bp_pack_value (&bp, node->finalized, 1);
571 bp_pack_value (&bp, node->alias, 1);
572 bp_pack_value (&bp, node->alias_of != NULL, 1);
573 gcc_assert (node->finalized || !node->analyzed);
574 gcc_assert (node->needed);
575 /* Constant pool initializers can be de-unified into individual ltrans units.
576 FIXME: Alternatively at -Os we may want to avoid generating the local
577 labels for them and share them across LTRANS partitions.  */
578 if (DECL_IN_CONSTANT_POOL (node->symbol.decl)
579 && !DECL_COMDAT (node->symbol.decl))
580 {
581 bp_pack_value (&bp, 0, 1); /* used_from_other_partition. */
582 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
583 }
584 else
585 {
586 bp_pack_value (&bp, node->analyzed
587 && referenced_from_other_partition_p (&node->symbol.ref_list,
588 set, vset), 1);
589 bp_pack_value (&bp, boundary_p, 1); /* in_other_partition. */
590 }
591 streamer_write_bitpack (&bp);
592 if (node->alias_of)
593 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->alias_of);
594 if (node->symbol.same_comdat_group && !boundary_p)
595 {
596 ref = lto_varpool_encoder_lookup (varpool_encoder,
597 varpool (node->symbol.same_comdat_group));
598 gcc_assert (ref != LCC_NOT_FOUND);
599 }
600 else
601 ref = LCC_NOT_FOUND;
602 streamer_write_hwi_stream (ob->main_stream, ref);
603 streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
604 LDPR_NUM_KNOWN, node->symbol.resolution);
605 }
606
607 /* Output the IPA reference REF to OB, using ENCODER and
608 VARPOOL_ENCODER to look up the referenced nodes.  */
609
610 static void
611 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
612 lto_cgraph_encoder_t encoder,
613 lto_varpool_encoder_t varpool_encoder)
614 {
615 struct bitpack_d bp;
616 bp = bitpack_create (ob->main_stream);
617 bp_pack_value (&bp, ref->refered_type, 1);
618 bp_pack_value (&bp, ref->use, 2);
619 streamer_write_bitpack (&bp);
620 if (ref->refered_type == IPA_REF_CGRAPH)
621 {
622 int nref = lto_cgraph_encoder_lookup (encoder, ipa_ref_node (ref));
623 gcc_assert (nref != LCC_NOT_FOUND);
624 streamer_write_hwi_stream (ob->main_stream, nref);
625 }
626 else
627 {
628 int nref = lto_varpool_encoder_lookup (varpool_encoder,
629 ipa_ref_varpool_node (ref));
630 gcc_assert (nref != LCC_NOT_FOUND);
631 streamer_write_hwi_stream (ob->main_stream, nref);
632 }
633 }
634
635 /* Stream out profile_summary to OB. */
636
637 static void
638 output_profile_summary (struct lto_simple_output_block *ob)
639 {
640 if (profile_info)
641 {
642 /* We do not output num, sum_all and run_max, they are not used by
643 GCC profile feedback and they are difficult to merge from multiple
644 units. */
645 gcc_assert (profile_info->runs);
646 streamer_write_uhwi_stream (ob->main_stream, profile_info->runs);
647 streamer_write_uhwi_stream (ob->main_stream, profile_info->sum_max);
648 }
649 else
650 streamer_write_uhwi_stream (ob->main_stream, 0);
651 }
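
/* The summary is therefore either a single zero (no profile data) or the
   pair (runs, sum_max); input_profile_summary below reads it back in the
   same form.  */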
652
653 /* Add NODE into encoder as well as nodes it is cloned from.
654 Do it in a way so that the node being cloned from appears before its clones.  */
655
656 static void
657 add_node_to (lto_cgraph_encoder_t encoder, struct cgraph_node *node,
658 bool include_body)
659 {
660 if (node->clone_of)
661 add_node_to (encoder, node->clone_of, include_body);
662 else if (include_body)
663 lto_set_cgraph_encoder_encode_body (encoder, node);
664 lto_cgraph_encoder_encode (encoder, node);
665 }
666
667 /* Add all references in LIST to encoders. */
668
669 static void
670 add_references (lto_cgraph_encoder_t encoder,
671 lto_varpool_encoder_t varpool_encoder,
672 struct ipa_ref_list *list)
673 {
674 int i;
675 struct ipa_ref *ref;
676 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
677 if (ref->refered_type == IPA_REF_CGRAPH)
678 add_node_to (encoder, ipa_ref_node (ref), false);
679 else
680 {
681 struct varpool_node *vnode = ipa_ref_varpool_node (ref);
682 lto_varpool_encoder_encode (varpool_encoder, vnode);
683 }
684 }
685
686 /* Output all callees or indirect outgoing edges. EDGE must be the first such
687 edge. */
688
689 static void
690 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
691 struct lto_simple_output_block *ob,
692 lto_cgraph_encoder_t encoder)
693 {
694 if (!edge)
695 return;
696
697 /* Output edges in backward direction, so the reconstructed callgraph matches
698 the original order and it is easy to associate call sites in the IPA pass summaries.  */
699 while (edge->next_callee)
700 edge = edge->next_callee;
701 for (; edge; edge = edge->prev_callee)
702 lto_output_edge (ob, edge, encoder);
703 }
704
705 /* Output the IPA references of all nodes in SET and VSET.  */
706
707 static void
708 output_refs (cgraph_node_set set, varpool_node_set vset,
709 lto_cgraph_encoder_t encoder,
710 lto_varpool_encoder_t varpool_encoder)
711 {
712 cgraph_node_set_iterator csi;
713 varpool_node_set_iterator vsi;
714 struct lto_simple_output_block *ob;
715 int count;
716 struct ipa_ref *ref;
717 int i;
718
719 ob = lto_create_simple_output_block (LTO_section_refs);
720
721 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
722 {
723 struct cgraph_node *node = csi_node (csi);
724
725 count = ipa_ref_list_nreferences (&node->symbol.ref_list);
726 if (count)
727 {
728 streamer_write_uhwi_stream (ob->main_stream, count);
729 streamer_write_uhwi_stream (ob->main_stream,
730 lto_cgraph_encoder_lookup (encoder, node));
731 for (i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list,
732 i, ref); i++)
733 lto_output_ref (ob, ref, encoder, varpool_encoder);
734 }
735 }
736
737 streamer_write_uhwi_stream (ob->main_stream, 0);
738
739 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
740 {
741 struct varpool_node *node = vsi_node (vsi);
742
743 count = ipa_ref_list_nreferences (&node->symbol.ref_list);
744 if (count)
745 {
746 streamer_write_uhwi_stream (ob->main_stream, count);
747 streamer_write_uhwi_stream (ob->main_stream,
748 lto_varpool_encoder_lookup (varpool_encoder,
749 node));
750 for (i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list,
751 i, ref); i++)
752 lto_output_ref (ob, ref, encoder, varpool_encoder);
753 }
754 }
755
756 streamer_write_uhwi_stream (ob->main_stream, 0);
757
758 lto_destroy_simple_output_block (ob);
759 }
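
/* The refs section thus contains two lists, one for function nodes and one
   for variables: each entry is a reference count followed by the owner's
   encoder reference and the references themselves, and each list is
   terminated by a zero count (see input_refs).  */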
760
761 /* Find out all cgraph and varpool nodes we want to encode in the current unit
762 and insert them into the encoders.  */
763 void
764 compute_ltrans_boundary (struct lto_out_decl_state *state,
765 cgraph_node_set set, varpool_node_set vset)
766 {
767 struct cgraph_node *node;
768 cgraph_node_set_iterator csi;
769 varpool_node_set_iterator vsi;
770 struct cgraph_edge *edge;
771 int i;
772 lto_cgraph_encoder_t encoder;
773 lto_varpool_encoder_t varpool_encoder;
774
775 encoder = state->cgraph_node_encoder = lto_cgraph_encoder_new ();
776 varpool_encoder = state->varpool_node_encoder = lto_varpool_encoder_new ();
777
778 /* Go over all the nodes in SET and assign references. */
779 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
780 {
781 node = csi_node (csi);
782 add_node_to (encoder, node, true);
783 add_references (encoder, varpool_encoder, &node->symbol.ref_list);
784 }
785 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
786 {
787 struct varpool_node *vnode = vsi_node (vsi);
788 gcc_assert (!vnode->alias || vnode->alias_of);
789 lto_varpool_encoder_encode (varpool_encoder, vnode);
790 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
791 add_references (encoder, varpool_encoder, &vnode->symbol.ref_list);
792 }
793 /* Also pickle in the initializers of all referenced read-only variables
794 to help folding.  Constant pool variables are not shared, so we must
795 pickle those too.  */
796 for (i = 0; i < lto_varpool_encoder_size (varpool_encoder); i++)
797 {
798 struct varpool_node *vnode = lto_varpool_encoder_deref (varpool_encoder, i);
799 if (DECL_INITIAL (vnode->symbol.decl)
800 && !lto_varpool_encoder_encode_initializer_p (varpool_encoder,
801 vnode)
802 && const_value_known_p (vnode->symbol.decl))
803 {
804 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
805 add_references (encoder, varpool_encoder, &vnode->symbol.ref_list);
806 }
807 else if (vnode->alias || vnode->alias_of)
808 add_references (encoder, varpool_encoder, &vnode->symbol.ref_list);
809 }
810
811 /* Go over all the nodes again to include callees that are not in
812 SET. */
813 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
814 {
815 node = csi_node (csi);
816 for (edge = node->callees; edge; edge = edge->next_callee)
817 {
818 struct cgraph_node *callee = edge->callee;
819 if (!cgraph_node_in_set_p (callee, set))
820 {
821 /* We should have moved all the inlines. */
822 gcc_assert (!callee->global.inlined_to);
823 add_node_to (encoder, callee, false);
824 }
825 }
826 }
827 }
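
/* After compute_ltrans_boundary the encoders thus contain, roughly: the
   nodes of SET (with bodies), the variables of VSET (with initializers),
   everything they reference, the initializers of referenced read-only
   variables whose value is known, and out-of-SET callees as boundary
   nodes without bodies.  */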
828
829 /* Output the part of the cgraph in SET. */
830
831 void
832 output_cgraph (cgraph_node_set set, varpool_node_set vset)
833 {
834 struct cgraph_node *node;
835 struct lto_simple_output_block *ob;
836 cgraph_node_set_iterator csi;
837 int i, n_nodes;
838 lto_cgraph_encoder_t encoder;
839 lto_varpool_encoder_t varpool_encoder;
840 static bool asm_nodes_output = false;
841
842 if (flag_wpa)
843 output_cgraph_opt_summary (set);
844
845 ob = lto_create_simple_output_block (LTO_section_cgraph);
846
847 output_profile_summary (ob);
848
849 /* An encoder for cgraph nodes should have been created by
850 ipa_write_summaries_1. */
851 gcc_assert (ob->decl_state->cgraph_node_encoder);
852 gcc_assert (ob->decl_state->varpool_node_encoder);
853 encoder = ob->decl_state->cgraph_node_encoder;
854 varpool_encoder = ob->decl_state->varpool_node_encoder;
855
856 /* Write out the nodes.  We must output a node before its clones,
857 otherwise when reading back a clone there would be nothing to clone
858 from.  */
859 n_nodes = lto_cgraph_encoder_size (encoder);
860 for (i = 0; i < n_nodes; i++)
861 {
862 node = lto_cgraph_encoder_deref (encoder, i);
863 lto_output_node (ob, node, encoder, set, vset);
864 }
865
866 /* Go over the nodes in SET again to write edges. */
867 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
868 {
869 node = csi_node (csi);
870 output_outgoing_cgraph_edges (node->callees, ob, encoder);
871 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
872 }
873
874 streamer_write_uhwi_stream (ob->main_stream, 0);
875
876 lto_destroy_simple_output_block (ob);
877
878 /* Emit toplevel asms.
879 When doing WPA we must output every asm just once.  Since we do not partition asm
880 nodes at all, output them all to the first output.  This is kind of a hack, but it
881 should work well.  */
882 if (!asm_nodes_output)
883 {
884 asm_nodes_output = true;
885 lto_output_toplevel_asms ();
886 }
887
888 output_varpool (set, vset);
889 output_refs (set, vset, encoder, varpool_encoder);
890 }
891
892 /* Overwrite the information in NODE based on FILE_DATA, TAG and the
893 flags in BP.  This is called either to initialize
894 NODE or to replace the values in it, for instance because the first
895 time we saw it, the function body was not available but now it
896 is.  BP is a bitpack with all the bitflags for NODE read from the
897 stream.  */
898
899 static void
900 input_overwrite_node (struct lto_file_decl_data *file_data,
901 struct cgraph_node *node,
902 enum LTO_cgraph_tags tag,
903 struct bitpack_d *bp)
904 {
905 node->symbol.aux = (void *) tag;
906 node->symbol.lto_file_data = file_data;
907
908 node->local.local = bp_unpack_value (bp, 1);
909 node->symbol.externally_visible = bp_unpack_value (bp, 1);
910 node->local.finalized = bp_unpack_value (bp, 1);
911 node->local.versionable = bp_unpack_value (bp, 1);
912 node->local.can_change_signature = bp_unpack_value (bp, 1);
913 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
914 node->needed = bp_unpack_value (bp, 1);
915 node->symbol.address_taken = bp_unpack_value (bp, 1);
916 node->abstract_and_needed = bp_unpack_value (bp, 1);
917 node->symbol.used_from_other_partition = bp_unpack_value (bp, 1);
918 node->lowered = bp_unpack_value (bp, 1);
919 node->analyzed = tag == LTO_cgraph_analyzed_node;
920 node->symbol.in_other_partition = bp_unpack_value (bp, 1);
921 if (node->symbol.in_other_partition
922 /* Avoid updating decl when we are seeing just inline clone.
923 When inlining function that has functions already inlined into it,
924 we produce clones of inline clones.
925
926 WPA partitioning might put each clone into different unit and
927 we might end up streaming inline clone from other partition
928 to support clone we are interested in. */
929 && (!node->clone_of
930 || node->clone_of->symbol.decl != node->symbol.decl))
931 {
932 DECL_EXTERNAL (node->symbol.decl) = 1;
933 TREE_STATIC (node->symbol.decl) = 0;
934 }
935 node->alias = bp_unpack_value (bp, 1);
936 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
937 node->only_called_at_startup = bp_unpack_value (bp, 1);
938 node->only_called_at_exit = bp_unpack_value (bp, 1);
939 node->tm_clone = bp_unpack_value (bp, 1);
940 node->thunk.thunk_p = bp_unpack_value (bp, 1);
941 node->symbol.resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
942 LDPR_NUM_KNOWN);
943 }
944
945 /* Output the part of the varpool in SET and VSET.  */
946
947 static void
948 output_varpool (cgraph_node_set set, varpool_node_set vset)
949 {
950 struct lto_simple_output_block *ob = lto_create_simple_output_block (LTO_section_varpool);
951 lto_varpool_encoder_t varpool_encoder = ob->decl_state->varpool_node_encoder;
952 int len = lto_varpool_encoder_size (varpool_encoder), i;
953
954 streamer_write_uhwi_stream (ob->main_stream, len);
955
956 /* Write out the variable nodes in the order they were encoded.  */
959 for (i = 0; i < len; i++)
960 {
961 lto_output_varpool_node (ob, lto_varpool_encoder_deref (varpool_encoder, i),
962 varpool_encoder,
963 set, vset);
964 }
965
966 lto_destroy_simple_output_block (ob);
967 }
968
969 /* Read a node from input_block IB.  TAG is the node's tag just read.
970 Return the node read or overwritten.  */
971
972 static struct cgraph_node *
973 input_node (struct lto_file_decl_data *file_data,
974 struct lto_input_block *ib,
975 enum LTO_cgraph_tags tag,
976 VEC(cgraph_node_ptr, heap) *nodes)
977 {
978 tree fn_decl;
979 struct cgraph_node *node;
980 struct bitpack_d bp;
981 unsigned decl_index;
982 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
983 int clone_ref;
984 int order;
985
986 order = streamer_read_hwi (ib) + order_base;
987 clone_ref = streamer_read_hwi (ib);
988
989 decl_index = streamer_read_uhwi (ib);
990 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
991
992 if (clone_ref != LCC_NOT_FOUND)
993 {
994 node = cgraph_clone_node (VEC_index (cgraph_node_ptr, nodes, clone_ref), fn_decl,
995 0, CGRAPH_FREQ_BASE, false, NULL, false);
996 }
997 else
998 node = cgraph_get_create_node (fn_decl);
999
1000 node->symbol.order = order;
1001 if (order >= symtab_order)
1002 symtab_order = order + 1;
1003
1004 node->count = streamer_read_hwi (ib);
1005 node->count_materialization_scale = streamer_read_hwi (ib);
1006
1007 if (tag == LTO_cgraph_analyzed_node)
1008 ref = streamer_read_hwi (ib);
1009
1010 ref2 = streamer_read_hwi (ib);
1011
1012 /* Make sure that we have not read this node before. Nodes that
1013 have already been read will have their tag stored in the 'aux'
1014 field. Since built-in functions can be referenced in multiple
1015 functions, they are expected to be read more than once. */
1016 if (node->symbol.aux && !DECL_BUILT_IN (node->symbol.decl))
1017 internal_error ("bytecode stream: found multiple instances of cgraph "
1018 "node %d", node->uid);
1019
1020 bp = streamer_read_bitpack (ib);
1021 input_overwrite_node (file_data, node, tag, &bp);
1022
1023 /* Store a reference for now, and fix up later to be a pointer. */
1024 node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
1025
1026 /* Store a reference for now, and fix up later to be a pointer. */
1027 node->symbol.same_comdat_group = (symtab_node) (intptr_t) ref2;
1028
1029 if (node->thunk.thunk_p)
1030 {
1031 int type = streamer_read_uhwi (ib);
1032 HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
1033 HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);
1034
1035 node->thunk.fixed_offset = fixed_offset;
1036 node->thunk.this_adjusting = (type & 2);
1037 node->thunk.virtual_value = virtual_value;
1038 node->thunk.virtual_offset_p = (type & 4);
1039 }
1040 if (node->thunk.thunk_p || node->alias)
1041 {
1042 if (streamer_read_hwi_in_range (ib, "alias nonzero flag", 0, 1))
1043 {
1044 decl_index = streamer_read_uhwi (ib);
1045 node->thunk.alias = lto_file_decl_data_get_fn_decl (file_data,
1046 decl_index);
1047 }
1048 }
1049 return node;
1050 }
1051
1052 /* Read a varpool node from input_block IB.
1053 Return the node read or overwritten.  */
1054
1055 static struct varpool_node *
1056 input_varpool_node (struct lto_file_decl_data *file_data,
1057 struct lto_input_block *ib)
1058 {
1059 int decl_index;
1060 tree var_decl;
1061 struct varpool_node *node;
1062 struct bitpack_d bp;
1063 int ref = LCC_NOT_FOUND;
1064 bool non_null_aliasof;
1065 int order;
1066
1067 order = streamer_read_hwi (ib) + order_base;
1068 decl_index = streamer_read_uhwi (ib);
1069 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1070 node = varpool_node (var_decl);
1071 node->symbol.order = order;
1072 if (order >= symtab_order)
1073 symtab_order = order + 1;
1074 node->symbol.lto_file_data = file_data;
1075
1076 bp = streamer_read_bitpack (ib);
1077 node->symbol.externally_visible = bp_unpack_value (&bp, 1);
1078 node->force_output = bp_unpack_value (&bp, 1);
1079 node->finalized = bp_unpack_value (&bp, 1);
1080 node->alias = bp_unpack_value (&bp, 1);
1081 non_null_aliasof = bp_unpack_value (&bp, 1);
1082 node->analyzed = node->finalized;
1083 node->symbol.used_from_other_partition = bp_unpack_value (&bp, 1);
1084 node->symbol.in_other_partition = bp_unpack_value (&bp, 1);
1085 if (node->symbol.in_other_partition)
1086 {
1087 DECL_EXTERNAL (node->symbol.decl) = 1;
1088 TREE_STATIC (node->symbol.decl) = 0;
1089 }
1090 if (node->finalized)
1091 varpool_mark_needed_node (node);
1092 if (non_null_aliasof)
1093 {
1094 decl_index = streamer_read_uhwi (ib);
1095 node->alias_of = lto_file_decl_data_get_var_decl (file_data, decl_index);
1096 }
1097 ref = streamer_read_hwi (ib);
1098 /* Store a reference for now, and fix up later to be a pointer. */
1099 node->symbol.same_comdat_group = (symtab_node) (intptr_t) ref;
1100 node->symbol.resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1101 LDPR_NUM_KNOWN);
1102
1103 return node;
1104 }
1105
1106 /* Read an IPA reference from input_block IB and record it for
1107 REFERING_NODE or REFERING_VARPOOL_NODE.  */
1108
1109 static void
1110 input_ref (struct lto_input_block *ib,
1111 struct cgraph_node *refering_node,
1112 struct varpool_node *refering_varpool_node,
1113 VEC(cgraph_node_ptr, heap) *nodes,
1114 VEC(varpool_node_ptr, heap) *varpool_nodes_vec)
1115 {
1116 struct cgraph_node *node = NULL;
1117 struct varpool_node *varpool_node = NULL;
1118 struct bitpack_d bp;
1119 enum ipa_ref_type type;
1120 enum ipa_ref_use use;
1121
1122 bp = streamer_read_bitpack (ib);
1123 type = (enum ipa_ref_type) bp_unpack_value (&bp, 1);
1124 use = (enum ipa_ref_use) bp_unpack_value (&bp, 2);
1125 if (type == IPA_REF_CGRAPH)
1126 node = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
1127 else
1128 varpool_node = VEC_index (varpool_node_ptr, varpool_nodes_vec,
1129 streamer_read_hwi (ib));
1130 ipa_record_reference (refering_node, refering_varpool_node,
1131 node, varpool_node, use, NULL);
1132 }
1133
1134 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1135 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1136 edge being read is indirect (in the sense that it has
1137 indirect_unknown_callee set). */
1138
1139 static void
1140 input_edge (struct lto_input_block *ib, VEC(cgraph_node_ptr, heap) *nodes,
1141 bool indirect)
1142 {
1143 struct cgraph_node *caller, *callee;
1144 struct cgraph_edge *edge;
1145 unsigned int stmt_id;
1146 gcov_type count;
1147 int freq;
1148 cgraph_inline_failed_t inline_failed;
1149 struct bitpack_d bp;
1150 int ecf_flags = 0;
1151
1152 caller = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
1153 if (caller == NULL || caller->symbol.decl == NULL_TREE)
1154 internal_error ("bytecode stream: no caller found while reading edge");
1155
1156 if (!indirect)
1157 {
1158 callee = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
1159 if (callee == NULL || callee->symbol.decl == NULL_TREE)
1160 internal_error ("bytecode stream: no callee found while reading edge");
1161 }
1162 else
1163 callee = NULL;
1164
1165 count = (gcov_type) streamer_read_hwi (ib);
1166
1167 bp = streamer_read_bitpack (ib);
1168 inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_enum, CIF_N_REASONS);
1169 stmt_id = bp_unpack_var_len_unsigned (&bp);
1170 freq = (int) bp_unpack_var_len_unsigned (&bp);
1171
1172 if (indirect)
1173 edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq);
1174 else
1175 edge = cgraph_create_edge (caller, callee, NULL, count, freq);
1176
1177 edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
1178 edge->lto_stmt_uid = stmt_id;
1179 edge->inline_failed = inline_failed;
1180 edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
1181 edge->can_throw_external = bp_unpack_value (&bp, 1);
1182 if (indirect)
1183 {
1184 if (bp_unpack_value (&bp, 1))
1185 ecf_flags |= ECF_CONST;
1186 if (bp_unpack_value (&bp, 1))
1187 ecf_flags |= ECF_PURE;
1188 if (bp_unpack_value (&bp, 1))
1189 ecf_flags |= ECF_NORETURN;
1190 if (bp_unpack_value (&bp, 1))
1191 ecf_flags |= ECF_MALLOC;
1192 if (bp_unpack_value (&bp, 1))
1193 ecf_flags |= ECF_NOTHROW;
1194 if (bp_unpack_value (&bp, 1))
1195 ecf_flags |= ECF_RETURNS_TWICE;
1196 edge->indirect_info->ecf_flags = ecf_flags;
1197 }
1198 }
1199
1200
1201 /* Read a cgraph from IB using the info in FILE_DATA. */
1202
1203 static VEC(cgraph_node_ptr, heap) *
1204 input_cgraph_1 (struct lto_file_decl_data *file_data,
1205 struct lto_input_block *ib)
1206 {
1207 enum LTO_cgraph_tags tag;
1208 VEC(cgraph_node_ptr, heap) *nodes = NULL;
1209 struct cgraph_node *node;
1210 unsigned i;
1211
1212 tag = streamer_read_enum (ib, LTO_cgraph_tags, LTO_cgraph_last_tag);
1213 order_base = symtab_order;
1214 while (tag)
1215 {
1216 if (tag == LTO_cgraph_edge)
1217 input_edge (ib, nodes, false);
1218 else if (tag == LTO_cgraph_indirect_edge)
1219 input_edge (ib, nodes, true);
1220 else
1221 {
1222 node = input_node (file_data, ib, tag,nodes);
1223 if (node == NULL || node->symbol.decl == NULL_TREE)
1224 internal_error ("bytecode stream: found empty cgraph node");
1225 VEC_safe_push (cgraph_node_ptr, heap, nodes, node);
1226 lto_cgraph_encoder_encode (file_data->cgraph_node_encoder, node);
1227 }
1228
1229 tag = streamer_read_enum (ib, LTO_cgraph_tags, LTO_cgraph_last_tag);
1230 }
1231
1232 lto_input_toplevel_asms (file_data, order_base);
1233
1234 /* AUX pointers should be all non-zero for nodes read from the stream. */
1235 #ifdef ENABLE_CHECKING
1236 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1237 gcc_assert (node->symbol.aux);
1238 #endif
1239 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1240 {
1241 int ref = (int) (intptr_t) node->global.inlined_to;
1242
1243 /* We share declaration of builtins, so we may read same node twice. */
1244 if (!node->symbol.aux)
1245 continue;
1246 node->symbol.aux = NULL;
1247
1248 /* Fixup inlined_to from reference to pointer. */
1249 if (ref != LCC_NOT_FOUND)
1250 node->global.inlined_to = VEC_index (cgraph_node_ptr, nodes, ref);
1251 else
1252 node->global.inlined_to = NULL;
1253
1254 ref = (int) (intptr_t) node->symbol.same_comdat_group;
1255
1256 /* Fixup same_comdat_group from reference to pointer. */
1257 if (ref != LCC_NOT_FOUND)
1258 node->symbol.same_comdat_group = (symtab_node)VEC_index (cgraph_node_ptr, nodes, ref);
1259 else
1260 node->symbol.same_comdat_group = NULL;
1261 }
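  /* Mark every node read from this stream; input_node uses a non-NULL aux
     pointer to detect duplicate occurrences of a node in another stream
     (allowed only for builtins).  input_cgraph clears the marks again once
     all files are read.  */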
1262 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1263 node->symbol.aux = (void *)1;
1264 return nodes;
1265 }
1266
1267 /* Read a varpool from IB using the info in FILE_DATA. */
1268
1269 static VEC(varpool_node_ptr, heap) *
1270 input_varpool_1 (struct lto_file_decl_data *file_data,
1271 struct lto_input_block *ib)
1272 {
1273 unsigned HOST_WIDE_INT len;
1274 VEC(varpool_node_ptr, heap) *varpool = NULL;
1275 int i;
1276 struct varpool_node *node;
1277
1278 len = streamer_read_uhwi (ib);
1279 while (len)
1280 {
1281 VEC_safe_push (varpool_node_ptr, heap, varpool,
1282 input_varpool_node (file_data, ib));
1283 len--;
1284 }
1285 #ifdef ENABLE_CHECKING
1286 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1287 gcc_assert (!node->symbol.aux);
1288 #endif
1289 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1290 {
1291 int ref = (int) (intptr_t) node->symbol.same_comdat_group;
1292 /* We share declaration of builtins, so we may read same node twice. */
1293 if (node->symbol.aux)
1294 continue;
1295 node->symbol.aux = (void *)1;
1296
1297 /* Fixup same_comdat_group from reference to pointer. */
1298 if (ref != LCC_NOT_FOUND)
1299 node->symbol.same_comdat_group = (symtab_node)VEC_index (varpool_node_ptr, varpool, ref);
1300 else
1301 node->symbol.same_comdat_group = NULL;
1302 }
1303 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1304 node->symbol.aux = NULL;
1305 return varpool;
1306 }
1307
1308 /* Input ipa_refs. */
1309
1310 static void
1311 input_refs (struct lto_input_block *ib,
1312 VEC(cgraph_node_ptr, heap) *nodes,
1313 VEC(varpool_node_ptr, heap) *varpool)
1314 {
1315 int count;
1316 int idx;
1317 while (true)
1318 {
1319 struct cgraph_node *node;
1320 count = streamer_read_uhwi (ib);
1321 if (!count)
1322 break;
1323 idx = streamer_read_uhwi (ib);
1324 node = VEC_index (cgraph_node_ptr, nodes, idx);
1325 while (count)
1326 {
1327 input_ref (ib, node, NULL, nodes, varpool);
1328 count--;
1329 }
1330 }
1331 while (true)
1332 {
1333 struct varpool_node *node;
1334 count = streamer_read_uhwi (ib);
1335 if (!count)
1336 break;
1337 node = VEC_index (varpool_node_ptr, varpool,
1338 streamer_read_uhwi (ib));
1339 while (count)
1340 {
1341 input_ref (ib, NULL, node, nodes, varpool);
1342 count--;
1343 }
1344 }
1345 }
1346
1347
1348 static struct gcov_ctr_summary lto_gcov_summary;
1349
1350 /* Input profile_info from IB. */
1351 static void
1352 input_profile_summary (struct lto_input_block *ib,
1353 struct lto_file_decl_data *file_data)
1354 {
1355 unsigned int runs = streamer_read_uhwi (ib);
1356 if (runs)
1357 {
1358 file_data->profile_info.runs = runs;
1359 file_data->profile_info.sum_max = streamer_read_uhwi (ib);
1360 }
1361
1362 }
1363
1364 /* Rescale profile summaries to the same number of runs in the whole unit. */
1365
1366 static void
1367 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1368 {
1369 struct lto_file_decl_data *file_data;
1370 unsigned int j;
1371 gcov_unsigned_t max_runs = 0;
1372 struct cgraph_node *node;
1373 struct cgraph_edge *edge;
1374
1375 /* Find the unit with the maximal number of runs.  If we ever get serious about
1376 roundoff errors, we might also consider computing the least common
1377 multiple.  */
1378 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1379 if (max_runs < file_data->profile_info.runs)
1380 max_runs = file_data->profile_info.runs;
1381
1382 if (!max_runs)
1383 return;
1384
1385 /* Simple overflow check.  We probably don't need to support that many train
1386 runs.  Such a large value probably implies data corruption anyway.  */
1387 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1388 {
1389 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1390 INT_MAX / REG_BR_PROB_BASE);
1391 return;
1392 }
1393
1394 profile_info = &lto_gcov_summary;
1395 lto_gcov_summary.runs = max_runs;
1396 lto_gcov_summary.sum_max = 0;
1397
1398 /* Rescale all units to the maximal number of runs.
1399 sum_max cannot be easily merged, as we have no idea what files come from
1400 the same run.  We do not use the info anyway, so leave it 0.  */
1401 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1402 if (file_data->profile_info.runs)
1403 {
1404 int scale = ((REG_BR_PROB_BASE * max_runs
1405 + file_data->profile_info.runs / 2)
1406 / file_data->profile_info.runs);
1407 lto_gcov_summary.sum_max = MAX (lto_gcov_summary.sum_max,
1408 (file_data->profile_info.sum_max
1409 * scale
1410 + REG_BR_PROB_BASE / 2)
1411 / REG_BR_PROB_BASE);
1412 }
1413
1414 /* Watch roundoff errors. */
1415 if (lto_gcov_summary.sum_max < max_runs)
1416 lto_gcov_summary.sum_max = max_runs;
1417
1418 /* If merging already happened at WPA time, we are done.  */
1419 if (flag_ltrans)
1420 return;
1421
1422 /* Now compute count_materialization_scale of each node.
1423 During LTRANS we already have values of count_materialization_scale
1424 computed, so just update them. */
1425 FOR_EACH_FUNCTION (node)
1426 if (node->symbol.lto_file_data
1427 && node->symbol.lto_file_data->profile_info.runs)
1428 {
1429 int scale;
1430
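	/* Example (assuming REG_BR_PROB_BASE == 10000 and a node whose
	   count_materialization_scale is still REG_BR_PROB_BASE): a unit
	   with 4 runs in a link whose maximum is 10 runs gets scale ==
	   25000, i.e. its counts are multiplied by 2.5 below.  */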
1431 scale =
1432 ((node->count_materialization_scale * max_runs
1433 + node->symbol.lto_file_data->profile_info.runs / 2)
1434 / node->symbol.lto_file_data->profile_info.runs);
1435 node->count_materialization_scale = scale;
1436 if (scale < 0)
1437 fatal_error ("Profile information in %s corrupted",
1438 node->symbol.lto_file_data->file_name);
1439
1440 if (scale == REG_BR_PROB_BASE)
1441 continue;
1442 for (edge = node->callees; edge; edge = edge->next_callee)
1443 edge->count = ((edge->count * scale + REG_BR_PROB_BASE / 2)
1444 / REG_BR_PROB_BASE);
1445 node->count = ((node->count * scale + REG_BR_PROB_BASE / 2)
1446 / REG_BR_PROB_BASE);
1447 }
1448 }
1449
1450 /* Input and merge the cgraph from each of the .o files passed to
1451 lto1. */
1452
1453 void
1454 input_cgraph (void)
1455 {
1456 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1457 struct lto_file_decl_data *file_data;
1458 unsigned int j = 0;
1459 struct cgraph_node *node;
1460
1461 while ((file_data = file_data_vec[j++]))
1462 {
1463 const char *data;
1464 size_t len;
1465 struct lto_input_block *ib;
1466 VEC(cgraph_node_ptr, heap) *nodes;
1467 VEC(varpool_node_ptr, heap) *varpool;
1468
1469 ib = lto_create_simple_input_block (file_data, LTO_section_cgraph,
1470 &data, &len);
1471 if (!ib)
1472 fatal_error ("cannot find LTO cgraph in %s", file_data->file_name);
1473 input_profile_summary (ib, file_data);
1474 file_data->cgraph_node_encoder = lto_cgraph_encoder_new ();
1475 nodes = input_cgraph_1 (file_data, ib);
1476 lto_destroy_simple_input_block (file_data, LTO_section_cgraph,
1477 ib, data, len);
1478
1479 ib = lto_create_simple_input_block (file_data, LTO_section_varpool,
1480 &data, &len);
1481 if (!ib)
1482 fatal_error ("cannot find LTO varpool in %s", file_data->file_name);
1483 varpool = input_varpool_1 (file_data, ib);
1484 lto_destroy_simple_input_block (file_data, LTO_section_varpool,
1485 ib, data, len);
1486
1487 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1488 &data, &len);
1489 if (!ib)
1490 fatal_error("cannot find LTO section refs in %s", file_data->file_name);
1491 input_refs (ib, nodes, varpool);
1492 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1493 ib, data, len);
1494 if (flag_ltrans)
1495 input_cgraph_opt_summary (nodes);
1496 VEC_free (cgraph_node_ptr, heap, nodes);
1497 VEC_free (varpool_node_ptr, heap, varpool);
1498 }
1499
1500 merge_profile_summaries (file_data_vec);
1501
1502 /* Clear out the aux field that was used to store enough state to
1503 tell which nodes should be overwritten. */
1504 FOR_EACH_FUNCTION (node)
1505 {
1506 /* Some nodes may have been created by cgraph_node. This
1507 happens when the callgraph contains nested functions. If the
1508 node for the parent function was never emitted to the gimple
1509 file, cgraph_node will create a node for it when setting the
1510 context of the nested function. */
1511 if (node->symbol.lto_file_data)
1512 node->symbol.aux = NULL;
1513 }
1514 }
1515
1516 /* True when we need optimization summary for NODE. */
1517
1518 static int
1519 output_cgraph_opt_summary_p (struct cgraph_node *node,
1520 cgraph_node_set set ATTRIBUTE_UNUSED)
1521 {
1522 return (node->clone_of
1523 && (node->clone.tree_map
1524 || node->clone.args_to_skip
1525 || node->clone.combined_args_to_skip));
1526 }
1527
1528 /* Output optimization summary for EDGE to OB. */
1529 static void
1530 output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
1531 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
1532 {
1533 }
1534
1535 /* Output optimization summary for NODE to OB. */
1536
1537 static void
1538 output_node_opt_summary (struct output_block *ob,
1539 struct cgraph_node *node,
1540 cgraph_node_set set)
1541 {
1542 unsigned int index;
1543 bitmap_iterator bi;
1544 struct ipa_replace_map *map;
1545 struct bitpack_d bp;
1546 int i;
1547 struct cgraph_edge *e;
1548
1549 if (node->clone.args_to_skip)
1550 {
1551 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.args_to_skip));
1552 EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
1553 streamer_write_uhwi (ob, index);
1554 }
1555 else
1556 streamer_write_uhwi (ob, 0);
1557 if (node->clone.combined_args_to_skip)
1558 {
1559 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.combined_args_to_skip));
1560 EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
1561 streamer_write_uhwi (ob, index);
1562 }
1563 else
1564 streamer_write_uhwi (ob, 0);
1565 streamer_write_uhwi (ob, VEC_length (ipa_replace_map_p,
1566 node->clone.tree_map));
1567 FOR_EACH_VEC_ELT (ipa_replace_map_p, node->clone.tree_map, i, map)
1568 {
1569 int parm_num;
1570 tree parm;
1571
1572 for (parm_num = 0, parm = DECL_ARGUMENTS (node->symbol.decl); parm;
1573 parm = DECL_CHAIN (parm), parm_num++)
1574 if (map->old_tree == parm)
1575 break;
1576 /* At the moment we assume all old trees to be PARM_DECLs, because we have no
1577 mechanism to store function local declarations into summaries. */
1578 gcc_assert (parm);
1579 streamer_write_uhwi (ob, parm_num);
1580 stream_write_tree (ob, map->new_tree, true);
1581 bp = bitpack_create (ob->main_stream);
1582 bp_pack_value (&bp, map->replace_p, 1);
1583 bp_pack_value (&bp, map->ref_p, 1);
1584 streamer_write_bitpack (&bp);
1585 }
1586
1587 if (cgraph_node_in_set_p (node, set))
1588 {
1589 for (e = node->callees; e; e = e->next_callee)
1590 output_edge_opt_summary (ob, e);
1591 for (e = node->indirect_calls; e; e = e->next_callee)
1592 output_edge_opt_summary (ob, e);
1593 }
1594 }
1595
1596 /* Output optimization summaries stored in callgraph.
1597 At the moment it is the clone info structure. */
1598
1599 static void
1600 output_cgraph_opt_summary (cgraph_node_set set)
1601 {
1602 struct cgraph_node *node;
1603 int i, n_nodes;
1604 lto_cgraph_encoder_t encoder;
1605 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1606 unsigned count = 0;
1607
1608 ob->cgraph_node = NULL;
1609 encoder = ob->decl_state->cgraph_node_encoder;
1610 n_nodes = lto_cgraph_encoder_size (encoder);
1611 for (i = 0; i < n_nodes; i++)
1612 if (output_cgraph_opt_summary_p (lto_cgraph_encoder_deref (encoder, i),
1613 set))
1614 count++;
1615 streamer_write_uhwi (ob, count);
1616 for (i = 0; i < n_nodes; i++)
1617 {
1618 node = lto_cgraph_encoder_deref (encoder, i);
1619 if (output_cgraph_opt_summary_p (node, set))
1620 {
1621 streamer_write_uhwi (ob, i);
1622 output_node_opt_summary (ob, node, set);
1623 }
1624 }
1625 produce_asm (ob, NULL);
1626 destroy_output_block (ob);
1627 }
1628
1629 /* Input optimization summary of EDGE. */
1630
1631 static void
1632 input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
1633 struct lto_input_block *ib_main ATTRIBUTE_UNUSED)
1634 {
1635 }
1636
1637 /* Input optimization summary of NODE. */
1638
1639 static void
1640 input_node_opt_summary (struct cgraph_node *node,
1641 struct lto_input_block *ib_main,
1642 struct data_in *data_in)
1643 {
1644 int i;
1645 int count;
1646 int bit;
1647 struct bitpack_d bp;
1648 struct cgraph_edge *e;
1649
1650 count = streamer_read_uhwi (ib_main);
1651 if (count)
1652 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
1653 for (i = 0; i < count; i++)
1654 {
1655 bit = streamer_read_uhwi (ib_main);
1656 bitmap_set_bit (node->clone.args_to_skip, bit);
1657 }
1658 count = streamer_read_uhwi (ib_main);
1659 if (count)
1660 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
1661 for (i = 0; i < count; i++)
1662 {
1663 bit = streamer_read_uhwi (ib_main);
1664 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
1665 }
1666 count = streamer_read_uhwi (ib_main);
1667 for (i = 0; i < count; i++)
1668 {
1669 int parm_num;
1670 tree parm;
1671 struct ipa_replace_map *map = ggc_alloc_ipa_replace_map ();
1672
1673 VEC_safe_push (ipa_replace_map_p, gc, node->clone.tree_map, map);
1674 for (parm_num = 0, parm = DECL_ARGUMENTS (node->symbol.decl); parm_num;
1675 parm = DECL_CHAIN (parm))
1676 parm_num --;
1677 map->parm_num = streamer_read_uhwi (ib_main);
1678 map->old_tree = NULL;
1679 map->new_tree = stream_read_tree (ib_main, data_in);
1680 bp = streamer_read_bitpack (ib_main);
1681 map->replace_p = bp_unpack_value (&bp, 1);
1682 map->ref_p = bp_unpack_value (&bp, 1);
1683 }
1684 for (e = node->callees; e; e = e->next_callee)
1685 input_edge_opt_summary (e, ib_main);
1686 for (e = node->indirect_calls; e; e = e->next_callee)
1687 input_edge_opt_summary (e, ib_main);
1688 }
1689
1690 /* Read section in file FILE_DATA of length LEN with data DATA. */
1691
1692 static void
1693 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
1694 const char *data, size_t len, VEC (cgraph_node_ptr,
1695 heap) * nodes)
1696 {
1697 const struct lto_function_header *header =
1698 (const struct lto_function_header *) data;
1699 const int cfg_offset = sizeof (struct lto_function_header);
1700 const int main_offset = cfg_offset + header->cfg_size;
1701 const int string_offset = main_offset + header->main_size;
1702 struct data_in *data_in;
1703 struct lto_input_block ib_main;
1704 unsigned int i;
1705 unsigned int count;
1706
1707 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
1708 header->main_size);
1709
1710 data_in =
1711 lto_data_in_create (file_data, (const char *) data + string_offset,
1712 header->string_size, NULL);
1713 count = streamer_read_uhwi (&ib_main);
1714
1715 for (i = 0; i < count; i++)
1716 {
1717 int ref = streamer_read_uhwi (&ib_main);
1718 input_node_opt_summary (VEC_index (cgraph_node_ptr, nodes, ref),
1719 &ib_main, data_in);
1720 }
1721 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
1722 len);
1723 lto_data_in_delete (data_in);
1724 }
1725
1726 /* Input optimization summary of cgraph. */
1727
1728 static void
1729 input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes)
1730 {
1731 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1732 struct lto_file_decl_data *file_data;
1733 unsigned int j = 0;
1734
1735 while ((file_data = file_data_vec[j++]))
1736 {
1737 size_t len;
1738 const char *data =
1739 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
1740 &len);
1741
1742 if (data)
1743 input_cgraph_opt_section (file_data, data, len, nodes);
1744 }
1745 }