lto-streamer.h (lto_input_toplevel_asms): Add order_base parameter.
[gcc.git] / gcc / lto-cgraph.c
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright 2009, 2010, 2011 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "expr.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "hashtab.h"
33 #include "langhooks.h"
34 #include "basic-block.h"
35 #include "tree-flow.h"
36 #include "cgraph.h"
37 #include "function.h"
38 #include "ggc.h"
39 #include "diagnostic-core.h"
40 #include "except.h"
41 #include "vec.h"
42 #include "timevar.h"
43 #include "output.h"
44 #include "pointer-set.h"
45 #include "lto-streamer.h"
46 #include "data-streamer.h"
47 #include "tree-streamer.h"
48 #include "gcov-io.h"
49
50 static void output_varpool (cgraph_node_set, varpool_node_set);
51 static void output_cgraph_opt_summary (cgraph_node_set set);
52 static void input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes);
53
54 /* Number of LDPR values known to GCC. */
55 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
56
/* All node orders are offset by ORDER_BASE.  */
58 static int order_base;
59
/* Cgraph streaming is organized as a set of records whose type
   is indicated by a tag.  */
enum LTO_cgraph_tags
{
  /* Must leave 0 for the stopper.  */

  /* Cgraph node without body available.  */
  LTO_cgraph_unavail_node = 1,
  /* Cgraph node with function body.  */
  LTO_cgraph_analyzed_node,
  /* Cgraph edges.  */
  LTO_cgraph_edge,
  LTO_cgraph_indirect_edge,
  /* Sentinel: upper bound used when streaming tag values; must stay last.  */
  LTO_cgraph_last_tag
};
75
76 /* Create a new cgraph encoder. */
77
78 lto_cgraph_encoder_t
79 lto_cgraph_encoder_new (void)
80 {
81 lto_cgraph_encoder_t encoder = XCNEW (struct lto_cgraph_encoder_d);
82 encoder->map = pointer_map_create ();
83 encoder->nodes = NULL;
84 encoder->body = pointer_set_create ();
85 return encoder;
86 }
87
88
89 /* Delete ENCODER and its components. */
90
91 void
92 lto_cgraph_encoder_delete (lto_cgraph_encoder_t encoder)
93 {
94 VEC_free (cgraph_node_ptr, heap, encoder->nodes);
95 pointer_map_destroy (encoder->map);
96 pointer_set_destroy (encoder->body);
97 free (encoder);
98 }
99
100
101 /* Return the existing reference number of NODE in the cgraph encoder in
102 output block OB. Assign a new reference if this is the first time
103 NODE is encoded. */
104
105 int
106 lto_cgraph_encoder_encode (lto_cgraph_encoder_t encoder,
107 struct cgraph_node *node)
108 {
109 int ref;
110 void **slot;
111
112 slot = pointer_map_contains (encoder->map, node);
113 if (!slot)
114 {
115 ref = VEC_length (cgraph_node_ptr, encoder->nodes);
116 slot = pointer_map_insert (encoder->map, node);
117 *slot = (void *) (intptr_t) ref;
118 VEC_safe_push (cgraph_node_ptr, heap, encoder->nodes, node);
119 }
120 else
121 ref = (int) (intptr_t) *slot;
122
123 return ref;
124 }
125
126 #define LCC_NOT_FOUND (-1)
127
128 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
129 or LCC_NOT_FOUND if it is not there. */
130
131 int
132 lto_cgraph_encoder_lookup (lto_cgraph_encoder_t encoder,
133 struct cgraph_node *node)
134 {
135 void **slot = pointer_map_contains (encoder->map, node);
136 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
137 }
138
139
140 /* Return the cgraph node corresponding to REF using ENCODER. */
141
142 struct cgraph_node *
143 lto_cgraph_encoder_deref (lto_cgraph_encoder_t encoder, int ref)
144 {
145 if (ref == LCC_NOT_FOUND)
146 return NULL;
147
148 return VEC_index (cgraph_node_ptr, encoder->nodes, ref);
149 }
150
151
152 /* Return TRUE if we should encode initializer of NODE (if any). */
153
154 bool
155 lto_cgraph_encoder_encode_body_p (lto_cgraph_encoder_t encoder,
156 struct cgraph_node *node)
157 {
158 return pointer_set_contains (encoder->body, node);
159 }
160
/* Mark the body of NODE for streaming by ENCODER.  (The previous
   comment wrongly described this void setter as returning TRUE.)  */

static void
lto_set_cgraph_encoder_encode_body (lto_cgraph_encoder_t encoder,
				    struct cgraph_node *node)
{
  pointer_set_insert (encoder->body, node);
}
169
170 /* Create a new varpool encoder. */
171
172 lto_varpool_encoder_t
173 lto_varpool_encoder_new (void)
174 {
175 lto_varpool_encoder_t encoder = XCNEW (struct lto_varpool_encoder_d);
176 encoder->map = pointer_map_create ();
177 encoder->initializer = pointer_set_create ();
178 encoder->nodes = NULL;
179 return encoder;
180 }
181
182
183 /* Delete ENCODER and its components. */
184
185 void
186 lto_varpool_encoder_delete (lto_varpool_encoder_t encoder)
187 {
188 VEC_free (varpool_node_ptr, heap, encoder->nodes);
189 pointer_map_destroy (encoder->map);
190 pointer_set_destroy (encoder->initializer);
191 free (encoder);
192 }
193
194
195 /* Return the existing reference number of NODE in the varpool encoder in
196 output block OB. Assign a new reference if this is the first time
197 NODE is encoded. */
198
199 int
200 lto_varpool_encoder_encode (lto_varpool_encoder_t encoder,
201 struct varpool_node *node)
202 {
203 int ref;
204 void **slot;
205
206 slot = pointer_map_contains (encoder->map, node);
207 if (!slot)
208 {
209 ref = VEC_length (varpool_node_ptr, encoder->nodes);
210 slot = pointer_map_insert (encoder->map, node);
211 *slot = (void *) (intptr_t) ref;
212 VEC_safe_push (varpool_node_ptr, heap, encoder->nodes, node);
213 }
214 else
215 ref = (int) (intptr_t) *slot;
216
217 return ref;
218 }
219
220 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
221 or LCC_NOT_FOUND if it is not there. */
222
223 int
224 lto_varpool_encoder_lookup (lto_varpool_encoder_t encoder,
225 struct varpool_node *node)
226 {
227 void **slot = pointer_map_contains (encoder->map, node);
228 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
229 }
230
231
232 /* Return the varpool node corresponding to REF using ENCODER. */
233
234 struct varpool_node *
235 lto_varpool_encoder_deref (lto_varpool_encoder_t encoder, int ref)
236 {
237 if (ref == LCC_NOT_FOUND)
238 return NULL;
239
240 return VEC_index (varpool_node_ptr, encoder->nodes, ref);
241 }
242
243
244 /* Return TRUE if we should encode initializer of NODE (if any). */
245
246 bool
247 lto_varpool_encoder_encode_initializer_p (lto_varpool_encoder_t encoder,
248 struct varpool_node *node)
249 {
250 return pointer_set_contains (encoder->initializer, node);
251 }
252
/* Mark the initializer of NODE for streaming by ENCODER.  (The previous
   comment wrongly described this void setter as returning TRUE.)  */

static void
lto_set_varpool_encoder_encode_initializer (lto_varpool_encoder_t encoder,
					    struct varpool_node *node)
{
  pointer_set_insert (encoder->initializer, node);
}
261
/* Output the cgraph EDGE to OB using ENCODER.  The record layout is:
   tag, caller ref, [callee ref], count, then a bitpack.  The reader
   must consume fields in exactly this order.  */

static void
lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
		 lto_cgraph_encoder_t encoder)
{
  unsigned int uid;
  intptr_t ref;
  struct bitpack_d bp;

  /* The tag tells the reader whether a callee reference follows.  */
  if (edge->indirect_unknown_callee)
    streamer_write_enum (ob->main_stream, LTO_cgraph_tags, LTO_cgraph_last_tag,
			 LTO_cgraph_indirect_edge);
  else
    streamer_write_enum (ob->main_stream, LTO_cgraph_tags, LTO_cgraph_last_tag,
			 LTO_cgraph_edge);

  /* The caller must already be in the encoder; edges are streamed
     after the nodes.  */
  ref = lto_cgraph_encoder_lookup (encoder, edge->caller);
  gcc_assert (ref != LCC_NOT_FOUND);
  streamer_write_hwi_stream (ob->main_stream, ref);

  /* Indirect edges have no known callee, so none is streamed.  */
  if (!edge->indirect_unknown_callee)
    {
      ref = lto_cgraph_encoder_lookup (encoder, edge->callee);
      gcc_assert (ref != LCC_NOT_FOUND);
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  streamer_write_hwi_stream (ob->main_stream, edge->count);

  bp = bitpack_create (ob->main_stream);
  /* When the caller's gimple body is not in memory, the call statement
     is unavailable; fall back to the uid recorded at stream-in time.  */
  uid = (!gimple_has_body_p (edge->caller->decl)
	 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt));
  bp_pack_enum (&bp, cgraph_inline_failed_enum,
		CIF_N_REASONS, edge->inline_failed);
  bp_pack_var_len_unsigned (&bp, uid);
  bp_pack_var_len_unsigned (&bp, edge->frequency);
  bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
  bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
  bp_pack_value (&bp, edge->can_throw_external, 1);
  if (edge->indirect_unknown_callee)
    {
      /* Pack the ECF flags relevant to an indirect call one bit each;
	 the reader reconstructs ecf_flags from them.  */
      int flags = edge->indirect_info->ecf_flags;
      bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
      /* Flags that should not appear on indirect calls.  */
      gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
			     | ECF_MAY_BE_ALLOCA
			     | ECF_SIBCALL
			     | ECF_LEAF
			     | ECF_NOVOPS)));
    }
  streamer_write_bitpack (&bp);
}
320
/* Return true if LIST contains references from other partitions,
   i.e. from a cgraph node outside SET or a varpool node outside VSET.  */

bool
referenced_from_other_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
				   varpool_node_set vset)
{
  int i;
  struct ipa_ref *ref;
  for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
    {
      if (ref->refering_type == IPA_REF_CGRAPH)
	{
	  /* A referring function already known to live in another
	     partition, or simply absent from SET, is a foreign reference.  */
	  if (ipa_ref_refering_node (ref)->in_other_partition
	      || !cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
	    return true;
	}
      else
	{
	  /* Likewise for a referring variable and VSET.  */
	  if (ipa_ref_refering_varpool_node (ref)->in_other_partition
	      || !varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
					 vset))
	    return true;
	}
    }
  return false;
}
347
/* Return true when NODE is reachable (called) from another partition,
   i.e. has a caller outside SET.  */

bool
reachable_from_other_partition_p (struct cgraph_node *node, cgraph_node_set set)
{
  struct cgraph_edge *e;
  /* Only analyzed nodes are considered callable here.  */
  if (!node->analyzed)
    return false;
  /* Inline clones are reached through the function they were inlined
     into, never called directly.  */
  if (node->global.inlined_to)
    return false;
  for (e = node->callers; e; e = e->next_caller)
    if (e->caller->in_other_partition
	|| !cgraph_node_in_set_p (e->caller, set))
      return true;
  return false;
}
364
/* Return true if LIST contains references from this partition,
   i.e. from a cgraph node in SET or a varpool node in VSET.
   (The old comment wrongly said "other partitions".)  */

bool
referenced_from_this_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
				  varpool_node_set vset)
{
  int i;
  struct ipa_ref *ref;
  for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
    {
      if (ref->refering_type == IPA_REF_CGRAPH)
	{
	  if (cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
	    return true;
	}
      else
	{
	  if (varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
				     vset))
	    return true;
	}
    }
  return false;
}
389
/* Return true when NODE has a caller inside SET, i.e. is reachable
   from this partition.  (The old comment wrongly said "other".)  */

bool
reachable_from_this_partition_p (struct cgraph_node *node, cgraph_node_set set)
{
  struct cgraph_edge *e;
  for (e = node->callers; e; e = e->next_caller)
    if (cgraph_node_in_set_p (e->caller, set))
      return true;
  return false;
}
401
402 /* Output the cgraph NODE to OB. ENCODER is used to find the
403 reference number of NODE->inlined_to. SET is the set of nodes we
404 are writing to the current file. If NODE is not in SET, then NODE
405 is a boundary of a cgraph_node_set and we pretend NODE just has a
406 decl and no callees. WRITTEN_DECLS is the set of FUNCTION_DECLs
407 that have had their callgraph node written so far. This is used to
408 determine if NODE is a clone of a previously written node. */
409
410 static void
411 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
412 lto_cgraph_encoder_t encoder, cgraph_node_set set,
413 varpool_node_set vset)
414 {
415 unsigned int tag;
416 struct bitpack_d bp;
417 bool boundary_p;
418 intptr_t ref;
419 bool in_other_partition = false;
420 struct cgraph_node *clone_of;
421
422 boundary_p = !cgraph_node_in_set_p (node, set);
423
424 if (node->analyzed && !boundary_p)
425 tag = LTO_cgraph_analyzed_node;
426 else
427 tag = LTO_cgraph_unavail_node;
428
429 streamer_write_enum (ob->main_stream, LTO_cgraph_tags, LTO_cgraph_last_tag,
430 tag);
431 streamer_write_hwi_stream (ob->main_stream, node->order);
432
433 /* In WPA mode, we only output part of the call-graph. Also, we
434 fake cgraph node attributes. There are two cases that we care.
435
436 Boundary nodes: There are nodes that are not part of SET but are
437 called from within SET. We artificially make them look like
438 externally visible nodes with no function body.
439
440 Cherry-picked nodes: These are nodes we pulled from other
441 translation units into SET during IPA-inlining. We make them as
442 local static nodes to prevent clashes with other local statics. */
443 if (boundary_p && node->analyzed)
444 {
445 /* Inline clones can not be part of boundary.
446 gcc_assert (!node->global.inlined_to);
447
448 FIXME: At the moment they can be, when partition contains an inline
449 clone that is clone of inline clone from outside partition. We can
450 reshape the clone tree and make other tree to be the root, but it
451 needs a bit extra work and will be promplty done by cgraph_remove_node
452 after reading back. */
453 in_other_partition = 1;
454 }
455
456 clone_of = node->clone_of;
457 while (clone_of
458 && (ref = lto_cgraph_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
459 if (clone_of->prev_sibling_clone)
460 clone_of = clone_of->prev_sibling_clone;
461 else
462 clone_of = clone_of->clone_of;
463
464 if (LTO_cgraph_analyzed_node)
465 gcc_assert (clone_of || !node->clone_of);
466 if (!clone_of)
467 streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
468 else
469 streamer_write_hwi_stream (ob->main_stream, ref);
470
471
472 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
473 streamer_write_hwi_stream (ob->main_stream, node->count);
474 streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
475
476 if (tag == LTO_cgraph_analyzed_node)
477 {
478 if (node->global.inlined_to)
479 {
480 ref = lto_cgraph_encoder_lookup (encoder, node->global.inlined_to);
481 gcc_assert (ref != LCC_NOT_FOUND);
482 }
483 else
484 ref = LCC_NOT_FOUND;
485
486 streamer_write_hwi_stream (ob->main_stream, ref);
487 }
488
489 if (node->same_comdat_group && !boundary_p)
490 {
491 ref = lto_cgraph_encoder_lookup (encoder, node->same_comdat_group);
492 gcc_assert (ref != LCC_NOT_FOUND);
493 }
494 else
495 ref = LCC_NOT_FOUND;
496 streamer_write_hwi_stream (ob->main_stream, ref);
497
498 bp = bitpack_create (ob->main_stream);
499 bp_pack_value (&bp, node->local.local, 1);
500 bp_pack_value (&bp, node->local.externally_visible, 1);
501 bp_pack_value (&bp, node->local.finalized, 1);
502 bp_pack_value (&bp, node->local.versionable, 1);
503 bp_pack_value (&bp, node->local.can_change_signature, 1);
504 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
505 bp_pack_value (&bp, node->needed, 1);
506 bp_pack_value (&bp, node->address_taken, 1);
507 bp_pack_value (&bp, node->abstract_and_needed, 1);
508 bp_pack_value (&bp, tag == LTO_cgraph_analyzed_node
509 && !DECL_EXTERNAL (node->decl)
510 && !DECL_COMDAT (node->decl)
511 && (reachable_from_other_partition_p (node, set)
512 || referenced_from_other_partition_p (&node->ref_list, set, vset)), 1);
513 bp_pack_value (&bp, node->lowered, 1);
514 bp_pack_value (&bp, in_other_partition, 1);
515 bp_pack_value (&bp, node->alias && !boundary_p, 1);
516 bp_pack_value (&bp, node->frequency, 2);
517 bp_pack_value (&bp, node->only_called_at_startup, 1);
518 bp_pack_value (&bp, node->only_called_at_exit, 1);
519 bp_pack_value (&bp, node->thunk.thunk_p && !boundary_p, 1);
520 bp_pack_enum (&bp, ld_plugin_symbol_resolution,
521 LDPR_NUM_KNOWN, node->resolution);
522 streamer_write_bitpack (&bp);
523
524 if (node->thunk.thunk_p && !boundary_p)
525 {
526 streamer_write_uhwi_stream
527 (ob->main_stream,
528 1 + (node->thunk.this_adjusting != 0) * 2
529 + (node->thunk.virtual_offset_p != 0) * 4);
530 streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
531 streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
532 }
533 if ((node->alias || node->thunk.thunk_p) && !boundary_p)
534 {
535 streamer_write_hwi_in_range (ob->main_stream, 0, 1,
536 node->thunk.alias != NULL);
537 if (node->thunk.alias != NULL)
538 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
539 node->thunk.alias);
540 }
541 }
542
/* Output the varpool NODE to OB.
   If NODE is not in SET, then NODE is a boundary.  The field order
   (order, decl index, bitpack, optional alias decl, comdat ref,
   resolution) must match the reader.  */

static void
lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
			 lto_varpool_encoder_t varpool_encoder,
			 cgraph_node_set set, varpool_node_set vset)
{
  /* Only analyzed nodes outside VSET count as boundary here.  */
  bool boundary_p = !varpool_node_in_set_p (node, vset) && node->analyzed;
  struct bitpack_d bp;
  int ref;

  streamer_write_hwi_stream (ob->main_stream, node->order);
  lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, node->externally_visible, 1);
  bp_pack_value (&bp, node->force_output, 1);
  bp_pack_value (&bp, node->finalized, 1);
  bp_pack_value (&bp, node->alias, 1);
  bp_pack_value (&bp, node->alias_of != NULL, 1);
  gcc_assert (node->finalized || !node->analyzed);
  gcc_assert (node->needed);
  /* Constant pool initializers can be de-unified into individual ltrans units.
     FIXME: Alternatively at -Os we may want to avoid generating for them the local
     labels and share them across LTRANS partitions.  */
  if (DECL_IN_CONSTANT_POOL (node->decl)
      && !DECL_COMDAT (node->decl))
    {
      bp_pack_value (&bp, 0, 1);  /* used_from_other_partition.  */
      bp_pack_value (&bp, 0, 1);  /* in_other_partition.  */
    }
  else
    {
      bp_pack_value (&bp, node->analyzed
		     && referenced_from_other_partition_p (&node->ref_list,
							   set, vset), 1);
      bp_pack_value (&bp, boundary_p, 1);  /* in_other_partition.  */
    }
  streamer_write_bitpack (&bp);
  /* The alias_of decl follows only when the bitpack said it exists.  */
  if (node->alias_of)
    lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->alias_of);
  if (node->same_comdat_group && !boundary_p)
    {
      ref = lto_varpool_encoder_lookup (varpool_encoder, node->same_comdat_group);
      gcc_assert (ref != LCC_NOT_FOUND);
    }
  else
    ref = LCC_NOT_FOUND;
  streamer_write_hwi_stream (ob->main_stream, ref);
  streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
		       LDPR_NUM_KNOWN, node->resolution);
}
595
/* Output the IPA reference REF to OB.  ENCODER and VARPOOL_ENCODER
   supply the reference numbers of the referred-to cgraph or varpool
   node.  (The old comment wrongly described this as outputting a
   varpool node.)  */

static void
lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
		lto_cgraph_encoder_t encoder,
		lto_varpool_encoder_t varpool_encoder)
{
  struct bitpack_d bp;
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ref->refered_type, 1);
  bp_pack_value (&bp, ref->use, 2);
  streamer_write_bitpack (&bp);
  if (ref->refered_type == IPA_REF_CGRAPH)
    {
      /* Referred-to function: must already be encoded.  */
      int nref = lto_cgraph_encoder_lookup (encoder, ipa_ref_node (ref));
      gcc_assert (nref != LCC_NOT_FOUND);
      streamer_write_hwi_stream (ob->main_stream, nref);
    }
  else
    {
      /* Referred-to variable: must already be encoded.  */
      int nref = lto_varpool_encoder_lookup (varpool_encoder,
					     ipa_ref_varpool_node (ref));
      gcc_assert (nref != LCC_NOT_FOUND);
      streamer_write_hwi_stream (ob->main_stream, nref);
    }
}
623
624 /* Stream out profile_summary to OB. */
625
626 static void
627 output_profile_summary (struct lto_simple_output_block *ob)
628 {
629 if (profile_info)
630 {
631 /* We do not output num, sum_all and run_max, they are not used by
632 GCC profile feedback and they are difficult to merge from multiple
633 units. */
634 gcc_assert (profile_info->runs);
635 streamer_write_uhwi_stream (ob->main_stream, profile_info->runs);
636 streamer_write_uhwi_stream (ob->main_stream, profile_info->sum_max);
637 }
638 else
639 streamer_write_uhwi_stream (ob->main_stream, 0);
640 }
641
/* Add NODE into ENCODER as well as nodes it is cloned from.
   Do it in a way so clones appear first.  Note that only the root of
   the clone tree (the node with no clone_of) gets its body marked for
   streaming when INCLUDE_BODY is set; clones are reconstructed from it
   on input.  */

static void
add_node_to (lto_cgraph_encoder_t encoder, struct cgraph_node *node,
	     bool include_body)
{
  if (node->clone_of)
    add_node_to (encoder, node->clone_of, include_body);
  else if (include_body)
    lto_set_cgraph_encoder_encode_body (encoder, node);
  /* The recursion above encodes all ancestors before NODE itself.  */
  lto_cgraph_encoder_encode (encoder, node);
}
655
656 /* Add all references in LIST to encoders. */
657
658 static void
659 add_references (lto_cgraph_encoder_t encoder,
660 lto_varpool_encoder_t varpool_encoder,
661 struct ipa_ref_list *list)
662 {
663 int i;
664 struct ipa_ref *ref;
665 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
666 if (ref->refered_type == IPA_REF_CGRAPH)
667 add_node_to (encoder, ipa_ref_node (ref), false);
668 else
669 {
670 struct varpool_node *vnode = ipa_ref_varpool_node (ref);
671 lto_varpool_encoder_encode (varpool_encoder, vnode);
672 }
673 }
674
/* Output all callees or indirect outgoing edges.  EDGE must be the first such
   edge.  */

static void
output_outgoing_cgraph_edges (struct cgraph_edge *edge,
			      struct lto_simple_output_block *ob,
			      lto_cgraph_encoder_t encoder)
{
  if (!edge)
    return;

  /* Output edges in backward direction, so the reconstructed callgraph match
     and it is easy to associate call sites in the IPA pass summaries.  */
  /* First walk forward to the tail of the callee list...  */
  while (edge->next_callee)
    edge = edge->next_callee;
  /* ...then emit walking back via prev_callee.  */
  for (; edge; edge = edge->prev_callee)
    lto_output_edge (ob, edge, encoder);
}
693
/* Output the IPA references of all nodes in SET and VSET to the
   LTO_section_refs section.  Each record is: count, owner reference,
   then COUNT refs; a 0 count terminates each of the two lists.  */

static void
output_refs (cgraph_node_set set, varpool_node_set vset,
	     lto_cgraph_encoder_t encoder,
	     lto_varpool_encoder_t varpool_encoder)
{
  cgraph_node_set_iterator csi;
  varpool_node_set_iterator vsi;
  struct lto_simple_output_block *ob;
  int count;
  struct ipa_ref *ref;
  int i;

  ob = lto_create_simple_output_block (LTO_section_refs);

  /* First the references held by functions in SET.  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      struct cgraph_node *node = csi_node (csi);

      count = ipa_ref_list_nreferences (&node->ref_list);
      if (count)
	{
	  streamer_write_uhwi_stream (ob->main_stream, count);
	  streamer_write_uhwi_stream (ob->main_stream,
				     lto_cgraph_encoder_lookup (encoder, node));
	  for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
	    lto_output_ref (ob, ref, encoder, varpool_encoder);
	}
    }

  /* Zero count terminates the function-reference list.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  /* Then the references held by variables in VSET.  */
  for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
    {
      struct varpool_node *node = vsi_node (vsi);

      count = ipa_ref_list_nreferences (&node->ref_list);
      if (count)
	{
	  streamer_write_uhwi_stream (ob->main_stream, count);
	  streamer_write_uhwi_stream (ob->main_stream,
				     lto_varpool_encoder_lookup (varpool_encoder,
								 node));
	  for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
	    lto_output_ref (ob, ref, encoder, varpool_encoder);
	}
    }

  /* Zero count terminates the variable-reference list.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  lto_destroy_simple_output_block (ob);
}
747
/* Find out all cgraph and varpool nodes we want to encode in current unit
   and insert them to encoders stored in STATE.  SET/VSET are the nodes
   assigned to this partition; boundary callees and referenced nodes are
   pulled in without bodies.  */
void
compute_ltrans_boundary (struct lto_out_decl_state *state,
			 cgraph_node_set set, varpool_node_set vset)
{
  struct cgraph_node *node;
  cgraph_node_set_iterator csi;
  varpool_node_set_iterator vsi;
  struct cgraph_edge *edge;
  int i;
  lto_cgraph_encoder_t encoder;
  lto_varpool_encoder_t varpool_encoder;

  encoder = state->cgraph_node_encoder = lto_cgraph_encoder_new ();
  varpool_encoder = state->varpool_node_encoder = lto_varpool_encoder_new ();

  /* Go over all the nodes in SET and assign references.  Bodies are
     included for nodes that belong to the partition.  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      add_node_to (encoder, node, true);
      add_references (encoder, varpool_encoder, &node->ref_list);
    }
  for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
    {
      struct varpool_node *vnode = vsi_node (vsi);
      gcc_assert (!vnode->alias || vnode->alias_of);
      lto_varpool_encoder_encode (varpool_encoder, vnode);
      lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
      add_references (encoder, varpool_encoder, &vnode->ref_list);
    }
  /* Pickle in also the initializer of all referenced readonly variables
     to help folding.  Constant pool variables are not shared, so we must
     pickle those too.  Note: the encoder may grow while we iterate, and
     lto_varpool_encoder_size is re-evaluated each round, so newly added
     nodes are processed as well.  */
  for (i = 0; i < lto_varpool_encoder_size (varpool_encoder); i++)
    {
      struct varpool_node *vnode = lto_varpool_encoder_deref (varpool_encoder, i);
      if (DECL_INITIAL (vnode->decl)
	  && !lto_varpool_encoder_encode_initializer_p (varpool_encoder,
							vnode)
	  && const_value_known_p (vnode->decl))
	{
	  lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
	  add_references (encoder, varpool_encoder, &vnode->ref_list);
	}
    }

  /* Go over all the nodes again to include callees that are not in
     SET.  Those become body-less boundary nodes.  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      for (edge = node->callees; edge; edge = edge->next_callee)
	{
	  struct cgraph_node *callee = edge->callee;
	  if (!cgraph_node_in_set_p (callee, set))
	    {
	      /* We should have moved all the inlines.  */
	      gcc_assert (!callee->global.inlined_to);
	      add_node_to (encoder, callee, false);
	    }
	}
    }
}
813
/* Output the part of the cgraph in SET (and the varpool in VSET):
   profile summary, all encoded nodes, all edges, a terminating 0,
   then (once per compilation) the toplevel asms, the varpool section
   and the references section.  */

void
output_cgraph (cgraph_node_set set, varpool_node_set vset)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob;
  cgraph_node_set_iterator csi;
  int i, n_nodes;
  lto_cgraph_encoder_t encoder;
  lto_varpool_encoder_t varpool_encoder;
  /* Static so asms are emitted into only the first partition written.  */
  static bool asm_nodes_output = false;

  if (flag_wpa)
    output_cgraph_opt_summary (set);

  ob = lto_create_simple_output_block (LTO_section_cgraph);

  output_profile_summary (ob);

  /* An encoder for cgraph nodes should have been created by
     ipa_write_summaries_1.  */
  gcc_assert (ob->decl_state->cgraph_node_encoder);
  gcc_assert (ob->decl_state->varpool_node_encoder);
  encoder = ob->decl_state->cgraph_node_encoder;
  varpool_encoder = ob->decl_state->varpool_node_encoder;

  /* Write out the nodes.  We must first output a node and then its clones,
     otherwise at a time reading back the node there would be nothing to clone
     from.  */
  n_nodes = lto_cgraph_encoder_size (encoder);
  for (i = 0; i < n_nodes; i++)
    {
      node = lto_cgraph_encoder_deref (encoder, i);
      lto_output_node (ob, node, encoder, set, vset);
    }

  /* Go over the nodes in SET again to write edges.  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      output_outgoing_cgraph_edges (node->callees, ob, encoder);
      output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
    }

  /* Zero tag terminates the edge stream.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  lto_destroy_simple_output_block (ob);

  /* Emit toplevel asms.
     When doing WPA we must output every asm just once.  Since we do not partition asm
     nodes at all, output them to first output.  This is kind of hack, but should work
     well.  */
  if (!asm_nodes_output)
    {
      asm_nodes_output = true;
      lto_output_toplevel_asms ();
    }

  output_varpool (set, vset);
  output_refs (set, vset, encoder, varpool_encoder);
}
876
/* Overwrite the information in NODE based on FILE_DATA, TAG and the
   bitpack BP read from the stream.  This is called either to initialize
   NODE or to replace the values in it, for instance because the first
   time we saw it, the function body was not available but now it
   is.  The unpack order must mirror the pack order in lto_output_node.
   (The old comment mentioned STACK_SIZE/SELF_TIME/SELF_SIZE parameters
   that do not exist.)  */

static void
input_overwrite_node (struct lto_file_decl_data *file_data,
		      struct cgraph_node *node,
		      enum LTO_cgraph_tags tag,
		      struct bitpack_d *bp)
{
  /* Stash the tag in aux; used later to detect duplicate reads.  */
  node->aux = (void *) tag;
  node->local.lto_file_data = file_data;

  node->local.local = bp_unpack_value (bp, 1);
  node->local.externally_visible = bp_unpack_value (bp, 1);
  node->local.finalized = bp_unpack_value (bp, 1);
  node->local.versionable = bp_unpack_value (bp, 1);
  node->local.can_change_signature = bp_unpack_value (bp, 1);
  node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
  node->needed = bp_unpack_value (bp, 1);
  node->address_taken = bp_unpack_value (bp, 1);
  node->abstract_and_needed = bp_unpack_value (bp, 1);
  node->reachable_from_other_partition = bp_unpack_value (bp, 1);
  node->lowered = bp_unpack_value (bp, 1);
  node->analyzed = tag == LTO_cgraph_analyzed_node;
  node->in_other_partition = bp_unpack_value (bp, 1);
  if (node->in_other_partition
      /* Avoid updating decl when we are seeing just inline clone.
	 When inlining function that has functions already inlined into it,
	 we produce clones of inline clones.

	 WPA partitioning might put each clone into different unit and
	 we might end up streaming inline clone from other partition
	 to support clone we are interested in.  */
      && (!node->clone_of
	  || node->clone_of->decl != node->decl))
    {
      /* The real definition lives in another partition; demote this
	 copy to an external declaration.  */
      DECL_EXTERNAL (node->decl) = 1;
      TREE_STATIC (node->decl) = 0;
    }
  node->alias = bp_unpack_value (bp, 1);
  node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
  node->only_called_at_startup = bp_unpack_value (bp, 1);
  node->only_called_at_exit = bp_unpack_value (bp, 1);
  node->thunk.thunk_p = bp_unpack_value (bp, 1);
  node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
				     LDPR_NUM_KNOWN);
}
928
/* Output the part of the varpool in SET and VSET to the
   LTO_section_varpool section: the node count, then each encoded node.
   (The old comment wrongly said "cgraph".)  */

static void
output_varpool (cgraph_node_set set, varpool_node_set vset)
{
  struct lto_simple_output_block *ob = lto_create_simple_output_block (LTO_section_varpool);
  lto_varpool_encoder_t varpool_encoder = ob->decl_state->varpool_node_encoder;
  int len = lto_varpool_encoder_size (varpool_encoder), i;

  streamer_write_uhwi_stream (ob->main_stream, len);

  /* Write out the nodes.  We must first output a node and then its clones,
     otherwise at a time reading back the node there would be nothing to clone
     from.  */
  for (i = 0; i < len; i++)
    {
      lto_output_varpool_node (ob, lto_varpool_encoder_deref (varpool_encoder, i),
			       varpool_encoder,
			       set, vset);
    }

  lto_destroy_simple_output_block (ob);
}
952
/* Read a cgraph node from input_block IB.  TAG is the node's tag just
   read.  NODES holds the nodes read so far and is used to resolve the
   clone-origin reference.  Return the node read or overwritten.  */

static struct cgraph_node *
input_node (struct lto_file_decl_data *file_data,
	    struct lto_input_block *ib,
	    enum LTO_cgraph_tags tag,
	    VEC(cgraph_node_ptr, heap) *nodes)
{
  tree fn_decl;
  struct cgraph_node *node;
  struct bitpack_d bp;
  unsigned decl_index;
  int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
  int clone_ref;
  int order;

  /* Orders were streamed relative to the unit; rebase them into the
     global numbering via order_base.  */
  order = streamer_read_hwi (ib) + order_base;
  clone_ref = streamer_read_hwi (ib);

  decl_index = streamer_read_uhwi (ib);
  fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);

  /* A valid CLONE_REF means this node is a clone of an already-read
     node; materialize it as such rather than creating a fresh node.  */
  if (clone_ref != LCC_NOT_FOUND)
    {
      node = cgraph_clone_node (VEC_index (cgraph_node_ptr, nodes, clone_ref), fn_decl,
				0, CGRAPH_FREQ_BASE, false, NULL, false);
    }
  else
    node = cgraph_get_create_node (fn_decl);

  /* Keep the global order counter past every order we have seen.  */
  node->order = order;
  if (order >= cgraph_order)
    cgraph_order = order + 1;

  node->count = streamer_read_hwi (ib);
  node->count_materialization_scale = streamer_read_hwi (ib);

  /* Only analyzed nodes stream an inlined_to reference.  */
  if (tag == LTO_cgraph_analyzed_node)
    ref = streamer_read_hwi (ib);

  ref2 = streamer_read_hwi (ib);

  /* Make sure that we have not read this node before.  Nodes that
     have already been read will have their tag stored in the 'aux'
     field.  Since built-in functions can be referenced in multiple
     functions, they are expected to be read more than once.  */
  if (node->aux && !DECL_BUILT_IN (node->decl))
    internal_error ("bytecode stream: found multiple instances of cgraph "
		    "node %d", node->uid);

  bp = streamer_read_bitpack (ib);
  input_overwrite_node (file_data, node, tag, &bp);

  /* Store a reference for now, and fix up later to be a pointer.  */
  node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;

  /* Store a reference for now, and fix up later to be a pointer.  */
  node->same_comdat_group = (cgraph_node_ptr) (intptr_t) ref2;

  if (node->thunk.thunk_p)
    {
      /* TYPE is a small flag word: bit 1 is this_adjusting, bit 2 is
	 virtual_offset_p.  */
      int type = streamer_read_uhwi (ib);
      HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
      HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);

      node->thunk.fixed_offset = fixed_offset;
      node->thunk.this_adjusting = (type & 2);
      node->thunk.virtual_value = virtual_value;
      node->thunk.virtual_offset_p = (type & 4);
    }
  if (node->thunk.thunk_p || node->alias)
    {
      /* A nonzero flag says a thunk/alias target decl index follows.  */
      if (streamer_read_hwi_in_range (ib, "alias nonzero flag", 0, 1))
	{
	  decl_index = streamer_read_uhwi (ib);
	  node->thunk.alias = lto_file_decl_data_get_fn_decl (file_data,
							      decl_index);
	}
    }
  return node;
}
1035
/* Read a varpool node from input_block IB using the decls recorded in
   FILE_DATA.  Return the node read or overwritten.  */

static struct varpool_node *
input_varpool_node (struct lto_file_decl_data *file_data,
		    struct lto_input_block *ib)
{
  int decl_index;
  tree var_decl;
  struct varpool_node *node;
  struct bitpack_d bp;
  int ref = LCC_NOT_FOUND;
  bool non_null_aliasof;
  int order;

  /* Orders were streamed relative to the unit; rebase via order_base
     and keep the global order counter past every order seen.  */
  order = streamer_read_hwi (ib) + order_base;
  decl_index = streamer_read_uhwi (ib);
  var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
  node = varpool_node (var_decl);
  node->order = order;
  if (order >= cgraph_order)
    cgraph_order = order + 1;
  node->lto_file_data = file_data;

  /* Unpack flag bits; the order here must mirror the writer's.  */
  bp = streamer_read_bitpack (ib);
  node->externally_visible = bp_unpack_value (&bp, 1);
  node->force_output = bp_unpack_value (&bp, 1);
  node->finalized = bp_unpack_value (&bp, 1);
  node->alias = bp_unpack_value (&bp, 1);
  non_null_aliasof = bp_unpack_value (&bp, 1);
  node->analyzed = node->finalized;
  node->used_from_other_partition = bp_unpack_value (&bp, 1);
  node->in_other_partition = bp_unpack_value (&bp, 1);
  /* A variable materialized in another partition is external here.  */
  if (node->in_other_partition)
    {
      DECL_EXTERNAL (node->decl) = 1;
      TREE_STATIC (node->decl) = 0;
    }
  if (node->finalized)
    varpool_mark_needed_node (node);
  /* When set, the decl this variable is an alias of follows.  */
  if (non_null_aliasof)
    {
      decl_index = streamer_read_uhwi (ib);
      node->alias_of = lto_file_decl_data_get_var_decl (file_data, decl_index);
    }
  ref = streamer_read_hwi (ib);
  /* Store a reference for now, and fix up later to be a pointer.  */
  node->same_comdat_group = (struct varpool_node *) (intptr_t) ref;
  node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
					 LDPR_NUM_KNOWN);

  return node;
}
1089
/* Read one IPA reference from input_block IB and record it on the
   referring entity, which is either REFERING_NODE or
   REFERING_VARPOOL_NODE (the other one is NULL).  NODES and
   VARPOOL_NODES map stream indices to previously read nodes.  */

static void
input_ref (struct lto_input_block *ib,
	   struct cgraph_node *refering_node,
	   struct varpool_node *refering_varpool_node,
	   VEC(cgraph_node_ptr, heap) *nodes,
	   VEC(varpool_node_ptr, heap) *varpool_nodes)
{
  struct cgraph_node *node = NULL;
  struct varpool_node *varpool_node = NULL;
  struct bitpack_d bp;
  enum ipa_ref_type type;
  enum ipa_ref_use use;

  bp = streamer_read_bitpack (ib);
  type = (enum ipa_ref_type) bp_unpack_value (&bp, 1);
  use = (enum ipa_ref_use) bp_unpack_value (&bp, 2);
  /* The referred entity is an index into the vector matching TYPE.  */
  if (type == IPA_REF_CGRAPH)
    node = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
  else
    varpool_node = VEC_index (varpool_node_ptr, varpool_nodes,
			      streamer_read_hwi (ib));
  ipa_record_reference (refering_node, refering_varpool_node,
			node, varpool_node, use, NULL);
}
1117
/* Read an edge from IB.  NODES points to a vector of previously read nodes for
   decoding caller and callee of the edge to be read.  If INDIRECT is true, the
   edge being read is indirect (in the sense that it has
   indirect_unknown_callee set).  */

static void
input_edge (struct lto_input_block *ib, VEC(cgraph_node_ptr, heap) *nodes,
	    bool indirect)
{
  struct cgraph_node *caller, *callee;
  struct cgraph_edge *edge;
  unsigned int stmt_id;
  gcov_type count;
  int freq;
  cgraph_inline_failed_t inline_failed;
  struct bitpack_d bp;
  int ecf_flags = 0;

  /* Caller (and, for direct edges, callee) are indices into NODES.  */
  caller = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
  if (caller == NULL || caller->decl == NULL_TREE)
    internal_error ("bytecode stream: no caller found while reading edge");

  if (!indirect)
    {
      callee = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
      if (callee == NULL || callee->decl == NULL_TREE)
	internal_error ("bytecode stream: no callee found while reading edge");
    }
  else
    callee = NULL;

  count = (gcov_type) streamer_read_hwi (ib);

  /* The bitpack carries inline status, statement id, frequency and the
     remaining flag bits; unpack order must mirror the writer's.  */
  bp = streamer_read_bitpack (ib);
  inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_enum, CIF_N_REASONS);
  stmt_id = bp_unpack_var_len_unsigned (&bp);
  freq = (int) bp_unpack_var_len_unsigned (&bp);

  if (indirect)
    edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq);
  else
    edge = cgraph_create_edge (caller, callee, NULL, count, freq);

  edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
  edge->lto_stmt_uid = stmt_id;
  edge->inline_failed = inline_failed;
  edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
  edge->can_throw_external = bp_unpack_value (&bp, 1);
  /* Indirect edges additionally stream the callee's ECF flags, one bit
     per flag.  */
  if (indirect)
    {
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_CONST;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_PURE;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NORETURN;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_MALLOC;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NOTHROW;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_RETURNS_TWICE;
      edge->indirect_info->ecf_flags = ecf_flags;
    }
}
1183
1184
/* Read a cgraph from IB using the info in FILE_DATA.  Returns the
   vector of nodes read, in stream order, so that later sections can
   resolve node indices.  */

static VEC(cgraph_node_ptr, heap) *
input_cgraph_1 (struct lto_file_decl_data *file_data,
		struct lto_input_block *ib)
{
  enum LTO_cgraph_tags tag;
  VEC(cgraph_node_ptr, heap) *nodes = NULL;
  struct cgraph_node *node;
  unsigned i;

  tag = streamer_read_enum (ib, LTO_cgraph_tags, LTO_cgraph_last_tag);
  /* All orders in this unit are rebased on the current global counter;
     input_node/input_varpool_node add order_base back in.  */
  order_base = cgraph_order;
  /* The section is a sequence of tagged records terminated by tag 0.  */
  while (tag)
    {
      if (tag == LTO_cgraph_edge)
	input_edge (ib, nodes, false);
      else if (tag == LTO_cgraph_indirect_edge)
	input_edge (ib, nodes, true);
      else
	{
	  node = input_node (file_data, ib, tag,nodes);
	  if (node == NULL || node->decl == NULL_TREE)
	    internal_error ("bytecode stream: found empty cgraph node");
	  VEC_safe_push (cgraph_node_ptr, heap, nodes, node);
	  lto_cgraph_encoder_encode (file_data->cgraph_node_encoder, node);
	}

      tag = streamer_read_enum (ib, LTO_cgraph_tags, LTO_cgraph_last_tag);
    }

  /* Top-level asms are numbered in the same order space as nodes.  */
  lto_input_toplevel_asms (file_data, order_base);

  /* AUX pointers should be all non-zero for nodes read from the stream.  */
#ifdef ENABLE_CHECKING
  FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
    gcc_assert (node->aux);
#endif
  FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
    {
      int ref = (int) (intptr_t) node->global.inlined_to;

      /* We share declaration of builtins, so we may read same node twice.  */
      if (!node->aux)
	continue;
      node->aux = NULL;

      /* Fixup inlined_to from reference to pointer.  */
      if (ref != LCC_NOT_FOUND)
	node->global.inlined_to = VEC_index (cgraph_node_ptr, nodes, ref);
      else
	node->global.inlined_to = NULL;

      ref = (int) (intptr_t) node->same_comdat_group;

      /* Fixup same_comdat_group from reference to pointer.  */
      if (ref != LCC_NOT_FOUND)
	node->same_comdat_group = VEC_index (cgraph_node_ptr, nodes, ref);
      else
	node->same_comdat_group = NULL;
    }
  /* Re-mark the nodes; input_cgraph clears aux again once all units
     are read.  */
  FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
    node->aux = (void *)1;
  return nodes;
}
1250
/* Read a varpool from IB using the info in FILE_DATA.  Returns the
   vector of varpool nodes read, in stream order.  */

static VEC(varpool_node_ptr, heap) *
input_varpool_1 (struct lto_file_decl_data *file_data,
		struct lto_input_block *ib)
{
  unsigned HOST_WIDE_INT len;
  VEC(varpool_node_ptr, heap) *varpool = NULL;
  int i;
  struct varpool_node *node;

  /* The section starts with the node count.  */
  len = streamer_read_uhwi (ib);
  while (len)
    {
      VEC_safe_push (varpool_node_ptr, heap, varpool,
		     input_varpool_node (file_data, ib));
      len--;
    }
#ifdef ENABLE_CHECKING
  FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
    gcc_assert (!node->aux);
#endif
  FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
    {
      int ref = (int) (intptr_t) node->same_comdat_group;
      /* We share declaration of builtins, so we may read same node twice.  */
      if (node->aux)
	continue;
      /* Mark the node visited so a duplicate is skipped above.  */
      node->aux = (void *)1;

      /* Fixup same_comdat_group from reference to pointer.  */
      if (ref != LCC_NOT_FOUND)
	node->same_comdat_group = VEC_index (varpool_node_ptr, varpool, ref);
      else
	node->same_comdat_group = NULL;
    }
  /* Clear the visited marks again before returning.  */
  FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
    node->aux = NULL;
  return varpool;
}
1291
1292 /* Input ipa_refs. */
1293
1294 static void
1295 input_refs (struct lto_input_block *ib,
1296 VEC(cgraph_node_ptr, heap) *nodes,
1297 VEC(varpool_node_ptr, heap) *varpool)
1298 {
1299 int count;
1300 int idx;
1301 while (true)
1302 {
1303 struct cgraph_node *node;
1304 count = streamer_read_uhwi (ib);
1305 if (!count)
1306 break;
1307 idx = streamer_read_uhwi (ib);
1308 node = VEC_index (cgraph_node_ptr, nodes, idx);
1309 while (count)
1310 {
1311 input_ref (ib, node, NULL, nodes, varpool);
1312 count--;
1313 }
1314 }
1315 while (true)
1316 {
1317 struct varpool_node *node;
1318 count = streamer_read_uhwi (ib);
1319 if (!count)
1320 break;
1321 node = VEC_index (varpool_node_ptr, varpool,
1322 streamer_read_uhwi (ib));
1323 while (count)
1324 {
1325 input_ref (ib, NULL, node, nodes, varpool);
1326 count--;
1327 }
1328 }
1329 }
1330
1331
1332 static struct gcov_ctr_summary lto_gcov_summary;
1333
1334 /* Input profile_info from IB. */
1335 static void
1336 input_profile_summary (struct lto_input_block *ib,
1337 struct lto_file_decl_data *file_data)
1338 {
1339 unsigned int runs = streamer_read_uhwi (ib);
1340 if (runs)
1341 {
1342 file_data->profile_info.runs = runs;
1343 file_data->profile_info.sum_max = streamer_read_uhwi (ib);
1344 }
1345
1346 }
1347
1348 /* Rescale profile summaries to the same number of runs in the whole unit. */
1349
1350 static void
1351 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1352 {
1353 struct lto_file_decl_data *file_data;
1354 unsigned int j;
1355 gcov_unsigned_t max_runs = 0;
1356 struct cgraph_node *node;
1357 struct cgraph_edge *edge;
1358
1359 /* Find unit with maximal number of runs. If we ever get serious about
1360 roundoff errors, we might also consider computing smallest common
1361 multiply. */
1362 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1363 if (max_runs < file_data->profile_info.runs)
1364 max_runs = file_data->profile_info.runs;
1365
1366 if (!max_runs)
1367 return;
1368
1369 /* Simple overflow check. We probably don't need to support that many train
1370 runs. Such a large value probably imply data corruption anyway. */
1371 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1372 {
1373 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1374 INT_MAX / REG_BR_PROB_BASE);
1375 return;
1376 }
1377
1378 profile_info = &lto_gcov_summary;
1379 lto_gcov_summary.runs = max_runs;
1380 lto_gcov_summary.sum_max = 0;
1381
1382 /* Rescale all units to the maximal number of runs.
1383 sum_max can not be easily merged, as we have no idea what files come from
1384 the same run. We do not use the info anyway, so leave it 0. */
1385 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1386 if (file_data->profile_info.runs)
1387 {
1388 int scale = ((REG_BR_PROB_BASE * max_runs
1389 + file_data->profile_info.runs / 2)
1390 / file_data->profile_info.runs);
1391 lto_gcov_summary.sum_max = MAX (lto_gcov_summary.sum_max,
1392 (file_data->profile_info.sum_max
1393 * scale
1394 + REG_BR_PROB_BASE / 2)
1395 / REG_BR_PROB_BASE);
1396 }
1397
1398 /* Watch roundoff errors. */
1399 if (lto_gcov_summary.sum_max < max_runs)
1400 lto_gcov_summary.sum_max = max_runs;
1401
1402 /* If merging already happent at WPA time, we are done. */
1403 if (flag_ltrans)
1404 return;
1405
1406 /* Now compute count_materialization_scale of each node.
1407 During LTRANS we already have values of count_materialization_scale
1408 computed, so just update them. */
1409 for (node = cgraph_nodes; node; node = node->next)
1410 if (node->local.lto_file_data
1411 && node->local.lto_file_data->profile_info.runs)
1412 {
1413 int scale;
1414
1415 scale =
1416 ((node->count_materialization_scale * max_runs
1417 + node->local.lto_file_data->profile_info.runs / 2)
1418 / node->local.lto_file_data->profile_info.runs);
1419 node->count_materialization_scale = scale;
1420 if (scale < 0)
1421 fatal_error ("Profile information in %s corrupted",
1422 file_data->file_name);
1423
1424 if (scale == REG_BR_PROB_BASE)
1425 continue;
1426 for (edge = node->callees; edge; edge = edge->next_callee)
1427 edge->count = ((edge->count * scale + REG_BR_PROB_BASE / 2)
1428 / REG_BR_PROB_BASE);
1429 node->count = ((node->count * scale + REG_BR_PROB_BASE / 2)
1430 / REG_BR_PROB_BASE);
1431 }
1432 }
1433
/* Input and merge the cgraph from each of the .o files passed to
   lto1.  For every unit: read the profile summary, the cgraph section,
   the varpool section, and the IPA references section (which indexes
   into the two node vectors just read); then merge profile summaries
   across units.  */

void
input_cgraph (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;
  struct cgraph_node *node;

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib;
      VEC(cgraph_node_ptr, heap) *nodes;
      VEC(varpool_node_ptr, heap) *varpool;

      /* Cgraph section: profile summary followed by the node/edge
	 records.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_cgraph,
					  &data, &len);
      if (!ib)
	fatal_error ("cannot find LTO cgraph in %s", file_data->file_name);
      input_profile_summary (ib, file_data);
      file_data->cgraph_node_encoder = lto_cgraph_encoder_new ();
      nodes = input_cgraph_1 (file_data, ib);
      lto_destroy_simple_input_block (file_data, LTO_section_cgraph,
				      ib, data, len);

      /* Varpool section.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_varpool,
					  &data, &len);
      if (!ib)
	fatal_error ("cannot find LTO varpool in %s", file_data->file_name);
      varpool = input_varpool_1 (file_data, ib);
      lto_destroy_simple_input_block (file_data, LTO_section_varpool,
				      ib, data, len);

      /* References section; needs both node vectors for index lookup.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_refs,
					  &data, &len);
      if (!ib)
	fatal_error("cannot find LTO section refs in %s", file_data->file_name);
      input_refs (ib, nodes, varpool);
      lto_destroy_simple_input_block (file_data, LTO_section_refs,
				      ib, data, len);
      /* Optimization summaries only exist at LTRANS time.  */
      if (flag_ltrans)
	input_cgraph_opt_summary (nodes);
      VEC_free (cgraph_node_ptr, heap, nodes);
      VEC_free (varpool_node_ptr, heap, varpool);
    }

  merge_profile_summaries (file_data_vec);

  /* Clear out the aux field that was used to store enough state to
     tell which nodes should be overwritten.  */
  for (node = cgraph_nodes; node; node = node->next)
    {
      /* Some nodes may have been created by cgraph_node.  This
	 happens when the callgraph contains nested functions.  If the
	 node for the parent function was never emitted to the gimple
	 file, cgraph_node will create a node for it when setting the
	 context of the nested function.  */
      if (node->local.lto_file_data)
	node->aux = NULL;
    }
}
1499
1500 /* True when we need optimization summary for NODE. */
1501
1502 static int
1503 output_cgraph_opt_summary_p (struct cgraph_node *node,
1504 cgraph_node_set set ATTRIBUTE_UNUSED)
1505 {
1506 return (node->clone_of
1507 && (node->clone.tree_map
1508 || node->clone.args_to_skip
1509 || node->clone.combined_args_to_skip));
1510 }
1511
/* Output optimization summary for EDGE to OB.  Intentionally empty:
   no per-edge optimization data is streamed at the moment; kept as a
   hook so node summaries can iterate over edges uniformly.  */
static void
output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
			 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
{
}
1518
/* Output optimization summary for NODE to OB: the clone's
   args_to_skip and combined_args_to_skip bitmaps, its tree_map
   replacements, and (for nodes in SET) per-edge summaries.  */

static void
output_node_opt_summary (struct output_block *ob,
			 struct cgraph_node *node,
			 cgraph_node_set set)
{
  unsigned int index;
  bitmap_iterator bi;
  struct ipa_replace_map *map;
  struct bitpack_d bp;
  int i;
  struct cgraph_edge *e;

  /* Each bitmap is streamed as a count followed by the set bits; an
     absent bitmap is streamed as count zero.  */
  if (node->clone.args_to_skip)
    {
      streamer_write_uhwi (ob, bitmap_count_bits (node->clone.args_to_skip));
      EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
	streamer_write_uhwi (ob, index);
    }
  else
    streamer_write_uhwi (ob, 0);
  if (node->clone.combined_args_to_skip)
    {
      streamer_write_uhwi (ob, bitmap_count_bits (node->clone.combined_args_to_skip));
      EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
	streamer_write_uhwi (ob, index);
    }
  else
    streamer_write_uhwi (ob, 0);
  streamer_write_uhwi (ob, VEC_length (ipa_replace_map_p,
				       node->clone.tree_map));
  FOR_EACH_VEC_ELT (ipa_replace_map_p, node->clone.tree_map, i, map)
    {
      int parm_num;
      tree parm;

      /* Encode old_tree as its position in DECL_ARGUMENTS, since the
	 reader cannot resolve local decls from the summary alone.  */
      for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm;
	   parm = DECL_CHAIN (parm), parm_num++)
	if (map->old_tree == parm)
	  break;
      /* At the moment we assume all old trees to be PARM_DECLs, because we have no
         mechanism to store function local declarations into summaries.  */
      gcc_assert (parm);
      streamer_write_uhwi (ob, parm_num);
      stream_write_tree (ob, map->new_tree, true);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, map->replace_p, 1);
      bp_pack_value (&bp, map->ref_p, 1);
      streamer_write_bitpack (&bp);
    }

  if (cgraph_node_in_set_p (node, set))
    {
      for (e = node->callees; e; e = e->next_callee)
	output_edge_opt_summary (ob, e);
      for (e = node->indirect_calls; e; e = e->next_callee)
	output_edge_opt_summary (ob, e);
    }
}
1579
1580 /* Output optimization summaries stored in callgraph.
1581 At the moment it is the clone info structure. */
1582
1583 static void
1584 output_cgraph_opt_summary (cgraph_node_set set)
1585 {
1586 struct cgraph_node *node;
1587 int i, n_nodes;
1588 lto_cgraph_encoder_t encoder;
1589 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1590 unsigned count = 0;
1591
1592 ob->cgraph_node = NULL;
1593 encoder = ob->decl_state->cgraph_node_encoder;
1594 n_nodes = lto_cgraph_encoder_size (encoder);
1595 for (i = 0; i < n_nodes; i++)
1596 if (output_cgraph_opt_summary_p (lto_cgraph_encoder_deref (encoder, i),
1597 set))
1598 count++;
1599 streamer_write_uhwi (ob, count);
1600 for (i = 0; i < n_nodes; i++)
1601 {
1602 node = lto_cgraph_encoder_deref (encoder, i);
1603 if (output_cgraph_opt_summary_p (node, set))
1604 {
1605 streamer_write_uhwi (ob, i);
1606 output_node_opt_summary (ob, node, set);
1607 }
1608 }
1609 produce_asm (ob, NULL);
1610 destroy_output_block (ob);
1611 }
1612
/* Input optimisation summary of EDGE.  Intentionally empty: nothing
   is streamed per edge at the moment; kept as the reader-side
   counterpart of output_edge_opt_summary.  */

static void
input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
			struct lto_input_block *ib_main ATTRIBUTE_UNUSED)
{
}
1620
1621 /* Input optimisation summary of NODE. */
1622
1623 static void
1624 input_node_opt_summary (struct cgraph_node *node,
1625 struct lto_input_block *ib_main,
1626 struct data_in *data_in)
1627 {
1628 int i;
1629 int count;
1630 int bit;
1631 struct bitpack_d bp;
1632 struct cgraph_edge *e;
1633
1634 count = streamer_read_uhwi (ib_main);
1635 if (count)
1636 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
1637 for (i = 0; i < count; i++)
1638 {
1639 bit = streamer_read_uhwi (ib_main);
1640 bitmap_set_bit (node->clone.args_to_skip, bit);
1641 }
1642 count = streamer_read_uhwi (ib_main);
1643 if (count)
1644 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
1645 for (i = 0; i < count; i++)
1646 {
1647 bit = streamer_read_uhwi (ib_main);
1648 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
1649 }
1650 count = streamer_read_uhwi (ib_main);
1651 for (i = 0; i < count; i++)
1652 {
1653 int parm_num;
1654 tree parm;
1655 struct ipa_replace_map *map = ggc_alloc_ipa_replace_map ();
1656
1657 VEC_safe_push (ipa_replace_map_p, gc, node->clone.tree_map, map);
1658 for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm_num;
1659 parm = DECL_CHAIN (parm))
1660 parm_num --;
1661 map->parm_num = streamer_read_uhwi (ib_main);
1662 map->old_tree = NULL;
1663 map->new_tree = stream_read_tree (ib_main, data_in);
1664 bp = streamer_read_bitpack (ib_main);
1665 map->replace_p = bp_unpack_value (&bp, 1);
1666 map->ref_p = bp_unpack_value (&bp, 1);
1667 }
1668 for (e = node->callees; e; e = e->next_callee)
1669 input_edge_opt_summary (e, ib_main);
1670 for (e = node->indirect_calls; e; e = e->next_callee)
1671 input_edge_opt_summary (e, ib_main);
1672 }
1673
/* Read section in file FILE_DATA of length LEN with data DATA.
   The section layout is an lto_function_header followed by the cfg
   area, the main stream, and the string table; only the main stream
   and strings are used here.  NODES maps stream indices to cgraph
   nodes.  */

static void
input_cgraph_opt_section (struct lto_file_decl_data *file_data,
			  const char *data, size_t len, VEC (cgraph_node_ptr,
							     heap) * nodes)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int32_t cfg_offset = sizeof (struct lto_function_header);
  const int32_t main_offset = cfg_offset + header->cfg_size;
  const int32_t string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
			header->main_size);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, NULL);
  /* The stream holds COUNT (node index, node summary) pairs.  */
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      int ref = streamer_read_uhwi (&ib_main);
      input_node_opt_summary (VEC_index (cgraph_node_ptr, nodes, ref),
			      &ib_main, data_in);
    }
  lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
1709
1710 /* Input optimization summary of cgraph. */
1711
1712 static void
1713 input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes)
1714 {
1715 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1716 struct lto_file_decl_data *file_data;
1717 unsigned int j = 0;
1718
1719 while ((file_data = file_data_vec[j++]))
1720 {
1721 size_t len;
1722 const char *data =
1723 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
1724 &len);
1725
1726 if (data)
1727 input_cgraph_opt_section (file_data, data, len, nodes);
1728 }
1729 }