Split out LTO's writing of top level asm nodes in preparation of extending what...
[gcc.git] / gcc / lto-cgraph.c
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright 2009, 2010, 2011 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "expr.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "hashtab.h"
33 #include "langhooks.h"
34 #include "basic-block.h"
35 #include "tree-flow.h"
36 #include "cgraph.h"
37 #include "function.h"
38 #include "ggc.h"
39 #include "diagnostic-core.h"
40 #include "except.h"
41 #include "vec.h"
42 #include "timevar.h"
43 #include "output.h"
44 #include "pointer-set.h"
45 #include "lto-streamer.h"
46 #include "data-streamer.h"
47 #include "tree-streamer.h"
48 #include "gcov-io.h"
49
50 static void output_varpool (cgraph_node_set, varpool_node_set);
51 static void output_cgraph_opt_summary (cgraph_node_set set);
52 static void input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes);
53
/* Number of LDPR values known to GCC.  LDPR_RESOLVED_DYN is the highest
   ld_plugin_symbol_resolution enumerator we stream.  */
#define LDPR_NUM_KNOWN (LDPR_RESOLVED_DYN + 1)

/* Cgraph streaming is organized as a set of records whose type
   is indicated by a tag.  */
enum LTO_cgraph_tags
{
  /* Must leave 0 for the stopper.  */

  /* Cgraph node without body available.  */
  LTO_cgraph_unavail_node = 1,
  /* Cgraph node with function body.  */
  LTO_cgraph_analyzed_node,
  /* Cgraph edges.  */
  LTO_cgraph_edge,
  LTO_cgraph_indirect_edge,
  LTO_cgraph_last_tag
};
72
73 /* Create a new cgraph encoder. */
74
75 lto_cgraph_encoder_t
76 lto_cgraph_encoder_new (void)
77 {
78 lto_cgraph_encoder_t encoder = XCNEW (struct lto_cgraph_encoder_d);
79 encoder->map = pointer_map_create ();
80 encoder->nodes = NULL;
81 encoder->body = pointer_set_create ();
82 return encoder;
83 }
84
85
86 /* Delete ENCODER and its components. */
87
88 void
89 lto_cgraph_encoder_delete (lto_cgraph_encoder_t encoder)
90 {
91 VEC_free (cgraph_node_ptr, heap, encoder->nodes);
92 pointer_map_destroy (encoder->map);
93 pointer_set_destroy (encoder->body);
94 free (encoder);
95 }
96
97
98 /* Return the existing reference number of NODE in the cgraph encoder in
99 output block OB. Assign a new reference if this is the first time
100 NODE is encoded. */
101
102 int
103 lto_cgraph_encoder_encode (lto_cgraph_encoder_t encoder,
104 struct cgraph_node *node)
105 {
106 int ref;
107 void **slot;
108
109 slot = pointer_map_contains (encoder->map, node);
110 if (!slot)
111 {
112 ref = VEC_length (cgraph_node_ptr, encoder->nodes);
113 slot = pointer_map_insert (encoder->map, node);
114 *slot = (void *) (intptr_t) ref;
115 VEC_safe_push (cgraph_node_ptr, heap, encoder->nodes, node);
116 }
117 else
118 ref = (int) (intptr_t) *slot;
119
120 return ref;
121 }
122
123 #define LCC_NOT_FOUND (-1)
124
125 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
126 or LCC_NOT_FOUND if it is not there. */
127
128 int
129 lto_cgraph_encoder_lookup (lto_cgraph_encoder_t encoder,
130 struct cgraph_node *node)
131 {
132 void **slot = pointer_map_contains (encoder->map, node);
133 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
134 }
135
136
137 /* Return the cgraph node corresponding to REF using ENCODER. */
138
139 struct cgraph_node *
140 lto_cgraph_encoder_deref (lto_cgraph_encoder_t encoder, int ref)
141 {
142 if (ref == LCC_NOT_FOUND)
143 return NULL;
144
145 return VEC_index (cgraph_node_ptr, encoder->nodes, ref);
146 }
147
148
/* Return TRUE if we should encode the body of NODE (if any).
   (The original comment said "initializer" — copy-paste from the
   varpool variant; this predicate queries the function-body set.)  */

bool
lto_cgraph_encoder_encode_body_p (lto_cgraph_encoder_t encoder,
				  struct cgraph_node *node)
{
  return pointer_set_contains (encoder->body, node);
}
157
/* Mark NODE in ENCODER as a node whose function body should be streamed
   out.  (The original comment wrongly said "Return TRUE ..." — this is
   the setter, not the predicate.)  */

static void
lto_set_cgraph_encoder_encode_body (lto_cgraph_encoder_t encoder,
				    struct cgraph_node *node)
{
  pointer_set_insert (encoder->body, node);
}
166
167 /* Create a new varpool encoder. */
168
169 lto_varpool_encoder_t
170 lto_varpool_encoder_new (void)
171 {
172 lto_varpool_encoder_t encoder = XCNEW (struct lto_varpool_encoder_d);
173 encoder->map = pointer_map_create ();
174 encoder->initializer = pointer_set_create ();
175 encoder->nodes = NULL;
176 return encoder;
177 }
178
179
180 /* Delete ENCODER and its components. */
181
182 void
183 lto_varpool_encoder_delete (lto_varpool_encoder_t encoder)
184 {
185 VEC_free (varpool_node_ptr, heap, encoder->nodes);
186 pointer_map_destroy (encoder->map);
187 pointer_set_destroy (encoder->initializer);
188 free (encoder);
189 }
190
191
192 /* Return the existing reference number of NODE in the varpool encoder in
193 output block OB. Assign a new reference if this is the first time
194 NODE is encoded. */
195
196 int
197 lto_varpool_encoder_encode (lto_varpool_encoder_t encoder,
198 struct varpool_node *node)
199 {
200 int ref;
201 void **slot;
202
203 slot = pointer_map_contains (encoder->map, node);
204 if (!slot)
205 {
206 ref = VEC_length (varpool_node_ptr, encoder->nodes);
207 slot = pointer_map_insert (encoder->map, node);
208 *slot = (void *) (intptr_t) ref;
209 VEC_safe_push (varpool_node_ptr, heap, encoder->nodes, node);
210 }
211 else
212 ref = (int) (intptr_t) *slot;
213
214 return ref;
215 }
216
217 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
218 or LCC_NOT_FOUND if it is not there. */
219
220 int
221 lto_varpool_encoder_lookup (lto_varpool_encoder_t encoder,
222 struct varpool_node *node)
223 {
224 void **slot = pointer_map_contains (encoder->map, node);
225 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
226 }
227
228
229 /* Return the varpool node corresponding to REF using ENCODER. */
230
231 struct varpool_node *
232 lto_varpool_encoder_deref (lto_varpool_encoder_t encoder, int ref)
233 {
234 if (ref == LCC_NOT_FOUND)
235 return NULL;
236
237 return VEC_index (varpool_node_ptr, encoder->nodes, ref);
238 }
239
240
/* Return TRUE if we should encode the initializer of NODE (if any).  */

bool
lto_varpool_encoder_encode_initializer_p (lto_varpool_encoder_t encoder,
					  struct varpool_node *node)
{
  return pointer_set_contains (encoder->initializer, node);
}
249
/* Mark NODE in ENCODER as a variable whose initializer should be streamed
   out.  (The original comment wrongly said "Return TRUE ..." — this is
   the setter, not the predicate.)  */

static void
lto_set_varpool_encoder_encode_initializer (lto_varpool_encoder_t encoder,
					    struct varpool_node *node)
{
  pointer_set_insert (encoder->initializer, node);
}
258
/* Output the cgraph EDGE to OB using ENCODER.  The reader must consume
   fields in exactly this order: tag, caller ref, [callee ref for direct
   edges], count, then one bitpack.  */

static void
lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
		 lto_cgraph_encoder_t encoder)
{
  unsigned int uid;
  intptr_t ref;
  struct bitpack_d bp;

  /* Tag first, so the reader knows whether a callee reference follows.  */
  if (edge->indirect_unknown_callee)
    streamer_write_enum (ob->main_stream, LTO_cgraph_tags, LTO_cgraph_last_tag,
			 LTO_cgraph_indirect_edge);
  else
    streamer_write_enum (ob->main_stream, LTO_cgraph_tags, LTO_cgraph_last_tag,
			 LTO_cgraph_edge);

  ref = lto_cgraph_encoder_lookup (encoder, edge->caller);
  gcc_assert (ref != LCC_NOT_FOUND);
  streamer_write_hwi_stream (ob->main_stream, ref);

  /* Only direct edges have a known callee to reference.  */
  if (!edge->indirect_unknown_callee)
    {
      ref = lto_cgraph_encoder_lookup (encoder, edge->callee);
      gcc_assert (ref != LCC_NOT_FOUND);
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  streamer_write_hwi_stream (ob->main_stream, edge->count);

  bp = bitpack_create (ob->main_stream);
  /* When the caller's body is not around, the uid cached on the edge is
     authoritative; otherwise take it from the call statement itself.  */
  uid = (!gimple_has_body_p (edge->caller->decl)
	 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt));
  bp_pack_enum (&bp, cgraph_inline_failed_enum,
		CIF_N_REASONS, edge->inline_failed);
  bp_pack_var_len_unsigned (&bp, uid);
  bp_pack_var_len_unsigned (&bp, edge->frequency);
  bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
  bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
  bp_pack_value (&bp, edge->can_throw_external, 1);
  if (edge->indirect_unknown_callee)
    {
      /* Indirect edges carry the ECF flags of the (unknown) callee as
	 individual bits.  */
      int flags = edge->indirect_info->ecf_flags;
      bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
      /* Flags that should not appear on indirect calls.  */
      gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
			     | ECF_MAY_BE_ALLOCA
			     | ECF_SIBCALL
			     | ECF_LEAF
			     | ECF_NOVOPS)));
    }
  streamer_write_bitpack (&bp);
}
317
318 /* Return if LIST contain references from other partitions. */
319
320 bool
321 referenced_from_other_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
322 varpool_node_set vset)
323 {
324 int i;
325 struct ipa_ref *ref;
326 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
327 {
328 if (ref->refering_type == IPA_REF_CGRAPH)
329 {
330 if (ipa_ref_refering_node (ref)->in_other_partition
331 || !cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
332 return true;
333 }
334 else
335 {
336 if (ipa_ref_refering_varpool_node (ref)->in_other_partition
337 || !varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
338 vset))
339 return true;
340 }
341 }
342 return false;
343 }
344
345 /* Return true when node is reachable from other partition. */
346
347 bool
348 reachable_from_other_partition_p (struct cgraph_node *node, cgraph_node_set set)
349 {
350 struct cgraph_edge *e;
351 if (!node->analyzed)
352 return false;
353 if (node->global.inlined_to)
354 return false;
355 for (e = node->callers; e; e = e->next_caller)
356 if (e->caller->in_other_partition
357 || !cgraph_node_in_set_p (e->caller, set))
358 return true;
359 return false;
360 }
361
/* Return true if LIST contains references from THIS partition, i.e. from
   nodes in SET or variables in VSET.  (The original comment said "other
   partitions" — copy-pasted from referenced_from_other_partition_p.)  */

bool
referenced_from_this_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
				  varpool_node_set vset)
{
  int i;
  struct ipa_ref *ref;
  for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
    {
      if (ref->refering_type == IPA_REF_CGRAPH)
	{
	  if (cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
	    return true;
	}
      else
	{
	  if (varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
				     vset))
	    return true;
	}
    }
  return false;
}
386
/* Return true when NODE has at least one caller inside THIS partition
   (SET).  (The original comment said "other partition" — copy-pasted
   from reachable_from_other_partition_p.)  */

bool
reachable_from_this_partition_p (struct cgraph_node *node, cgraph_node_set set)
{
  struct cgraph_edge *e;
  for (e = node->callers; e; e = e->next_caller)
    if (cgraph_node_in_set_p (e->caller, set))
      return true;
  return false;
}
398
399 /* Output the cgraph NODE to OB. ENCODER is used to find the
400 reference number of NODE->inlined_to. SET is the set of nodes we
401 are writing to the current file. If NODE is not in SET, then NODE
402 is a boundary of a cgraph_node_set and we pretend NODE just has a
403 decl and no callees. WRITTEN_DECLS is the set of FUNCTION_DECLs
404 that have had their callgraph node written so far. This is used to
405 determine if NODE is a clone of a previously written node. */
406
407 static void
408 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
409 lto_cgraph_encoder_t encoder, cgraph_node_set set,
410 varpool_node_set vset)
411 {
412 unsigned int tag;
413 struct bitpack_d bp;
414 bool boundary_p;
415 intptr_t ref;
416 bool in_other_partition = false;
417 struct cgraph_node *clone_of;
418
419 boundary_p = !cgraph_node_in_set_p (node, set);
420
421 if (node->analyzed && !boundary_p)
422 tag = LTO_cgraph_analyzed_node;
423 else
424 tag = LTO_cgraph_unavail_node;
425
426 streamer_write_enum (ob->main_stream, LTO_cgraph_tags, LTO_cgraph_last_tag,
427 tag);
428
429 /* In WPA mode, we only output part of the call-graph. Also, we
430 fake cgraph node attributes. There are two cases that we care.
431
432 Boundary nodes: There are nodes that are not part of SET but are
433 called from within SET. We artificially make them look like
434 externally visible nodes with no function body.
435
436 Cherry-picked nodes: These are nodes we pulled from other
437 translation units into SET during IPA-inlining. We make them as
438 local static nodes to prevent clashes with other local statics. */
439 if (boundary_p && node->analyzed)
440 {
441 /* Inline clones can not be part of boundary.
442 gcc_assert (!node->global.inlined_to);
443
444 FIXME: At the moment they can be, when partition contains an inline
445 clone that is clone of inline clone from outside partition. We can
446 reshape the clone tree and make other tree to be the root, but it
447 needs a bit extra work and will be promplty done by cgraph_remove_node
448 after reading back. */
449 in_other_partition = 1;
450 }
451
452 clone_of = node->clone_of;
453 while (clone_of
454 && (ref = lto_cgraph_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
455 if (clone_of->prev_sibling_clone)
456 clone_of = clone_of->prev_sibling_clone;
457 else
458 clone_of = clone_of->clone_of;
459
460 if (LTO_cgraph_analyzed_node)
461 gcc_assert (clone_of || !node->clone_of);
462 if (!clone_of)
463 streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
464 else
465 streamer_write_hwi_stream (ob->main_stream, ref);
466
467
468 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
469 streamer_write_hwi_stream (ob->main_stream, node->count);
470 streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
471
472 if (tag == LTO_cgraph_analyzed_node)
473 {
474 if (node->global.inlined_to)
475 {
476 ref = lto_cgraph_encoder_lookup (encoder, node->global.inlined_to);
477 gcc_assert (ref != LCC_NOT_FOUND);
478 }
479 else
480 ref = LCC_NOT_FOUND;
481
482 streamer_write_hwi_stream (ob->main_stream, ref);
483 }
484
485 if (node->same_comdat_group && !boundary_p)
486 {
487 ref = lto_cgraph_encoder_lookup (encoder, node->same_comdat_group);
488 gcc_assert (ref != LCC_NOT_FOUND);
489 }
490 else
491 ref = LCC_NOT_FOUND;
492 streamer_write_hwi_stream (ob->main_stream, ref);
493
494 bp = bitpack_create (ob->main_stream);
495 bp_pack_value (&bp, node->local.local, 1);
496 bp_pack_value (&bp, node->local.externally_visible, 1);
497 bp_pack_value (&bp, node->local.finalized, 1);
498 bp_pack_value (&bp, node->local.versionable, 1);
499 bp_pack_value (&bp, node->local.can_change_signature, 1);
500 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
501 bp_pack_value (&bp, node->needed, 1);
502 bp_pack_value (&bp, node->address_taken, 1);
503 bp_pack_value (&bp, node->abstract_and_needed, 1);
504 bp_pack_value (&bp, tag == LTO_cgraph_analyzed_node
505 && !DECL_EXTERNAL (node->decl)
506 && !DECL_COMDAT (node->decl)
507 && (reachable_from_other_partition_p (node, set)
508 || referenced_from_other_partition_p (&node->ref_list, set, vset)), 1);
509 bp_pack_value (&bp, node->lowered, 1);
510 bp_pack_value (&bp, in_other_partition, 1);
511 bp_pack_value (&bp, node->alias && !boundary_p, 1);
512 bp_pack_value (&bp, node->frequency, 2);
513 bp_pack_value (&bp, node->only_called_at_startup, 1);
514 bp_pack_value (&bp, node->only_called_at_exit, 1);
515 bp_pack_value (&bp, node->thunk.thunk_p && !boundary_p, 1);
516 bp_pack_enum (&bp, ld_plugin_symbol_resolution,
517 LDPR_NUM_KNOWN, node->resolution);
518 streamer_write_bitpack (&bp);
519
520 if (node->thunk.thunk_p && !boundary_p)
521 {
522 streamer_write_uhwi_stream
523 (ob->main_stream,
524 1 + (node->thunk.this_adjusting != 0) * 2
525 + (node->thunk.virtual_offset_p != 0) * 4);
526 streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
527 streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
528 }
529 if ((node->alias || node->thunk.thunk_p) && !boundary_p)
530 {
531 streamer_write_hwi_in_range (ob->main_stream, 0, 1,
532 node->thunk.alias != NULL);
533 if (node->thunk.alias != NULL)
534 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
535 node->thunk.alias);
536 }
537 }
538
/* Output the varpool NODE to OB.
   If NODE is not in SET, then NODE is a boundary.  */

static void
lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
			 lto_varpool_encoder_t varpool_encoder,
			 cgraph_node_set set, varpool_node_set vset)
{
  /* Unanalyzed nodes are never considered boundary here.  */
  bool boundary_p = !varpool_node_in_set_p (node, vset) && node->analyzed;
  struct bitpack_d bp;
  int ref;

  lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, node->externally_visible, 1);
  bp_pack_value (&bp, node->force_output, 1);
  bp_pack_value (&bp, node->finalized, 1);
  bp_pack_value (&bp, node->alias, 1);
  bp_pack_value (&bp, node->alias_of != NULL, 1);
  gcc_assert (node->finalized || !node->analyzed);
  gcc_assert (node->needed);
  /* Constant pool initializers can be de-unified into individual ltrans units.
     FIXME: Alternatively at -Os we may want to avoid generating for them the local
     labels and share them across LTRANS partitions.  */
  if (DECL_IN_CONSTANT_POOL (node->decl)
      && !DECL_COMDAT (node->decl))
    {
      bp_pack_value (&bp, 0, 1);  /* used_from_other_partition.  */
      bp_pack_value (&bp, 0, 1);  /* in_other_partition.  */
    }
  else
    {
      bp_pack_value (&bp, node->analyzed
		     && referenced_from_other_partition_p (&node->ref_list,
							   set, vset), 1);
      bp_pack_value (&bp, boundary_p, 1);  /* in_other_partition.  */
    }
  streamer_write_bitpack (&bp);
  if (node->alias_of)
    lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->alias_of);
  /* Next node in the same comdat group, if it is in this partition.  */
  if (node->same_comdat_group && !boundary_p)
    {
      ref = lto_varpool_encoder_lookup (varpool_encoder, node->same_comdat_group);
      gcc_assert (ref != LCC_NOT_FOUND);
    }
  else
    ref = LCC_NOT_FOUND;
  streamer_write_hwi_stream (ob->main_stream, ref);
  streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
		       LDPR_NUM_KNOWN, node->resolution);
}
590
/* Output the IPA reference REF to OB, as a bitpack (referred type + use)
   followed by the encoder reference of the referred node.  (The original
   comment said "Output the varpool NODE" — copy-pasted from above.)  */

static void
lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
		lto_cgraph_encoder_t encoder,
		lto_varpool_encoder_t varpool_encoder)
{
  struct bitpack_d bp;
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ref->refered_type, 1);
  bp_pack_value (&bp, ref->use, 2);
  streamer_write_bitpack (&bp);
  if (ref->refered_type == IPA_REF_CGRAPH)
    {
      int nref = lto_cgraph_encoder_lookup (encoder, ipa_ref_node (ref));
      gcc_assert (nref != LCC_NOT_FOUND);
      streamer_write_hwi_stream (ob->main_stream, nref);
    }
  else
    {
      int nref = lto_varpool_encoder_lookup (varpool_encoder,
					     ipa_ref_varpool_node (ref));
      gcc_assert (nref != LCC_NOT_FOUND);
      streamer_write_hwi_stream (ob->main_stream, nref);
    }
}
618
619 /* Stream out profile_summary to OB. */
620
621 static void
622 output_profile_summary (struct lto_simple_output_block *ob)
623 {
624 if (profile_info)
625 {
626 /* We do not output num, sum_all and run_max, they are not used by
627 GCC profile feedback and they are difficult to merge from multiple
628 units. */
629 gcc_assert (profile_info->runs);
630 streamer_write_uhwi_stream (ob->main_stream, profile_info->runs);
631 streamer_write_uhwi_stream (ob->main_stream, profile_info->sum_max);
632 }
633 else
634 streamer_write_uhwi_stream (ob->main_stream, 0);
635 }
636
/* Add NODE into ENCODER as well as the nodes it is cloned from.
   Do it in a way so clones appear first: the recursion bottoms out at
   the clone root, which is therefore encoded before its clones.
   INCLUDE_BODY requests the function body of the clone root only;
   clones carry no body of their own.  */

static void
add_node_to (lto_cgraph_encoder_t encoder, struct cgraph_node *node,
	     bool include_body)
{
  if (node->clone_of)
    add_node_to (encoder, node->clone_of, include_body);
  else if (include_body)
    lto_set_cgraph_encoder_encode_body (encoder, node);
  lto_cgraph_encoder_encode (encoder, node);
}
650
651 /* Add all references in LIST to encoders. */
652
653 static void
654 add_references (lto_cgraph_encoder_t encoder,
655 lto_varpool_encoder_t varpool_encoder,
656 struct ipa_ref_list *list)
657 {
658 int i;
659 struct ipa_ref *ref;
660 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
661 if (ref->refered_type == IPA_REF_CGRAPH)
662 add_node_to (encoder, ipa_ref_node (ref), false);
663 else
664 {
665 struct varpool_node *vnode = ipa_ref_varpool_node (ref);
666 lto_varpool_encoder_encode (varpool_encoder, vnode);
667 }
668 }
669
/* Output all callees or indirect outgoing edges.  EDGE must be the first
   such edge (node->callees or node->indirect_calls); NULL is allowed and
   outputs nothing.  */

static void
output_outgoing_cgraph_edges (struct cgraph_edge *edge,
			      struct lto_simple_output_block *ob,
			      lto_cgraph_encoder_t encoder)
{
  if (!edge)
    return;

  /* Output edges in backward direction, so the reconstructed callgraph match
     and it is easy to associate call sites in the IPA pass summaries.  */
  while (edge->next_callee)
    edge = edge->next_callee;
  for (; edge; edge = edge->prev_callee)
    lto_output_edge (ob, edge, encoder);
}
688
/* Output the IPA references of all cgraph nodes in SET and all varpool
   nodes in VSET.  (The original comment said "part of the cgraph" —
   copy-pasted from output_cgraph.)  Each record is: reference count,
   the owning node's encoder reference, then the references themselves;
   each of the two lists is terminated by a 0 count.  */

static void
output_refs (cgraph_node_set set, varpool_node_set vset,
	     lto_cgraph_encoder_t encoder,
	     lto_varpool_encoder_t varpool_encoder)
{
  cgraph_node_set_iterator csi;
  varpool_node_set_iterator vsi;
  struct lto_simple_output_block *ob;
  int count;
  struct ipa_ref *ref;
  int i;

  ob = lto_create_simple_output_block (LTO_section_refs);

  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      struct cgraph_node *node = csi_node (csi);

      count = ipa_ref_list_nreferences (&node->ref_list);
      if (count)
	{
	  streamer_write_uhwi_stream (ob->main_stream, count);
	  streamer_write_uhwi_stream (ob->main_stream,
				     lto_cgraph_encoder_lookup (encoder, node));
	  for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
	    lto_output_ref (ob, ref, encoder, varpool_encoder);
	}
    }

  streamer_write_uhwi_stream (ob->main_stream, 0);

  for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
    {
      struct varpool_node *node = vsi_node (vsi);

      count = ipa_ref_list_nreferences (&node->ref_list);
      if (count)
	{
	  streamer_write_uhwi_stream (ob->main_stream, count);
	  streamer_write_uhwi_stream (ob->main_stream,
				     lto_varpool_encoder_lookup (varpool_encoder,
								 node));
	  for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
	    lto_output_ref (ob, ref, encoder, varpool_encoder);
	}
    }

  streamer_write_uhwi_stream (ob->main_stream, 0);

  lto_destroy_simple_output_block (ob);
}
742
/* Find out all cgraph and varpool nodes we want to encode in current unit
   and insert them to encoders stored in STATE.  SET/VSET are the nodes
   assigned to this partition; boundary callees and referenced variables
   are pulled in without their bodies/initializers.  */
void
compute_ltrans_boundary (struct lto_out_decl_state *state,
			 cgraph_node_set set, varpool_node_set vset)
{
  struct cgraph_node *node;
  cgraph_node_set_iterator csi;
  varpool_node_set_iterator vsi;
  struct cgraph_edge *edge;
  int i;
  lto_cgraph_encoder_t encoder;
  lto_varpool_encoder_t varpool_encoder;

  encoder = state->cgraph_node_encoder = lto_cgraph_encoder_new ();
  varpool_encoder = state->varpool_node_encoder = lto_varpool_encoder_new ();

  /* Go over all the nodes in SET and assign references.  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      add_node_to (encoder, node, true);
      add_references (encoder, varpool_encoder, &node->ref_list);
    }
  for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
    {
      struct varpool_node *vnode = vsi_node (vsi);
      gcc_assert (!vnode->alias || vnode->alias_of);
      lto_varpool_encoder_encode (varpool_encoder, vnode);
      lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
      add_references (encoder, varpool_encoder, &vnode->ref_list);
    }
  /* Pickle in also the initializer of all referenced readonly variables
     to help folding.  Constant pool variables are not shared, so we must
     pickle those too.
     Note: the encoder may grow while we iterate, so new entries appended
     by add_references below are visited as well.  */
  for (i = 0; i < lto_varpool_encoder_size (varpool_encoder); i++)
    {
      struct varpool_node *vnode = lto_varpool_encoder_deref (varpool_encoder, i);
      if (DECL_INITIAL (vnode->decl)
	  && !lto_varpool_encoder_encode_initializer_p (varpool_encoder,
							vnode)
	  && const_value_known_p (vnode->decl))
	{
	  lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
	  add_references (encoder, varpool_encoder, &vnode->ref_list);
	}
    }

  /* Go over all the nodes again to include callees that are not in
     SET.  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      for (edge = node->callees; edge; edge = edge->next_callee)
	{
	  struct cgraph_node *callee = edge->callee;
	  if (!cgraph_node_in_set_p (callee, set))
	    {
	      /* We should have moved all the inlines.  */
	      gcc_assert (!callee->global.inlined_to);
	      add_node_to (encoder, callee, false);
	    }
	}
    }
}
808
/* Output the part of the cgraph in SET: all encoded nodes, then the
   outgoing edges of the nodes in SET (0-terminated), then toplevel
   asms (once per WPA run), the varpool, and the IPA references.  */

void
output_cgraph (cgraph_node_set set, varpool_node_set vset)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob;
  cgraph_node_set_iterator csi;
  int i, n_nodes;
  lto_cgraph_encoder_t encoder;
  lto_varpool_encoder_t varpool_encoder;
  /* Static: asm nodes must be emitted exactly once across all partitions.  */
  static bool asm_nodes_output = false;

  if (flag_wpa)
    output_cgraph_opt_summary (set);

  ob = lto_create_simple_output_block (LTO_section_cgraph);

  output_profile_summary (ob);

  /* An encoder for cgraph nodes should have been created by
     ipa_write_summaries_1.  */
  gcc_assert (ob->decl_state->cgraph_node_encoder);
  gcc_assert (ob->decl_state->varpool_node_encoder);
  encoder = ob->decl_state->cgraph_node_encoder;
  varpool_encoder = ob->decl_state->varpool_node_encoder;

  /* Write out the nodes.  We must first output a node and then its clones,
     otherwise at a time reading back the node there would be nothing to clone
     from.  */
  n_nodes = lto_cgraph_encoder_size (encoder);
  for (i = 0; i < n_nodes; i++)
    {
      node = lto_cgraph_encoder_deref (encoder, i);
      lto_output_node (ob, node, encoder, set, vset);
    }

  /* Go over the nodes in SET again to write edges.  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      output_outgoing_cgraph_edges (node->callees, ob, encoder);
      output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
    }

  streamer_write_uhwi_stream (ob->main_stream, 0);

  lto_destroy_simple_output_block (ob);

  /* Emit toplevel asms.
     When doing WPA we must output every asm just once.  Since we do not partition asm
     nodes at all, output them to first output.  This is kind of hack, but should work
     well.  */
  if (!asm_nodes_output)
    {
      asm_nodes_output = true;
      lto_output_toplevel_asms ();
    }

  output_varpool (set, vset);
  output_refs (set, vset, encoder, varpool_encoder);
}
871
/* Overwrite the information in NODE based on FILE_DATA and TAG.  This is
   called either to initialize NODE or to replace the values in it, for
   instance because the first time we saw it, the function body was not
   available but now it is.  BP is a bitpack with all the bitflags for
   NODE read from the stream; the unpack order must mirror the pack
   order in lto_output_node.  */

static void
input_overwrite_node (struct lto_file_decl_data *file_data,
		      struct cgraph_node *node,
		      enum LTO_cgraph_tags tag,
		      struct bitpack_d *bp)
{
  /* Stash the tag in aux so re-reads of the same node can be detected.  */
  node->aux = (void *) tag;
  node->local.lto_file_data = file_data;

  node->local.local = bp_unpack_value (bp, 1);
  node->local.externally_visible = bp_unpack_value (bp, 1);
  node->local.finalized = bp_unpack_value (bp, 1);
  node->local.versionable = bp_unpack_value (bp, 1);
  node->local.can_change_signature = bp_unpack_value (bp, 1);
  node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
  node->needed = bp_unpack_value (bp, 1);
  node->address_taken = bp_unpack_value (bp, 1);
  node->abstract_and_needed = bp_unpack_value (bp, 1);
  node->reachable_from_other_partition = bp_unpack_value (bp, 1);
  node->lowered = bp_unpack_value (bp, 1);
  node->analyzed = tag == LTO_cgraph_analyzed_node;
  node->in_other_partition = bp_unpack_value (bp, 1);
  if (node->in_other_partition
      /* Avoid updating decl when we are seeing just inline clone.
	 When inlining function that has functions already inlined into it,
	 we produce clones of inline clones.

	 WPA partitioning might put each clone into different unit and
	 we might end up streaming inline clone from other partition
	 to support clone we are interested in.  */
      && (!node->clone_of
	  || node->clone_of->decl != node->decl))
    {
      DECL_EXTERNAL (node->decl) = 1;
      TREE_STATIC (node->decl) = 0;
    }
  node->alias = bp_unpack_value (bp, 1);
  node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
  node->only_called_at_startup = bp_unpack_value (bp, 1);
  node->only_called_at_exit = bp_unpack_value (bp, 1);
  node->thunk.thunk_p = bp_unpack_value (bp, 1);
  node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
				     LDPR_NUM_KNOWN);
}
923
/* Output all varpool nodes known to the encoder of OB's decl state,
   for the partition described by SET/VSET.  (The original comment said
   "part of the cgraph" — copy-pasted from output_cgraph.)  */

static void
output_varpool (cgraph_node_set set, varpool_node_set vset)
{
  struct lto_simple_output_block *ob = lto_create_simple_output_block (LTO_section_varpool);
  lto_varpool_encoder_t varpool_encoder = ob->decl_state->varpool_node_encoder;
  int len = lto_varpool_encoder_size (varpool_encoder), i;

  streamer_write_uhwi_stream (ob->main_stream, len);

  /* Write out the nodes.  We must first output a node and then its clones,
     otherwise at a time reading back the node there would be nothing to clone
     from.  */
  for (i = 0; i < len; i++)
    {
      lto_output_varpool_node (ob, lto_varpool_encoder_deref (varpool_encoder, i),
			       varpool_encoder,
			       set, vset);
    }

  lto_destroy_simple_output_block (ob);
}
947
/* Read a cgraph node from input_block IB.  TAG is the node's tag just read.
   NODES holds the nodes read so far, indexed by their encoder reference
   (used to resolve clone origins).  Return the node read or overwritten.
   The read order must mirror the write order in lto_output_node.  */

static struct cgraph_node *
input_node (struct lto_file_decl_data *file_data,
	    struct lto_input_block *ib,
	    enum LTO_cgraph_tags tag,
	    VEC(cgraph_node_ptr, heap) *nodes)
{
  tree fn_decl;
  struct cgraph_node *node;
  struct bitpack_d bp;
  unsigned decl_index;
  int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
  int clone_ref;

  clone_ref = streamer_read_hwi (ib);

  decl_index = streamer_read_uhwi (ib);
  fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);

  /* Either materialize the node as a clone of an earlier node, or
     create/fetch a plain node for the decl.  */
  if (clone_ref != LCC_NOT_FOUND)
    {
      node = cgraph_clone_node (VEC_index (cgraph_node_ptr, nodes, clone_ref), fn_decl,
				0, CGRAPH_FREQ_BASE, false, NULL, false);
    }
  else
    node = cgraph_get_create_node (fn_decl);

  node->count = streamer_read_hwi (ib);
  node->count_materialization_scale = streamer_read_hwi (ib);

  /* Only analyzed nodes carry an inlined_to reference.  */
  if (tag == LTO_cgraph_analyzed_node)
    ref = streamer_read_hwi (ib);

  ref2 = streamer_read_hwi (ib);

  /* Make sure that we have not read this node before.  Nodes that
     have already been read will have their tag stored in the 'aux'
     field.  Since built-in functions can be referenced in multiple
     functions, they are expected to be read more than once.  */
  if (node->aux && !DECL_BUILT_IN (node->decl))
    internal_error ("bytecode stream: found multiple instances of cgraph "
		    "node %d", node->uid);

  bp = streamer_read_bitpack (ib);
  input_overwrite_node (file_data, node, tag, &bp);

  /* Store a reference for now, and fix up later to be a pointer.  */
  node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;

  /* Store a reference for now, and fix up later to be a pointer.  */
  node->same_comdat_group = (cgraph_node_ptr) (intptr_t) ref2;

  if (node->thunk.thunk_p)
    {
      /* TYPE packs this_adjusting (bit 1) and virtual_offset_p (bit 2);
	 see the matching encoding in lto_output_node.  */
      int type = streamer_read_uhwi (ib);
      HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
      HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);

      node->thunk.fixed_offset = fixed_offset;
      node->thunk.this_adjusting = (type & 2);
      node->thunk.virtual_value = virtual_value;
      node->thunk.virtual_offset_p = (type & 4);
    }
  if (node->thunk.thunk_p || node->alias)
    {
      if (streamer_read_hwi_in_range (ib, "alias nonzero flag", 0, 1))
	{
	  decl_index = streamer_read_uhwi (ib);
	  node->thunk.alias = lto_file_decl_data_get_fn_decl (file_data,
							      decl_index);
	}
    }
  return node;
}
1024
/* Read a varpool node from input_block IB.
   Return the node read or overwritten.  */
1027
static struct varpool_node *
input_varpool_node (struct lto_file_decl_data *file_data,
		    struct lto_input_block *ib)
{
  int decl_index;
  tree var_decl;
  struct varpool_node *node;
  struct bitpack_d bp;
  int ref = LCC_NOT_FOUND;
  bool non_null_aliasof;

  /* The variable decl is identified by an index into FILE_DATA's
     variable decl table.  */
  decl_index = streamer_read_uhwi (ib);
  var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
  node = varpool_node (var_decl);
  node->lto_file_data = file_data;

  /* Unpack the flag bits in the same order the writer packed them.  */
  bp = streamer_read_bitpack (ib);
  node->externally_visible = bp_unpack_value (&bp, 1);
  node->force_output = bp_unpack_value (&bp, 1);
  node->finalized = bp_unpack_value (&bp, 1);
  node->alias = bp_unpack_value (&bp, 1);
  non_null_aliasof = bp_unpack_value (&bp, 1);
  node->analyzed = node->finalized;
  node->used_from_other_partition = bp_unpack_value (&bp, 1);
  node->in_other_partition = bp_unpack_value (&bp, 1);
  if (node->in_other_partition)
    {
      /* The variable is defined in another partition; make it external
	 in this one.  */
      DECL_EXTERNAL (node->decl) = 1;
      TREE_STATIC (node->decl) = 0;
    }
  if (node->finalized)
    varpool_mark_needed_node (node);
  if (non_null_aliasof)
    {
      /* A decl index for alias_of follows only when it was non-NULL
	 on the writer side.  */
      decl_index = streamer_read_uhwi (ib);
      node->alias_of = lto_file_decl_data_get_var_decl (file_data, decl_index);
    }
  ref = streamer_read_hwi (ib);
  /* Store a reference for now, and fix up later to be a pointer.  */
  node->same_comdat_group = (struct varpool_node *) (intptr_t) ref;
  node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
					 LDPR_NUM_KNOWN);

  return node;
}
1073
1074 /* Read a node from input_block IB. TAG is the node's tag just read.
1075 Return the node read or overwriten. */
1076
1077 static void
1078 input_ref (struct lto_input_block *ib,
1079 struct cgraph_node *refering_node,
1080 struct varpool_node *refering_varpool_node,
1081 VEC(cgraph_node_ptr, heap) *nodes,
1082 VEC(varpool_node_ptr, heap) *varpool_nodes)
1083 {
1084 struct cgraph_node *node = NULL;
1085 struct varpool_node *varpool_node = NULL;
1086 struct bitpack_d bp;
1087 enum ipa_ref_type type;
1088 enum ipa_ref_use use;
1089
1090 bp = streamer_read_bitpack (ib);
1091 type = (enum ipa_ref_type) bp_unpack_value (&bp, 1);
1092 use = (enum ipa_ref_use) bp_unpack_value (&bp, 2);
1093 if (type == IPA_REF_CGRAPH)
1094 node = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
1095 else
1096 varpool_node = VEC_index (varpool_node_ptr, varpool_nodes,
1097 streamer_read_hwi (ib));
1098 ipa_record_reference (refering_node, refering_varpool_node,
1099 node, varpool_node, use, NULL);
1100 }
1101
1102 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1103 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1104 edge being read is indirect (in the sense that it has
1105 indirect_unknown_callee set). */
1106
static void
input_edge (struct lto_input_block *ib, VEC(cgraph_node_ptr, heap) *nodes,
	    bool indirect)
{
  struct cgraph_node *caller, *callee;
  struct cgraph_edge *edge;
  unsigned int stmt_id;
  gcov_type count;
  int freq;
  cgraph_inline_failed_t inline_failed;
  struct bitpack_d bp;
  int ecf_flags = 0;

  /* The caller is streamed as an index into NODES.  */
  caller = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
  if (caller == NULL || caller->decl == NULL_TREE)
    internal_error ("bytecode stream: no caller found while reading edge");

  if (!indirect)
    {
      /* Direct edges additionally stream the callee index.  */
      callee = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
      if (callee == NULL || callee->decl == NULL_TREE)
	internal_error ("bytecode stream: no callee found while reading edge");
    }
  else
    callee = NULL;

  count = (gcov_type) streamer_read_hwi (ib);

  /* The remaining edge attributes are packed in one bitpack; the
     unpack order below must match the writer's pack order.  */
  bp = streamer_read_bitpack (ib);
  inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_enum, CIF_N_REASONS);
  stmt_id = bp_unpack_var_len_unsigned (&bp);
  freq = (int) bp_unpack_var_len_unsigned (&bp);

  if (indirect)
    edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq);
  else
    edge = cgraph_create_edge (caller, callee, NULL, count, freq);

  edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
  edge->lto_stmt_uid = stmt_id;
  edge->inline_failed = inline_failed;
  edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
  edge->can_throw_external = bp_unpack_value (&bp, 1);
  if (indirect)
    {
      /* Indirect edges carry the callee's ECF flags, one bit each.  */
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_CONST;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_PURE;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NORETURN;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_MALLOC;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NOTHROW;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_RETURNS_TWICE;
      edge->indirect_info->ecf_flags = ecf_flags;
    }
}
1167
1168
1169 /* Read a cgraph from IB using the info in FILE_DATA. */
1170
static VEC(cgraph_node_ptr, heap) *
input_cgraph_1 (struct lto_file_decl_data *file_data,
		struct lto_input_block *ib)
{
  enum LTO_cgraph_tags tag;
  VEC(cgraph_node_ptr, heap) *nodes = NULL;
  struct cgraph_node *node;
  unsigned i;

  /* The section is a sequence of tagged records terminated by a zero
     tag: node records and edge records, where edges refer to
     previously read nodes by their index in NODES.  */
  tag = streamer_read_enum (ib, LTO_cgraph_tags, LTO_cgraph_last_tag);
  while (tag)
    {
      if (tag == LTO_cgraph_edge)
	input_edge (ib, nodes, false);
      else if (tag == LTO_cgraph_indirect_edge)
	input_edge (ib, nodes, true);
      else
	{
	  node = input_node (file_data, ib, tag,nodes);
	  if (node == NULL || node->decl == NULL_TREE)
	    internal_error ("bytecode stream: found empty cgraph node");
	  VEC_safe_push (cgraph_node_ptr, heap, nodes, node);
	  lto_cgraph_encoder_encode (file_data->cgraph_node_encoder, node);
	}

      tag = streamer_read_enum (ib, LTO_cgraph_tags, LTO_cgraph_last_tag);
    }

  lto_input_toplevel_asms (file_data);

  /* AUX pointers should be all non-zero for nodes read from the stream.  */
#ifdef ENABLE_CHECKING
  FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
    gcc_assert (node->aux);
#endif
  FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
    {
      int ref = (int) (intptr_t) node->global.inlined_to;

      /* We share declaration of builtins, so we may read same node twice.  */
      if (!node->aux)
	continue;
      node->aux = NULL;

      /* Fixup inlined_to from reference to pointer.  */
      if (ref != LCC_NOT_FOUND)
	node->global.inlined_to = VEC_index (cgraph_node_ptr, nodes, ref);
      else
	node->global.inlined_to = NULL;

      ref = (int) (intptr_t) node->same_comdat_group;

      /* Fixup same_comdat_group from reference to pointer.  */
      if (ref != LCC_NOT_FOUND)
	node->same_comdat_group = VEC_index (cgraph_node_ptr, nodes, ref);
      else
	node->same_comdat_group = NULL;
    }
  /* Re-mark every node's aux as non-NULL; input_cgraph clears the
     markers again after all files are read.  */
  FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
    node->aux = (void *)1;
  return nodes;
}
1233
1234 /* Read a varpool from IB using the info in FILE_DATA. */
1235
static VEC(varpool_node_ptr, heap) *
input_varpool_1 (struct lto_file_decl_data *file_data,
		struct lto_input_block *ib)
{
  unsigned HOST_WIDE_INT len;
  VEC(varpool_node_ptr, heap) *varpool = NULL;
  int i;
  struct varpool_node *node;

  /* The section begins with the number of nodes that follow.  */
  len = streamer_read_uhwi (ib);
  while (len)
    {
      VEC_safe_push (varpool_node_ptr, heap, varpool,
		     input_varpool_node (file_data, ib));
      len--;
    }
#ifdef ENABLE_CHECKING
  FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
    gcc_assert (!node->aux);
#endif
  FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
    {
      int ref = (int) (intptr_t) node->same_comdat_group;
      /* We share declaration of builtins, so we may read same node twice.  */
      if (node->aux)
	continue;
      /* Aux serves as a "seen" marker during fixup; it is cleared again
	 in the final loop below.  */
      node->aux = (void *)1;

      /* Fixup same_comdat_group from reference to pointer.  */
      if (ref != LCC_NOT_FOUND)
	node->same_comdat_group = VEC_index (varpool_node_ptr, varpool, ref);
      else
	node->same_comdat_group = NULL;
    }
  FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
    node->aux = NULL;
  return varpool;
}
1274
1275 /* Input ipa_refs. */
1276
1277 static void
1278 input_refs (struct lto_input_block *ib,
1279 VEC(cgraph_node_ptr, heap) *nodes,
1280 VEC(varpool_node_ptr, heap) *varpool)
1281 {
1282 int count;
1283 int idx;
1284 while (true)
1285 {
1286 struct cgraph_node *node;
1287 count = streamer_read_uhwi (ib);
1288 if (!count)
1289 break;
1290 idx = streamer_read_uhwi (ib);
1291 node = VEC_index (cgraph_node_ptr, nodes, idx);
1292 while (count)
1293 {
1294 input_ref (ib, node, NULL, nodes, varpool);
1295 count--;
1296 }
1297 }
1298 while (true)
1299 {
1300 struct varpool_node *node;
1301 count = streamer_read_uhwi (ib);
1302 if (!count)
1303 break;
1304 node = VEC_index (varpool_node_ptr, varpool,
1305 streamer_read_uhwi (ib));
1306 while (count)
1307 {
1308 input_ref (ib, NULL, node, nodes, varpool);
1309 count--;
1310 }
1311 }
1312 }
1313
1314
1315 static struct gcov_ctr_summary lto_gcov_summary;
1316
1317 /* Input profile_info from IB. */
1318 static void
1319 input_profile_summary (struct lto_input_block *ib,
1320 struct lto_file_decl_data *file_data)
1321 {
1322 unsigned int runs = streamer_read_uhwi (ib);
1323 if (runs)
1324 {
1325 file_data->profile_info.runs = runs;
1326 file_data->profile_info.sum_max = streamer_read_uhwi (ib);
1327 }
1328
1329 }
1330
1331 /* Rescale profile summaries to the same number of runs in the whole unit. */
1332
1333 static void
1334 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1335 {
1336 struct lto_file_decl_data *file_data;
1337 unsigned int j;
1338 gcov_unsigned_t max_runs = 0;
1339 struct cgraph_node *node;
1340 struct cgraph_edge *edge;
1341
1342 /* Find unit with maximal number of runs. If we ever get serious about
1343 roundoff errors, we might also consider computing smallest common
1344 multiply. */
1345 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1346 if (max_runs < file_data->profile_info.runs)
1347 max_runs = file_data->profile_info.runs;
1348
1349 if (!max_runs)
1350 return;
1351
1352 /* Simple overflow check. We probably don't need to support that many train
1353 runs. Such a large value probably imply data corruption anyway. */
1354 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1355 {
1356 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1357 INT_MAX / REG_BR_PROB_BASE);
1358 return;
1359 }
1360
1361 profile_info = &lto_gcov_summary;
1362 lto_gcov_summary.runs = max_runs;
1363 lto_gcov_summary.sum_max = 0;
1364
1365 /* Rescale all units to the maximal number of runs.
1366 sum_max can not be easily merged, as we have no idea what files come from
1367 the same run. We do not use the info anyway, so leave it 0. */
1368 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1369 if (file_data->profile_info.runs)
1370 {
1371 int scale = ((REG_BR_PROB_BASE * max_runs
1372 + file_data->profile_info.runs / 2)
1373 / file_data->profile_info.runs);
1374 lto_gcov_summary.sum_max = MAX (lto_gcov_summary.sum_max,
1375 (file_data->profile_info.sum_max
1376 * scale
1377 + REG_BR_PROB_BASE / 2)
1378 / REG_BR_PROB_BASE);
1379 }
1380
1381 /* Watch roundoff errors. */
1382 if (lto_gcov_summary.sum_max < max_runs)
1383 lto_gcov_summary.sum_max = max_runs;
1384
1385 /* If merging already happent at WPA time, we are done. */
1386 if (flag_ltrans)
1387 return;
1388
1389 /* Now compute count_materialization_scale of each node.
1390 During LTRANS we already have values of count_materialization_scale
1391 computed, so just update them. */
1392 for (node = cgraph_nodes; node; node = node->next)
1393 if (node->local.lto_file_data
1394 && node->local.lto_file_data->profile_info.runs)
1395 {
1396 int scale;
1397
1398 scale =
1399 ((node->count_materialization_scale * max_runs
1400 + node->local.lto_file_data->profile_info.runs / 2)
1401 / node->local.lto_file_data->profile_info.runs);
1402 node->count_materialization_scale = scale;
1403 if (scale < 0)
1404 fatal_error ("Profile information in %s corrupted",
1405 file_data->file_name);
1406
1407 if (scale == REG_BR_PROB_BASE)
1408 continue;
1409 for (edge = node->callees; edge; edge = edge->next_callee)
1410 edge->count = ((edge->count * scale + REG_BR_PROB_BASE / 2)
1411 / REG_BR_PROB_BASE);
1412 node->count = ((node->count * scale + REG_BR_PROB_BASE / 2)
1413 / REG_BR_PROB_BASE);
1414 }
1415 }
1416
1417 /* Input and merge the cgraph from each of the .o files passed to
1418 lto1. */
1419
void
input_cgraph (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;
  struct cgraph_node *node;

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib;
      VEC(cgraph_node_ptr, heap) *nodes;
      VEC(varpool_node_ptr, heap) *varpool;

      /* Read the cgraph section: profile summary first, then the
	 node/edge records.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_cgraph,
					  &data, &len);
      if (!ib)
	fatal_error ("cannot find LTO cgraph in %s", file_data->file_name);
      input_profile_summary (ib, file_data);
      file_data->cgraph_node_encoder = lto_cgraph_encoder_new ();
      nodes = input_cgraph_1 (file_data, ib);
      lto_destroy_simple_input_block (file_data, LTO_section_cgraph,
				      ib, data, len);

      /* Read the varpool section.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_varpool,
					  &data, &len);
      if (!ib)
	fatal_error ("cannot find LTO varpool in %s", file_data->file_name);
      varpool = input_varpool_1 (file_data, ib);
      lto_destroy_simple_input_block (file_data, LTO_section_varpool,
				      ib, data, len);

      /* Read the IPA references; they index into NODES and VARPOOL,
	 so both must have been read first.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_refs,
					  &data, &len);
      if (!ib)
	fatal_error("cannot find LTO section refs in %s", file_data->file_name);
      input_refs (ib, nodes, varpool);
      lto_destroy_simple_input_block (file_data, LTO_section_refs,
				      ib, data, len);
      if (flag_ltrans)
	input_cgraph_opt_summary (nodes);
      VEC_free (cgraph_node_ptr, heap, nodes);
      VEC_free (varpool_node_ptr, heap, varpool);
    }

  merge_profile_summaries (file_data_vec);

  /* Clear out the aux field that was used to store enough state to
     tell which nodes should be overwritten.  */
  for (node = cgraph_nodes; node; node = node->next)
    {
      /* Some nodes may have been created by cgraph_node.  This
	 happens when the callgraph contains nested functions.  If the
	 node for the parent function was never emitted to the gimple
	 file, cgraph_node will create a node for it when setting the
	 context of the nested function.  */
      if (node->local.lto_file_data)
	node->aux = NULL;
    }
}
1482
1483 /* True when we need optimization summary for NODE. */
1484
1485 static int
1486 output_cgraph_opt_summary_p (struct cgraph_node *node,
1487 cgraph_node_set set ATTRIBUTE_UNUSED)
1488 {
1489 return (node->clone_of
1490 && (node->clone.tree_map
1491 || node->clone.args_to_skip
1492 || node->clone.combined_args_to_skip));
1493 }
1494
1495 /* Output optimization summary for EDGE to OB. */
static void
output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
			 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
{
  /* Intentionally empty: no per-edge optimization data is streamed
     yet.  Kept so output_node_opt_summary visits every edge and the
     stream format has a place to grow.  */
}
1501
1502 /* Output optimization summary for NODE to OB. */
1503
static void
output_node_opt_summary (struct output_block *ob,
			 struct cgraph_node *node,
			 cgraph_node_set set)
{
  unsigned int index;
  bitmap_iterator bi;
  struct ipa_replace_map *map;
  struct bitpack_d bp;
  int i;
  struct cgraph_edge *e;

  /* args_to_skip is streamed as a bit count followed by the set bit
     indices; an absent bitmap is streamed as count 0.  */
  if (node->clone.args_to_skip)
    {
      streamer_write_uhwi (ob, bitmap_count_bits (node->clone.args_to_skip));
      EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
	streamer_write_uhwi (ob, index);
    }
  else
    streamer_write_uhwi (ob, 0);
  /* Likewise for combined_args_to_skip.  */
  if (node->clone.combined_args_to_skip)
    {
      streamer_write_uhwi (ob, bitmap_count_bits (node->clone.combined_args_to_skip));
      EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
	streamer_write_uhwi (ob, index);
    }
  else
    streamer_write_uhwi (ob, 0);
  /* The replacement map: entry count, then per entry the parameter
     number, the new tree, and the replace_p/ref_p flag bits.  */
  streamer_write_uhwi (ob, VEC_length (ipa_replace_map_p,
				       node->clone.tree_map));
  FOR_EACH_VEC_ELT (ipa_replace_map_p, node->clone.tree_map, i, map)
    {
      int parm_num;
      tree parm;

      /* Translate old_tree into the position of the PARM_DECL in the
	 argument list, so the reader can identify it by number.  */
      for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm;
	   parm = DECL_CHAIN (parm), parm_num++)
	if (map->old_tree == parm)
	  break;
      /* At the moment we assume all old trees to be PARM_DECLs, because we have no
         mechanism to store function local declarations into summaries.  */
      gcc_assert (parm);
      streamer_write_uhwi (ob, parm_num);
      stream_write_tree (ob, map->new_tree, true);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, map->replace_p, 1);
      bp_pack_value (&bp, map->ref_p, 1);
      streamer_write_bitpack (&bp);
    }

  /* Edge summaries are streamed only for nodes in SET.  */
  if (cgraph_node_in_set_p (node, set))
    {
      for (e = node->callees; e; e = e->next_callee)
	output_edge_opt_summary (ob, e);
      for (e = node->indirect_calls; e; e = e->next_callee)
	output_edge_opt_summary (ob, e);
    }
}
1562
1563 /* Output optimization summaries stored in callgraph.
1564 At the moment it is the clone info structure. */
1565
1566 static void
1567 output_cgraph_opt_summary (cgraph_node_set set)
1568 {
1569 struct cgraph_node *node;
1570 int i, n_nodes;
1571 lto_cgraph_encoder_t encoder;
1572 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1573 unsigned count = 0;
1574
1575 ob->cgraph_node = NULL;
1576 encoder = ob->decl_state->cgraph_node_encoder;
1577 n_nodes = lto_cgraph_encoder_size (encoder);
1578 for (i = 0; i < n_nodes; i++)
1579 if (output_cgraph_opt_summary_p (lto_cgraph_encoder_deref (encoder, i),
1580 set))
1581 count++;
1582 streamer_write_uhwi (ob, count);
1583 for (i = 0; i < n_nodes; i++)
1584 {
1585 node = lto_cgraph_encoder_deref (encoder, i);
1586 if (output_cgraph_opt_summary_p (node, set))
1587 {
1588 streamer_write_uhwi (ob, i);
1589 output_node_opt_summary (ob, node, set);
1590 }
1591 }
1592 produce_asm (ob, NULL);
1593 destroy_output_block (ob);
1594 }
1595
1596 /* Input optimisation summary of EDGE. */
1597
static void
input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
			struct lto_input_block *ib_main ATTRIBUTE_UNUSED)
{
  /* Intentionally empty: output_edge_opt_summary streams no per-edge
     data yet, so there is nothing to read back.  */
}
1603
1604 /* Input optimisation summary of NODE. */
1605
1606 static void
1607 input_node_opt_summary (struct cgraph_node *node,
1608 struct lto_input_block *ib_main,
1609 struct data_in *data_in)
1610 {
1611 int i;
1612 int count;
1613 int bit;
1614 struct bitpack_d bp;
1615 struct cgraph_edge *e;
1616
1617 count = streamer_read_uhwi (ib_main);
1618 if (count)
1619 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
1620 for (i = 0; i < count; i++)
1621 {
1622 bit = streamer_read_uhwi (ib_main);
1623 bitmap_set_bit (node->clone.args_to_skip, bit);
1624 }
1625 count = streamer_read_uhwi (ib_main);
1626 if (count)
1627 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
1628 for (i = 0; i < count; i++)
1629 {
1630 bit = streamer_read_uhwi (ib_main);
1631 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
1632 }
1633 count = streamer_read_uhwi (ib_main);
1634 for (i = 0; i < count; i++)
1635 {
1636 int parm_num;
1637 tree parm;
1638 struct ipa_replace_map *map = ggc_alloc_ipa_replace_map ();
1639
1640 VEC_safe_push (ipa_replace_map_p, gc, node->clone.tree_map, map);
1641 for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm_num;
1642 parm = DECL_CHAIN (parm))
1643 parm_num --;
1644 map->parm_num = streamer_read_uhwi (ib_main);
1645 map->old_tree = NULL;
1646 map->new_tree = stream_read_tree (ib_main, data_in);
1647 bp = streamer_read_bitpack (ib_main);
1648 map->replace_p = bp_unpack_value (&bp, 1);
1649 map->ref_p = bp_unpack_value (&bp, 1);
1650 }
1651 for (e = node->callees; e; e = e->next_callee)
1652 input_edge_opt_summary (e, ib_main);
1653 for (e = node->indirect_calls; e; e = e->next_callee)
1654 input_edge_opt_summary (e, ib_main);
1655 }
1656
1657 /* Read section in file FILE_DATA of length LEN with data DATA. */
1658
static void
input_cgraph_opt_section (struct lto_file_decl_data *file_data,
			  const char *data, size_t len, VEC (cgraph_node_ptr,
							     heap) * nodes)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int32_t cfg_offset = sizeof (struct lto_function_header);
  const int32_t main_offset = cfg_offset + header->cfg_size;
  const int32_t string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  /* The section layout is: header, cfg area, main stream, string
     table.  Point IB_MAIN at the main stream.  */
  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
			header->main_size);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, NULL);
  /* COUNT records follow, each an index into NODES plus that node's
     summary.  */
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      int ref = streamer_read_uhwi (&ib_main);
      input_node_opt_summary (VEC_index (cgraph_node_ptr, nodes, ref),
			      &ib_main, data_in);
    }
  lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
1692
1693 /* Input optimization summary of cgraph. */
1694
1695 static void
1696 input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes)
1697 {
1698 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1699 struct lto_file_decl_data *file_data;
1700 unsigned int j = 0;
1701
1702 while ((file_data = file_data_vec[j++]))
1703 {
1704 size_t len;
1705 const char *data =
1706 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
1707 &len);
1708
1709 if (data)
1710 input_cgraph_opt_section (file_data, data, len, nodes);
1711 }
1712 }