1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright 2009, 2010, 2011 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "expr.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "hashtab.h"
33 #include "langhooks.h"
34 #include "basic-block.h"
35 #include "tree-flow.h"
36 #include "cgraph.h"
37 #include "function.h"
38 #include "ggc.h"
39 #include "diagnostic-core.h"
40 #include "except.h"
41 #include "vec.h"
42 #include "timevar.h"
43 #include "pointer-set.h"
44 #include "lto-streamer.h"
45 #include "data-streamer.h"
46 #include "tree-streamer.h"
47 #include "gcov-io.h"
48
49 static void output_varpool (cgraph_node_set, varpool_node_set);
50 static void output_cgraph_opt_summary (cgraph_node_set set);
51 static void input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes);
52
53 /* Number of LDPR values known to GCC. */
54 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
55
  56 /* All node orders are offset by ORDER_BASE.  */
57 static int order_base;
58
  59 /* Cgraph streaming is organized as a set of records whose type
  60    is indicated by a tag.  */
61 enum LTO_cgraph_tags
62 {
63 /* Must leave 0 for the stopper. */
64
65 /* Cgraph node without body available. */
66 LTO_cgraph_unavail_node = 1,
67 /* Cgraph node with function body. */
68 LTO_cgraph_analyzed_node,
69 /* Cgraph edges. */
70 LTO_cgraph_edge,
71 LTO_cgraph_indirect_edge,
72 LTO_cgraph_last_tag
73 };
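/* A sketch (for illustration only, based on input_cgraph_1 below) of the
   section layout these tags describe: the cgraph section is a sequence of
   tag-prefixed records terminated by a zero tag, and the reader loops as

	tag = streamer_read_enum (ib, LTO_cgraph_tags, LTO_cgraph_last_tag);
	while (tag)
	  {
	    ... read a node or edge record according to TAG ...
	    tag = streamer_read_enum (ib, LTO_cgraph_tags, LTO_cgraph_last_tag);
	  }  */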
74
75 /* Create a new cgraph encoder. */
76
77 lto_cgraph_encoder_t
78 lto_cgraph_encoder_new (void)
79 {
80 lto_cgraph_encoder_t encoder = XCNEW (struct lto_cgraph_encoder_d);
81 encoder->map = pointer_map_create ();
82 encoder->nodes = NULL;
83 encoder->body = pointer_set_create ();
84 return encoder;
85 }
86
87
88 /* Delete ENCODER and its components. */
89
90 void
91 lto_cgraph_encoder_delete (lto_cgraph_encoder_t encoder)
92 {
93 VEC_free (cgraph_node_ptr, heap, encoder->nodes);
94 pointer_map_destroy (encoder->map);
95 pointer_set_destroy (encoder->body);
96 free (encoder);
97 }
98
99
 100 /* Return the existing reference number of NODE in ENCODER.
 101    Assign a new reference if this is the first time
 102    NODE is encoded.  */
103
104 int
105 lto_cgraph_encoder_encode (lto_cgraph_encoder_t encoder,
106 struct cgraph_node *node)
107 {
108 int ref;
109 void **slot;
110
111 slot = pointer_map_contains (encoder->map, node);
112 if (!slot)
113 {
114 ref = VEC_length (cgraph_node_ptr, encoder->nodes);
115 slot = pointer_map_insert (encoder->map, node);
116 *slot = (void *) (intptr_t) ref;
117 VEC_safe_push (cgraph_node_ptr, heap, encoder->nodes, node);
118 }
119 else
120 ref = (int) (intptr_t) *slot;
121
122 return ref;
123 }
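/* Note that references are dense indices handed out in insertion order, so
   for any encoded NODE the identity

     lto_cgraph_encoder_deref (encoder,
			       lto_cgraph_encoder_encode (encoder, node))
       == node

   holds; the reader rebuilds the same vector while reading and uses the
   streamed indices to recover pointers.  */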
124
125 #define LCC_NOT_FOUND (-1)
126
127 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
128 or LCC_NOT_FOUND if it is not there. */
129
130 int
131 lto_cgraph_encoder_lookup (lto_cgraph_encoder_t encoder,
132 struct cgraph_node *node)
133 {
134 void **slot = pointer_map_contains (encoder->map, node);
135 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
136 }
137
138
139 /* Return the cgraph node corresponding to REF using ENCODER. */
140
141 struct cgraph_node *
142 lto_cgraph_encoder_deref (lto_cgraph_encoder_t encoder, int ref)
143 {
144 if (ref == LCC_NOT_FOUND)
145 return NULL;
146
147 return VEC_index (cgraph_node_ptr, encoder->nodes, ref);
148 }
149
150
 151 /* Return TRUE if we should encode the body of NODE (if any).  */
152
153 bool
154 lto_cgraph_encoder_encode_body_p (lto_cgraph_encoder_t encoder,
155 struct cgraph_node *node)
156 {
157 return pointer_set_contains (encoder->body, node);
158 }
159
 160 /* Mark NODE so that its body is encoded.  */
161
162 static void
163 lto_set_cgraph_encoder_encode_body (lto_cgraph_encoder_t encoder,
164 struct cgraph_node *node)
165 {
166 pointer_set_insert (encoder->body, node);
167 }
168
169 /* Create a new varpool encoder. */
170
171 lto_varpool_encoder_t
172 lto_varpool_encoder_new (void)
173 {
174 lto_varpool_encoder_t encoder = XCNEW (struct lto_varpool_encoder_d);
175 encoder->map = pointer_map_create ();
176 encoder->initializer = pointer_set_create ();
177 encoder->nodes = NULL;
178 return encoder;
179 }
180
181
182 /* Delete ENCODER and its components. */
183
184 void
185 lto_varpool_encoder_delete (lto_varpool_encoder_t encoder)
186 {
187 VEC_free (varpool_node_ptr, heap, encoder->nodes);
188 pointer_map_destroy (encoder->map);
189 pointer_set_destroy (encoder->initializer);
190 free (encoder);
191 }
192
193
 194 /* Return the existing reference number of NODE in the varpool ENCODER.
 195    Assign a new reference if this is the first time
 196    NODE is encoded.  */
197
198 int
199 lto_varpool_encoder_encode (lto_varpool_encoder_t encoder,
200 struct varpool_node *node)
201 {
202 int ref;
203 void **slot;
204
205 slot = pointer_map_contains (encoder->map, node);
206 if (!slot)
207 {
208 ref = VEC_length (varpool_node_ptr, encoder->nodes);
209 slot = pointer_map_insert (encoder->map, node);
210 *slot = (void *) (intptr_t) ref;
211 VEC_safe_push (varpool_node_ptr, heap, encoder->nodes, node);
212 }
213 else
214 ref = (int) (intptr_t) *slot;
215
216 return ref;
217 }
218
219 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
220 or LCC_NOT_FOUND if it is not there. */
221
222 int
223 lto_varpool_encoder_lookup (lto_varpool_encoder_t encoder,
224 struct varpool_node *node)
225 {
226 void **slot = pointer_map_contains (encoder->map, node);
227 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
228 }
229
230
231 /* Return the varpool node corresponding to REF using ENCODER. */
232
233 struct varpool_node *
234 lto_varpool_encoder_deref (lto_varpool_encoder_t encoder, int ref)
235 {
236 if (ref == LCC_NOT_FOUND)
237 return NULL;
238
239 return VEC_index (varpool_node_ptr, encoder->nodes, ref);
240 }
241
242
243 /* Return TRUE if we should encode initializer of NODE (if any). */
244
245 bool
246 lto_varpool_encoder_encode_initializer_p (lto_varpool_encoder_t encoder,
247 struct varpool_node *node)
248 {
249 return pointer_set_contains (encoder->initializer, node);
250 }
251
 252 /* Mark NODE so that its initializer is encoded.  */
253
254 static void
255 lto_set_varpool_encoder_encode_initializer (lto_varpool_encoder_t encoder,
256 struct varpool_node *node)
257 {
258 pointer_set_insert (encoder->initializer, node);
259 }
260
261 /* Output the cgraph EDGE to OB using ENCODER. */
262
263 static void
264 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
265 lto_cgraph_encoder_t encoder)
266 {
267 unsigned int uid;
268 intptr_t ref;
269 struct bitpack_d bp;
270
271 if (edge->indirect_unknown_callee)
272 streamer_write_enum (ob->main_stream, LTO_cgraph_tags, LTO_cgraph_last_tag,
273 LTO_cgraph_indirect_edge);
274 else
275 streamer_write_enum (ob->main_stream, LTO_cgraph_tags, LTO_cgraph_last_tag,
276 LTO_cgraph_edge);
277
278 ref = lto_cgraph_encoder_lookup (encoder, edge->caller);
279 gcc_assert (ref != LCC_NOT_FOUND);
280 streamer_write_hwi_stream (ob->main_stream, ref);
281
282 if (!edge->indirect_unknown_callee)
283 {
284 ref = lto_cgraph_encoder_lookup (encoder, edge->callee);
285 gcc_assert (ref != LCC_NOT_FOUND);
286 streamer_write_hwi_stream (ob->main_stream, ref);
287 }
288
289 streamer_write_hwi_stream (ob->main_stream, edge->count);
290
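   /* Note: the fields packed below must stay in the same order as they are
      unpacked in input_edge; the bitpack carries no self-description.  */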
291 bp = bitpack_create (ob->main_stream);
292 uid = (!gimple_has_body_p (edge->caller->symbol.decl)
293 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt));
294 bp_pack_enum (&bp, cgraph_inline_failed_enum,
295 CIF_N_REASONS, edge->inline_failed);
296 bp_pack_var_len_unsigned (&bp, uid);
297 bp_pack_var_len_unsigned (&bp, edge->frequency);
298 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
299 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
300 bp_pack_value (&bp, edge->can_throw_external, 1);
301 if (edge->indirect_unknown_callee)
302 {
303 int flags = edge->indirect_info->ecf_flags;
304 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
305 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
306 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
307 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
308 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
309 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
310 /* Flags that should not appear on indirect calls. */
311 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
312 | ECF_MAY_BE_ALLOCA
313 | ECF_SIBCALL
314 | ECF_LEAF
315 | ECF_NOVOPS)));
316 }
317 streamer_write_bitpack (&bp);
318 }
319
 320 /* Return true if LIST contains references from other partitions.  */
321
322 bool
323 referenced_from_other_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
324 varpool_node_set vset)
325 {
326 int i;
327 struct ipa_ref *ref;
328 for (i = 0; ipa_ref_list_referring_iterate (list, i, ref); i++)
329 {
330 if (symtab_function_p (ref->referring))
331 {
332 if (ipa_ref_referring_node (ref)->symbol.in_other_partition
333 || !cgraph_node_in_set_p (ipa_ref_referring_node (ref), set))
334 return true;
335 }
336 else
337 {
338 if (ipa_ref_referring_varpool_node (ref)->symbol.in_other_partition
339 || !varpool_node_in_set_p (ipa_ref_referring_varpool_node (ref),
340 vset))
341 return true;
342 }
343 }
344 return false;
345 }
346
 347 /* Return true when NODE is reachable from another partition.  */
348
349 bool
350 reachable_from_other_partition_p (struct cgraph_node *node, cgraph_node_set set)
351 {
352 struct cgraph_edge *e;
353 if (!node->analyzed)
354 return false;
355 if (node->global.inlined_to)
356 return false;
357 for (e = node->callers; e; e = e->next_caller)
358 if (e->caller->symbol.in_other_partition
359 || !cgraph_node_in_set_p (e->caller, set))
360 return true;
361 return false;
362 }
363
 364 /* Return true if LIST contains references from this partition.  */
365
366 bool
367 referenced_from_this_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
368 varpool_node_set vset)
369 {
370 int i;
371 struct ipa_ref *ref;
372 for (i = 0; ipa_ref_list_referring_iterate (list, i, ref); i++)
373 {
374 if (symtab_function_p (ref->referring))
375 {
376 if (cgraph_node_in_set_p (ipa_ref_referring_node (ref), set))
377 return true;
378 }
379 else
380 {
381 if (varpool_node_in_set_p (ipa_ref_referring_varpool_node (ref),
382 vset))
383 return true;
384 }
385 }
386 return false;
387 }
388
 389 /* Return true when NODE is reachable from this partition.  */
390
391 bool
392 reachable_from_this_partition_p (struct cgraph_node *node, cgraph_node_set set)
393 {
394 struct cgraph_edge *e;
395 for (e = node->callers; e; e = e->next_caller)
396 if (cgraph_node_in_set_p (e->caller, set))
397 return true;
398 return false;
399 }
400
 401 /* Output the cgraph NODE to OB.  ENCODER is used to find the
 402    reference number of NODE->inlined_to and of the node that NODE
 403    is a clone of.  SET is the set of nodes we are writing to the
 404    current file; VSET is the matching set of varpool nodes.  If
 405    NODE is not in SET, then NODE is a boundary of a
 406    cgraph_node_set and we pretend NODE just has a decl and no
 407    callees.  */
408
409 static void
410 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
411 lto_cgraph_encoder_t encoder, cgraph_node_set set,
412 varpool_node_set vset)
413 {
414 unsigned int tag;
415 struct bitpack_d bp;
416 bool boundary_p;
417 intptr_t ref;
418 bool in_other_partition = false;
419 struct cgraph_node *clone_of;
420
421 boundary_p = !cgraph_node_in_set_p (node, set);
422
423 if (node->analyzed && !boundary_p)
424 tag = LTO_cgraph_analyzed_node;
425 else
426 tag = LTO_cgraph_unavail_node;
427
428 streamer_write_enum (ob->main_stream, LTO_cgraph_tags, LTO_cgraph_last_tag,
429 tag);
430 streamer_write_hwi_stream (ob->main_stream, node->symbol.order);
431
 432   /* In WPA mode, we only output part of the call-graph.  Also, we
 433      fake cgraph node attributes.  There are two cases we care about:
 434
 435      Boundary nodes: These are nodes that are not part of SET but are
 436      called from within SET.  We artificially make them look like
 437      externally visible nodes with no function body.
 438
 439      Cherry-picked nodes: These are nodes we pulled from other
 440      translation units into SET during IPA-inlining.  We make them
 441      local static nodes to prevent clashes with other local statics.  */
442 if (boundary_p && node->analyzed && !DECL_EXTERNAL (node->symbol.decl))
443 {
 444       /* Inline clones cannot be part of a boundary.
 445 	 gcc_assert (!node->global.inlined_to);
 446
 447 	 FIXME: At the moment they can be, when the partition contains an inline
 448 	 clone that is a clone of an inline clone from outside the partition.  We
 449 	 could reshape the clone tree and make another node the root, but that
 450 	 needs a bit of extra work and will be promptly done by cgraph_remove_node
 451 	 after reading back.  */
452 in_other_partition = 1;
453 }
454
455 clone_of = node->clone_of;
456 while (clone_of
457 && (ref = lto_cgraph_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
458 if (clone_of->prev_sibling_clone)
459 clone_of = clone_of->prev_sibling_clone;
460 else
461 clone_of = clone_of->clone_of;
462
 463   if (tag == LTO_cgraph_analyzed_node)
464 gcc_assert (clone_of || !node->clone_of);
465 if (!clone_of)
466 streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
467 else
468 streamer_write_hwi_stream (ob->main_stream, ref);
469
470
471 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->symbol.decl);
472 streamer_write_hwi_stream (ob->main_stream, node->count);
473 streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
474
475 if (tag == LTO_cgraph_analyzed_node)
476 {
477 if (node->global.inlined_to)
478 {
479 ref = lto_cgraph_encoder_lookup (encoder, node->global.inlined_to);
480 gcc_assert (ref != LCC_NOT_FOUND);
481 }
482 else
483 ref = LCC_NOT_FOUND;
484
485 streamer_write_hwi_stream (ob->main_stream, ref);
486 }
487
488 if (node->symbol.same_comdat_group && !boundary_p)
489 {
490 ref = lto_cgraph_encoder_lookup (encoder,
491 cgraph (node->symbol.same_comdat_group));
492 gcc_assert (ref != LCC_NOT_FOUND);
493 }
494 else
495 ref = LCC_NOT_FOUND;
496 streamer_write_hwi_stream (ob->main_stream, ref);
497
498 bp = bitpack_create (ob->main_stream);
499 bp_pack_value (&bp, node->local.local, 1);
500 bp_pack_value (&bp, node->symbol.externally_visible, 1);
501 bp_pack_value (&bp, node->local.finalized, 1);
502 bp_pack_value (&bp, node->local.versionable, 1);
503 bp_pack_value (&bp, node->local.can_change_signature, 1);
504 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
505 bp_pack_value (&bp, node->symbol.force_output, 1);
506 bp_pack_value (&bp, node->symbol.address_taken, 1);
507 bp_pack_value (&bp, node->abstract_and_needed, 1);
508 bp_pack_value (&bp, tag == LTO_cgraph_analyzed_node
509 && !DECL_EXTERNAL (node->symbol.decl)
510 && !DECL_COMDAT (node->symbol.decl)
511 && (reachable_from_other_partition_p (node, set)
512 || referenced_from_other_partition_p (&node->symbol.ref_list,
513 set, vset)), 1);
514 bp_pack_value (&bp, node->lowered, 1);
515 bp_pack_value (&bp, in_other_partition, 1);
 516   /* Real aliases in a boundary become non-aliases.  However we still stream
 517      alias info on weakrefs.
 518      TODO: We lose a bit of information here - when we know that a variable is
 519      defined in another unit, we may use the info on aliases to resolve
 520      symbol1 != symbol2 type tests that we can otherwise do only for locally
 521      defined objects.  */
522 bp_pack_value (&bp, node->alias && (!boundary_p || DECL_EXTERNAL (node->symbol.decl)), 1);
523 bp_pack_value (&bp, node->frequency, 2);
524 bp_pack_value (&bp, node->only_called_at_startup, 1);
525 bp_pack_value (&bp, node->only_called_at_exit, 1);
526 bp_pack_value (&bp, node->tm_clone, 1);
527 bp_pack_value (&bp, node->thunk.thunk_p && !boundary_p, 1);
528 bp_pack_enum (&bp, ld_plugin_symbol_resolution,
529 LDPR_NUM_KNOWN, node->symbol.resolution);
530 streamer_write_bitpack (&bp);
531
532 if (node->thunk.thunk_p && !boundary_p)
533 {
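       /* The thunk kind is encoded in a single word: bit 0 is always set
	  (so the value is never zero), bit 1 is this_adjusting and bit 2
	  is virtual_offset_p; input_node decodes it with (type & 2) and
	  (type & 4).  */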
534 streamer_write_uhwi_stream
535 (ob->main_stream,
536 1 + (node->thunk.this_adjusting != 0) * 2
537 + (node->thunk.virtual_offset_p != 0) * 4);
538 streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
539 streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
540 }
541 if ((node->alias || node->thunk.thunk_p)
542 && (!boundary_p || (node->alias && DECL_EXTERNAL (node->symbol.decl))))
543 {
544 streamer_write_hwi_in_range (ob->main_stream, 0, 1,
545 node->thunk.alias != NULL);
546 if (node->thunk.alias != NULL)
547 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
548 node->thunk.alias);
549 }
550 }
551
552 /* Output the varpool NODE to OB.
 553    If NODE is not in VSET, then NODE is a boundary.  */
554
555 static void
556 lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
557 lto_varpool_encoder_t varpool_encoder,
558 cgraph_node_set set, varpool_node_set vset)
559 {
560 bool boundary_p = !varpool_node_in_set_p (node, vset) && node->analyzed;
561 struct bitpack_d bp;
562 int ref;
563
564 streamer_write_hwi_stream (ob->main_stream, node->symbol.order);
565 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->symbol.decl);
566 bp = bitpack_create (ob->main_stream);
567 bp_pack_value (&bp, node->symbol.externally_visible, 1);
568 bp_pack_value (&bp, node->symbol.force_output, 1);
569 bp_pack_value (&bp, node->finalized, 1);
570 bp_pack_value (&bp, node->alias, 1);
571 bp_pack_value (&bp, node->alias_of != NULL, 1);
572 gcc_assert (node->finalized || !node->analyzed);
 573   /* Constant pool initializers can be de-unified into individual ltrans units.
 574      FIXME: Alternatively, at -Os we may want to avoid generating the local
 575      labels for them and share them across LTRANS partitions.  */
576 if (DECL_IN_CONSTANT_POOL (node->symbol.decl)
577 && !DECL_EXTERNAL (node->symbol.decl)
578 && !DECL_COMDAT (node->symbol.decl))
579 {
 580       bp_pack_value (&bp, 0, 1); /* used_from_other_partition.  */
581 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
582 }
583 else
584 {
585 bp_pack_value (&bp, node->analyzed
586 && referenced_from_other_partition_p (&node->symbol.ref_list,
587 set, vset), 1);
588 bp_pack_value (&bp, boundary_p && !DECL_EXTERNAL (node->symbol.decl), 1);
589 /* in_other_partition. */
590 }
591 streamer_write_bitpack (&bp);
592 if (node->alias_of)
593 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->alias_of);
594 if (node->symbol.same_comdat_group && !boundary_p)
595 {
596 ref = lto_varpool_encoder_lookup (varpool_encoder,
597 varpool (node->symbol.same_comdat_group));
598 gcc_assert (ref != LCC_NOT_FOUND);
599 }
600 else
601 ref = LCC_NOT_FOUND;
602 streamer_write_hwi_stream (ob->main_stream, ref);
603 streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
604 LDPR_NUM_KNOWN, node->symbol.resolution);
605 }
606
 607 /* Output the reference REF to OB, using ENCODER for cgraph nodes
 608    and VARPOOL_ENCODER for varpool nodes.  */
609
610 static void
611 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
612 lto_cgraph_encoder_t encoder,
613 lto_varpool_encoder_t varpool_encoder)
614 {
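   /* A reference record is one bitpack (a single bit saying whether the
      referred symbol is a function, plus a 2-bit ipa_ref_use) followed by
      the index of the referred node in the matching encoder; input_ref
      reads it back in the same order.  */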
615 struct bitpack_d bp;
616 bp = bitpack_create (ob->main_stream);
617 bp_pack_value (&bp, symtab_function_p (ref->referred), 1);
618 bp_pack_value (&bp, ref->use, 2);
619 streamer_write_bitpack (&bp);
620 if (symtab_function_p (ref->referred))
621 {
622 int nref = lto_cgraph_encoder_lookup (encoder, ipa_ref_node (ref));
623 gcc_assert (nref != LCC_NOT_FOUND);
624 streamer_write_hwi_stream (ob->main_stream, nref);
625 }
626 else
627 {
628 int nref = lto_varpool_encoder_lookup (varpool_encoder,
629 ipa_ref_varpool_node (ref));
630 gcc_assert (nref != LCC_NOT_FOUND);
631 streamer_write_hwi_stream (ob->main_stream, nref);
632 }
633 }
634
635 /* Stream out profile_summary to OB. */
636
637 static void
638 output_profile_summary (struct lto_simple_output_block *ob)
639 {
640 if (profile_info)
641 {
 642       /* We do not output num, sum_all and run_max; they are not used by
 643 	 GCC profile feedback and they are difficult to merge from multiple
 644 	 units.  */
645 gcc_assert (profile_info->runs);
646 streamer_write_uhwi_stream (ob->main_stream, profile_info->runs);
647 streamer_write_uhwi_stream (ob->main_stream, profile_info->sum_max);
648 }
649 else
650 streamer_write_uhwi_stream (ob->main_stream, 0);
651 }
652
 653 /* Add NODE into ENCODER as well as the nodes it is cloned from.
 654    Do it in a way such that the original of a clone appears first.  */
655
656 static void
657 add_node_to (lto_cgraph_encoder_t encoder, struct cgraph_node *node,
658 bool include_body)
659 {
660 if (node->clone_of)
661 add_node_to (encoder, node->clone_of, include_body);
662 else if (include_body)
663 lto_set_cgraph_encoder_encode_body (encoder, node);
664 lto_cgraph_encoder_encode (encoder, node);
665 }
666
667 /* Add all references in LIST to encoders. */
668
669 static void
670 add_references (lto_cgraph_encoder_t encoder,
671 lto_varpool_encoder_t varpool_encoder,
672 struct ipa_ref_list *list)
673 {
674 int i;
675 struct ipa_ref *ref;
676 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
677 if (symtab_function_p (ref->referred))
678 add_node_to (encoder, ipa_ref_node (ref), false);
679 else
680 {
681 struct varpool_node *vnode = ipa_ref_varpool_node (ref);
682 lto_varpool_encoder_encode (varpool_encoder, vnode);
683 }
684 }
685
686 /* Output all callees or indirect outgoing edges. EDGE must be the first such
687 edge. */
688
689 static void
690 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
691 struct lto_simple_output_block *ob,
692 lto_cgraph_encoder_t encoder)
693 {
694 if (!edge)
695 return;
696
 697   /* Output edges in backward direction, so the reconstructed callgraph matches
 698      and it is easy to associate call sites with the IPA pass summaries.  */
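   /* The reader creates each edge at the head of the caller's callee list
      (as cgraph_create_edge does), so streaming the list back to front makes
      the rebuilt list come out in its original order.  */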
699 while (edge->next_callee)
700 edge = edge->next_callee;
701 for (; edge; edge = edge->prev_callee)
702 lto_output_edge (ob, edge, encoder);
703 }
704
 705 /* Output the IPA references of the nodes in SET and VSET.  */
706
707 static void
708 output_refs (cgraph_node_set set, varpool_node_set vset,
709 lto_cgraph_encoder_t encoder,
710 lto_varpool_encoder_t varpool_encoder)
711 {
712 cgraph_node_set_iterator csi;
713 varpool_node_set_iterator vsi;
714 struct lto_simple_output_block *ob;
715 int count;
716 struct ipa_ref *ref;
717 int i;
718
719 ob = lto_create_simple_output_block (LTO_section_refs);
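   /* Layout of the refs section (mirrored by input_refs): a list of blocks,
      each a reference count, the referring cgraph node's index in ENCODER and
      then that many references, terminated by a zero count; then the same
      again for varpool nodes, also terminated by a zero count.  */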
720
721 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
722 {
723 struct cgraph_node *node = csi_node (csi);
724
725 count = ipa_ref_list_nreferences (&node->symbol.ref_list);
726 if (count)
727 {
728 streamer_write_uhwi_stream (ob->main_stream, count);
729 streamer_write_uhwi_stream (ob->main_stream,
730 lto_cgraph_encoder_lookup (encoder, node));
731 for (i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list,
732 i, ref); i++)
733 lto_output_ref (ob, ref, encoder, varpool_encoder);
734 }
735 }
736
737 streamer_write_uhwi_stream (ob->main_stream, 0);
738
739 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
740 {
741 struct varpool_node *node = vsi_node (vsi);
742
743 count = ipa_ref_list_nreferences (&node->symbol.ref_list);
744 if (count)
745 {
746 streamer_write_uhwi_stream (ob->main_stream, count);
747 streamer_write_uhwi_stream (ob->main_stream,
748 lto_varpool_encoder_lookup (varpool_encoder,
749 node));
750 for (i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list,
751 i, ref); i++)
752 lto_output_ref (ob, ref, encoder, varpool_encoder);
753 }
754 }
755
756 streamer_write_uhwi_stream (ob->main_stream, 0);
757
758 lto_destroy_simple_output_block (ob);
759 }
760
 761 /* Find out all cgraph and varpool nodes we want to encode in the current unit
 762    and insert them into the encoders.  */
763 void
764 compute_ltrans_boundary (struct lto_out_decl_state *state,
765 cgraph_node_set set, varpool_node_set vset)
766 {
767 struct cgraph_node *node;
768 cgraph_node_set_iterator csi;
769 varpool_node_set_iterator vsi;
770 struct cgraph_edge *edge;
771 int i;
772 lto_cgraph_encoder_t encoder;
773 lto_varpool_encoder_t varpool_encoder;
774
775 encoder = state->cgraph_node_encoder = lto_cgraph_encoder_new ();
776 varpool_encoder = state->varpool_node_encoder = lto_varpool_encoder_new ();
777
778 /* Go over all the nodes in SET and assign references. */
779 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
780 {
781 node = csi_node (csi);
782 add_node_to (encoder, node, true);
783 add_references (encoder, varpool_encoder, &node->symbol.ref_list);
784 }
785 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
786 {
787 struct varpool_node *vnode = vsi_node (vsi);
788 gcc_assert (!vnode->alias || vnode->alias_of);
789 lto_varpool_encoder_encode (varpool_encoder, vnode);
790 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
791 add_references (encoder, varpool_encoder, &vnode->symbol.ref_list);
792 }
 793   /* Also pickle in the initializers of all referenced read-only variables
 794      to help folding.  Constant pool variables are not shared, so we must
 795      pickle those too.  */
796 for (i = 0; i < lto_varpool_encoder_size (varpool_encoder); i++)
797 {
798 struct varpool_node *vnode = lto_varpool_encoder_deref (varpool_encoder, i);
799 if (DECL_INITIAL (vnode->symbol.decl)
800 && !lto_varpool_encoder_encode_initializer_p (varpool_encoder,
801 vnode)
802 && const_value_known_p (vnode->symbol.decl))
803 {
804 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
805 add_references (encoder, varpool_encoder, &vnode->symbol.ref_list);
806 }
807 else if (vnode->alias || vnode->alias_of)
808 add_references (encoder, varpool_encoder, &vnode->symbol.ref_list);
809 }
810
811 /* Go over all the nodes again to include callees that are not in
812 SET. */
813 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
814 {
815 node = csi_node (csi);
816 for (edge = node->callees; edge; edge = edge->next_callee)
817 {
818 struct cgraph_node *callee = edge->callee;
819 if (!cgraph_node_in_set_p (callee, set))
820 {
821 /* We should have moved all the inlines. */
822 gcc_assert (!callee->global.inlined_to);
823 add_node_to (encoder, callee, false);
824 }
825 }
826 }
827 }
828
829 /* Output the part of the cgraph in SET. */
830
831 void
832 output_cgraph (cgraph_node_set set, varpool_node_set vset)
833 {
834 struct cgraph_node *node;
835 struct lto_simple_output_block *ob;
836 cgraph_node_set_iterator csi;
837 int i, n_nodes;
838 lto_cgraph_encoder_t encoder;
839 lto_varpool_encoder_t varpool_encoder;
840 static bool asm_nodes_output = false;
841
842 if (flag_wpa)
843 output_cgraph_opt_summary (set);
844
845 ob = lto_create_simple_output_block (LTO_section_cgraph);
846
847 output_profile_summary (ob);
848
849 /* An encoder for cgraph nodes should have been created by
850 ipa_write_summaries_1. */
851 gcc_assert (ob->decl_state->cgraph_node_encoder);
852 gcc_assert (ob->decl_state->varpool_node_encoder);
853 encoder = ob->decl_state->cgraph_node_encoder;
854 varpool_encoder = ob->decl_state->varpool_node_encoder;
855
 856   /* Write out the nodes.  We must first output a node and then its clones,
 857      otherwise at the time of reading back the clone there would be nothing
 858      to clone from.  */
859 n_nodes = lto_cgraph_encoder_size (encoder);
860 for (i = 0; i < n_nodes; i++)
861 {
862 node = lto_cgraph_encoder_deref (encoder, i);
863 lto_output_node (ob, node, encoder, set, vset);
864 }
865
866 /* Go over the nodes in SET again to write edges. */
867 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
868 {
869 node = csi_node (csi);
870 output_outgoing_cgraph_edges (node->callees, ob, encoder);
871 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
872 }
873
874 streamer_write_uhwi_stream (ob->main_stream, 0);
875
876 lto_destroy_simple_output_block (ob);
877
 878   /* Emit toplevel asms.
 879      When doing WPA we must output every asm just once.  Since we do not
 880      partition asm nodes at all, output them to the first partition.  This is
 881      a kind of hack, but it should work well.  */
882 if (!asm_nodes_output)
883 {
884 asm_nodes_output = true;
885 lto_output_toplevel_asms ();
886 }
887
888 output_varpool (set, vset);
889 output_refs (set, vset, encoder, varpool_encoder);
890 }
891
 892 /* Overwrite the information in NODE based on FILE_DATA, TAG and BP.
 893    This is called either to initialize NODE or to replace the values
 894    in it, for instance because the first time we saw it, the function
 895    body was not available but now it is.  BP is a bitpack with all
 896    the bitflags for NODE read from
 897    the stream.  */
898
899 static void
900 input_overwrite_node (struct lto_file_decl_data *file_data,
901 struct cgraph_node *node,
902 enum LTO_cgraph_tags tag,
903 struct bitpack_d *bp)
904 {
905 node->symbol.aux = (void *) tag;
906 node->symbol.lto_file_data = file_data;
907
908 node->local.local = bp_unpack_value (bp, 1);
909 node->symbol.externally_visible = bp_unpack_value (bp, 1);
910 node->local.finalized = bp_unpack_value (bp, 1);
911 node->local.versionable = bp_unpack_value (bp, 1);
912 node->local.can_change_signature = bp_unpack_value (bp, 1);
913 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
914 node->symbol.force_output = bp_unpack_value (bp, 1);
915 node->symbol.address_taken = bp_unpack_value (bp, 1);
916 node->abstract_and_needed = bp_unpack_value (bp, 1);
917 node->symbol.used_from_other_partition = bp_unpack_value (bp, 1);
918 node->lowered = bp_unpack_value (bp, 1);
919 node->analyzed = tag == LTO_cgraph_analyzed_node;
920 node->symbol.in_other_partition = bp_unpack_value (bp, 1);
921 if (node->symbol.in_other_partition
922 /* Avoid updating decl when we are seeing just inline clone.
923 When inlining function that has functions already inlined into it,
924 we produce clones of inline clones.
925
926 WPA partitioning might put each clone into different unit and
927 we might end up streaming inline clone from other partition
928 to support clone we are interested in. */
929 && (!node->clone_of
930 || node->clone_of->symbol.decl != node->symbol.decl))
931 {
932 DECL_EXTERNAL (node->symbol.decl) = 1;
933 TREE_STATIC (node->symbol.decl) = 0;
934 }
935 node->alias = bp_unpack_value (bp, 1);
936 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
937 node->only_called_at_startup = bp_unpack_value (bp, 1);
938 node->only_called_at_exit = bp_unpack_value (bp, 1);
939 node->tm_clone = bp_unpack_value (bp, 1);
940 node->thunk.thunk_p = bp_unpack_value (bp, 1);
941 node->symbol.resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
942 LDPR_NUM_KNOWN);
943 }
944
 945 /* Output the part of the varpool in SET and VSET.  */
946
947 static void
948 output_varpool (cgraph_node_set set, varpool_node_set vset)
949 {
950 struct lto_simple_output_block *ob = lto_create_simple_output_block (LTO_section_varpool);
951 lto_varpool_encoder_t varpool_encoder = ob->decl_state->varpool_node_encoder;
952 int len = lto_varpool_encoder_size (varpool_encoder), i;
953
954 streamer_write_uhwi_stream (ob->main_stream, len);
955
 956   /* Write out the nodes.  We must first output a node and then its clones,
 957      otherwise at the time of reading back the clone there would be nothing
 958      to clone from.  */
959 for (i = 0; i < len; i++)
960 {
961 lto_output_varpool_node (ob, lto_varpool_encoder_deref (varpool_encoder, i),
962 varpool_encoder,
963 set, vset);
964 }
965
966 lto_destroy_simple_output_block (ob);
967 }
968
 969 /* Read a node from input_block IB.  TAG is the node's tag just read.
 970    Return the node read or overwritten.  */
971
972 static struct cgraph_node *
973 input_node (struct lto_file_decl_data *file_data,
974 struct lto_input_block *ib,
975 enum LTO_cgraph_tags tag,
976 VEC(cgraph_node_ptr, heap) *nodes)
977 {
978 tree fn_decl;
979 struct cgraph_node *node;
980 struct bitpack_d bp;
981 unsigned decl_index;
982 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
983 int clone_ref;
984 int order;
985
986 order = streamer_read_hwi (ib) + order_base;
987 clone_ref = streamer_read_hwi (ib);
988
989 decl_index = streamer_read_uhwi (ib);
990 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
991
992 if (clone_ref != LCC_NOT_FOUND)
993 {
994 node = cgraph_clone_node (VEC_index (cgraph_node_ptr, nodes, clone_ref), fn_decl,
995 0, CGRAPH_FREQ_BASE, false, NULL, false);
996 }
997 else
998 node = cgraph_get_create_node (fn_decl);
999
1000 node->symbol.order = order;
1001 if (order >= symtab_order)
1002 symtab_order = order + 1;
1003
1004 node->count = streamer_read_hwi (ib);
1005 node->count_materialization_scale = streamer_read_hwi (ib);
1006
1007 if (tag == LTO_cgraph_analyzed_node)
1008 ref = streamer_read_hwi (ib);
1009
1010 ref2 = streamer_read_hwi (ib);
1011
1012 /* Make sure that we have not read this node before. Nodes that
1013 have already been read will have their tag stored in the 'aux'
1014 field. Since built-in functions can be referenced in multiple
1015 functions, they are expected to be read more than once. */
1016 if (node->symbol.aux && !DECL_BUILT_IN (node->symbol.decl))
1017 internal_error ("bytecode stream: found multiple instances of cgraph "
1018 "node %d", node->uid);
1019
1020 bp = streamer_read_bitpack (ib);
1021 input_overwrite_node (file_data, node, tag, &bp);
1022
1023 /* Store a reference for now, and fix up later to be a pointer. */
1024 node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
1025
1026 /* Store a reference for now, and fix up later to be a pointer. */
1027 node->symbol.same_comdat_group = (symtab_node) (intptr_t) ref2;
1028
1029 if (node->thunk.thunk_p)
1030 {
1031 int type = streamer_read_uhwi (ib);
1032 HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
1033 HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);
1034
1035 node->thunk.fixed_offset = fixed_offset;
1036 node->thunk.this_adjusting = (type & 2);
1037 node->thunk.virtual_value = virtual_value;
1038 node->thunk.virtual_offset_p = (type & 4);
1039 }
1040 if (node->thunk.thunk_p || node->alias)
1041 {
1042 if (streamer_read_hwi_in_range (ib, "alias nonzero flag", 0, 1))
1043 {
1044 decl_index = streamer_read_uhwi (ib);
1045 node->thunk.alias = lto_file_decl_data_get_fn_decl (file_data,
1046 decl_index);
1047 }
1048 }
1049 return node;
1050 }
1051
1052 /* Read a varpool node from input_block IB using FILE_DATA.
1053    Return the node read or overwritten.  */
1054
1055 static struct varpool_node *
1056 input_varpool_node (struct lto_file_decl_data *file_data,
1057 struct lto_input_block *ib)
1058 {
1059 int decl_index;
1060 tree var_decl;
1061 struct varpool_node *node;
1062 struct bitpack_d bp;
1063 int ref = LCC_NOT_FOUND;
1064 bool non_null_aliasof;
1065 int order;
1066
1067 order = streamer_read_hwi (ib) + order_base;
1068 decl_index = streamer_read_uhwi (ib);
1069 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1070 node = varpool_node (var_decl);
1071 node->symbol.order = order;
1072 if (order >= symtab_order)
1073 symtab_order = order + 1;
1074 node->symbol.lto_file_data = file_data;
1075
1076 bp = streamer_read_bitpack (ib);
1077 node->symbol.externally_visible = bp_unpack_value (&bp, 1);
1078 node->symbol.force_output = bp_unpack_value (&bp, 1);
1079 node->finalized = bp_unpack_value (&bp, 1);
1080 node->alias = bp_unpack_value (&bp, 1);
1081 non_null_aliasof = bp_unpack_value (&bp, 1);
1082 node->symbol.used_from_other_partition = bp_unpack_value (&bp, 1);
1083 node->symbol.in_other_partition = bp_unpack_value (&bp, 1);
1084 node->analyzed = (node->finalized && (!node->alias || !node->symbol.in_other_partition));
1085 if (node->symbol.in_other_partition)
1086 {
1087 DECL_EXTERNAL (node->symbol.decl) = 1;
1088 TREE_STATIC (node->symbol.decl) = 0;
1089 }
1090 if (non_null_aliasof)
1091 {
1092 decl_index = streamer_read_uhwi (ib);
1093 node->alias_of = lto_file_decl_data_get_var_decl (file_data, decl_index);
1094 }
1095 ref = streamer_read_hwi (ib);
1096 /* Store a reference for now, and fix up later to be a pointer. */
1097 node->symbol.same_comdat_group = (symtab_node) (intptr_t) ref;
1098 node->symbol.resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1099 LDPR_NUM_KNOWN);
1100
1101 return node;
1102 }
1103
1104 /* Read a reference from input_block IB and add it to the references of
1105    REFERRING_NODE, using NODES and VARPOOL_NODES_VEC to find the target.  */
1106
1107 static void
1108 input_ref (struct lto_input_block *ib,
1109 symtab_node referring_node,
1110 VEC(cgraph_node_ptr, heap) *nodes,
1111 VEC(varpool_node_ptr, heap) *varpool_nodes_vec)
1112 {
1113 struct cgraph_node *node = NULL;
1114 struct varpool_node *varpool_node = NULL;
1115 struct bitpack_d bp;
1116 int type;
1117 enum ipa_ref_use use;
1118
1119 bp = streamer_read_bitpack (ib);
1120 type = bp_unpack_value (&bp, 1);
1121 use = (enum ipa_ref_use) bp_unpack_value (&bp, 2);
1122 if (type)
1123 node = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
1124 else
1125 varpool_node = VEC_index (varpool_node_ptr, varpool_nodes_vec,
1126 streamer_read_hwi (ib));
1127 ipa_record_reference (referring_node,
1128 node ? (symtab_node) node : (symtab_node) varpool_node, use, NULL);
1129 }
1130
1131 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1132 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1133 edge being read is indirect (in the sense that it has
1134 indirect_unknown_callee set). */
1135
1136 static void
1137 input_edge (struct lto_input_block *ib, VEC(cgraph_node_ptr, heap) *nodes,
1138 bool indirect)
1139 {
1140 struct cgraph_node *caller, *callee;
1141 struct cgraph_edge *edge;
1142 unsigned int stmt_id;
1143 gcov_type count;
1144 int freq;
1145 cgraph_inline_failed_t inline_failed;
1146 struct bitpack_d bp;
1147 int ecf_flags = 0;
1148
1149 caller = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
1150 if (caller == NULL || caller->symbol.decl == NULL_TREE)
1151 internal_error ("bytecode stream: no caller found while reading edge");
1152
1153 if (!indirect)
1154 {
1155 callee = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
1156 if (callee == NULL || callee->symbol.decl == NULL_TREE)
1157 internal_error ("bytecode stream: no callee found while reading edge");
1158 }
1159 else
1160 callee = NULL;
1161
1162 count = (gcov_type) streamer_read_hwi (ib);
1163
1164 bp = streamer_read_bitpack (ib);
1165 inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_enum, CIF_N_REASONS);
1166 stmt_id = bp_unpack_var_len_unsigned (&bp);
1167 freq = (int) bp_unpack_var_len_unsigned (&bp);
1168
1169 if (indirect)
1170 edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq);
1171 else
1172 edge = cgraph_create_edge (caller, callee, NULL, count, freq);
1173
1174 edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
1175 edge->lto_stmt_uid = stmt_id;
1176 edge->inline_failed = inline_failed;
1177 edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
1178 edge->can_throw_external = bp_unpack_value (&bp, 1);
1179 if (indirect)
1180 {
1181 if (bp_unpack_value (&bp, 1))
1182 ecf_flags |= ECF_CONST;
1183 if (bp_unpack_value (&bp, 1))
1184 ecf_flags |= ECF_PURE;
1185 if (bp_unpack_value (&bp, 1))
1186 ecf_flags |= ECF_NORETURN;
1187 if (bp_unpack_value (&bp, 1))
1188 ecf_flags |= ECF_MALLOC;
1189 if (bp_unpack_value (&bp, 1))
1190 ecf_flags |= ECF_NOTHROW;
1191 if (bp_unpack_value (&bp, 1))
1192 ecf_flags |= ECF_RETURNS_TWICE;
1193 edge->indirect_info->ecf_flags = ecf_flags;
1194 }
1195 }
1196
1197
1198 /* Read a cgraph from IB using the info in FILE_DATA. */
1199
1200 static VEC(cgraph_node_ptr, heap) *
1201 input_cgraph_1 (struct lto_file_decl_data *file_data,
1202 struct lto_input_block *ib)
1203 {
1204 enum LTO_cgraph_tags tag;
1205 VEC(cgraph_node_ptr, heap) *nodes = NULL;
1206 struct cgraph_node *node;
1207 unsigned i;
1208
1209 tag = streamer_read_enum (ib, LTO_cgraph_tags, LTO_cgraph_last_tag);
1210 order_base = symtab_order;
1211 while (tag)
1212 {
1213 if (tag == LTO_cgraph_edge)
1214 input_edge (ib, nodes, false);
1215 else if (tag == LTO_cgraph_indirect_edge)
1216 input_edge (ib, nodes, true);
1217 else
1218 {
1219 node = input_node (file_data, ib, tag,nodes);
1220 if (node == NULL || node->symbol.decl == NULL_TREE)
1221 internal_error ("bytecode stream: found empty cgraph node");
1222 VEC_safe_push (cgraph_node_ptr, heap, nodes, node);
1223 lto_cgraph_encoder_encode (file_data->cgraph_node_encoder, node);
1224 }
1225
1226 tag = streamer_read_enum (ib, LTO_cgraph_tags, LTO_cgraph_last_tag);
1227 }
1228
1229 lto_input_toplevel_asms (file_data, order_base);
1230
1231   /* AUX pointers should all be non-zero for nodes read from the stream.  */
1232 #ifdef ENABLE_CHECKING
1233 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1234 gcc_assert (node->symbol.aux);
1235 #endif
1236 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1237 {
1238 int ref = (int) (intptr_t) node->global.inlined_to;
1239
1240       /* We share declarations of builtins, so we may read the same node twice.  */
1241 if (!node->symbol.aux)
1242 continue;
1243 node->symbol.aux = NULL;
1244
1245 /* Fixup inlined_to from reference to pointer. */
1246 if (ref != LCC_NOT_FOUND)
1247 node->global.inlined_to = VEC_index (cgraph_node_ptr, nodes, ref);
1248 else
1249 node->global.inlined_to = NULL;
1250
1251 ref = (int) (intptr_t) node->symbol.same_comdat_group;
1252
1253 /* Fixup same_comdat_group from reference to pointer. */
1254 if (ref != LCC_NOT_FOUND)
1255 node->symbol.same_comdat_group = (symtab_node)VEC_index (cgraph_node_ptr, nodes, ref);
1256 else
1257 node->symbol.same_comdat_group = NULL;
1258 }
1259 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1260 node->symbol.aux = (void *)1;
1261 return nodes;
1262 }
1263
1264 /* Read a varpool from IB using the info in FILE_DATA. */
1265
1266 static VEC(varpool_node_ptr, heap) *
1267 input_varpool_1 (struct lto_file_decl_data *file_data,
1268 struct lto_input_block *ib)
1269 {
1270 unsigned HOST_WIDE_INT len;
1271 VEC(varpool_node_ptr, heap) *varpool = NULL;
1272 int i;
1273 struct varpool_node *node;
1274
1275 len = streamer_read_uhwi (ib);
1276 while (len)
1277 {
1278 VEC_safe_push (varpool_node_ptr, heap, varpool,
1279 input_varpool_node (file_data, ib));
1280 len--;
1281 }
1282 #ifdef ENABLE_CHECKING
1283 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1284 gcc_assert (!node->symbol.aux);
1285 #endif
1286 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1287 {
1288 int ref = (int) (intptr_t) node->symbol.same_comdat_group;
1289       /* We share declarations of builtins, so we may read the same node twice.  */
1290 if (node->symbol.aux)
1291 continue;
1292 node->symbol.aux = (void *)1;
1293
1294 /* Fixup same_comdat_group from reference to pointer. */
1295 if (ref != LCC_NOT_FOUND)
1296 node->symbol.same_comdat_group = (symtab_node)VEC_index (varpool_node_ptr, varpool, ref);
1297 else
1298 node->symbol.same_comdat_group = NULL;
1299 }
1300 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1301 node->symbol.aux = NULL;
1302 return varpool;
1303 }
1304
1305 /* Input ipa_refs. */
1306
1307 static void
1308 input_refs (struct lto_input_block *ib,
1309 VEC(cgraph_node_ptr, heap) *nodes,
1310 VEC(varpool_node_ptr, heap) *varpool)
1311 {
1312 int count;
1313 int idx;
1314 while (true)
1315 {
1316 struct cgraph_node *node;
1317 count = streamer_read_uhwi (ib);
1318 if (!count)
1319 break;
1320 idx = streamer_read_uhwi (ib);
1321 node = VEC_index (cgraph_node_ptr, nodes, idx);
1322 while (count)
1323 {
1324 input_ref (ib, (symtab_node) node, nodes, varpool);
1325 count--;
1326 }
1327 }
1328 while (true)
1329 {
1330 struct varpool_node *node;
1331 count = streamer_read_uhwi (ib);
1332 if (!count)
1333 break;
1334 node = VEC_index (varpool_node_ptr, varpool,
1335 streamer_read_uhwi (ib));
1336 while (count)
1337 {
1338 input_ref (ib, (symtab_node) node, nodes, varpool);
1339 count--;
1340 }
1341 }
1342 }
1343
1344
1345 static struct gcov_ctr_summary lto_gcov_summary;
1346
1347 /* Input profile_info from IB. */
1348 static void
1349 input_profile_summary (struct lto_input_block *ib,
1350 struct lto_file_decl_data *file_data)
1351 {
1352 unsigned int runs = streamer_read_uhwi (ib);
1353 if (runs)
1354 {
1355 file_data->profile_info.runs = runs;
1356 file_data->profile_info.sum_max = streamer_read_uhwi (ib);
1357 }
1358
1359 }
1360
1361 /* Rescale profile summaries to the same number of runs in the whole unit. */
1362
1363 static void
1364 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1365 {
1366 struct lto_file_decl_data *file_data;
1367 unsigned int j;
1368 gcov_unsigned_t max_runs = 0;
1369 struct cgraph_node *node;
1370 struct cgraph_edge *edge;
1371
1372   /* Find the unit with the maximal number of runs.  If we ever get serious
1373      about roundoff errors, we might also consider computing the least
1374      common multiple.  */
1375 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1376 if (max_runs < file_data->profile_info.runs)
1377 max_runs = file_data->profile_info.runs;
1378
1379 if (!max_runs)
1380 return;
1381
1382   /* Simple overflow check.  We probably don't need to support that many train
1383      runs.  Such a large value probably implies data corruption anyway.  */
1384 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1385 {
1386       sorry ("At most %i profile runs are supported.  Perhaps corrupted profile?",
1387 INT_MAX / REG_BR_PROB_BASE);
1388 return;
1389 }
1390
1391 profile_info = &lto_gcov_summary;
1392 lto_gcov_summary.runs = max_runs;
1393 lto_gcov_summary.sum_max = 0;
1394
1395   /* Rescale all units to the maximal number of runs.
1396      sum_max cannot be easily merged, as we have no idea which files come from
1397      the same run.  We do not use the info anyway, so leave it 0.  */
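   /* Worked example (assuming REG_BR_PROB_BASE is 10000): a unit with
      profile_info.runs == 2 in a link where max_runs == 6 gets
      scale == (10000 * 6 + 1) / 2 == 30000, i.e. its counters are
      effectively multiplied by 3 once divided back by REG_BR_PROB_BASE.  */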
1398 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1399 if (file_data->profile_info.runs)
1400 {
1401 int scale = ((REG_BR_PROB_BASE * max_runs
1402 + file_data->profile_info.runs / 2)
1403 / file_data->profile_info.runs);
1404 lto_gcov_summary.sum_max = MAX (lto_gcov_summary.sum_max,
1405 (file_data->profile_info.sum_max
1406 * scale
1407 + REG_BR_PROB_BASE / 2)
1408 / REG_BR_PROB_BASE);
1409 }
1410
1411 /* Watch roundoff errors. */
1412 if (lto_gcov_summary.sum_max < max_runs)
1413 lto_gcov_summary.sum_max = max_runs;
1414
1415   /* If merging already happened at WPA time, we are done.  */
1416 if (flag_ltrans)
1417 return;
1418
1419 /* Now compute count_materialization_scale of each node.
1420 During LTRANS we already have values of count_materialization_scale
1421 computed, so just update them. */
1422 FOR_EACH_FUNCTION (node)
1423 if (node->symbol.lto_file_data
1424 && node->symbol.lto_file_data->profile_info.runs)
1425 {
1426 int scale;
1427
1428 scale =
1429 ((node->count_materialization_scale * max_runs
1430 + node->symbol.lto_file_data->profile_info.runs / 2)
1431 / node->symbol.lto_file_data->profile_info.runs);
1432 node->count_materialization_scale = scale;
1433 if (scale < 0)
1434 fatal_error ("Profile information in %s corrupted",
1435 		     node->symbol.lto_file_data->file_name);
1436
1437 if (scale == REG_BR_PROB_BASE)
1438 continue;
1439 for (edge = node->callees; edge; edge = edge->next_callee)
1440 edge->count = ((edge->count * scale + REG_BR_PROB_BASE / 2)
1441 / REG_BR_PROB_BASE);
1442 node->count = ((node->count * scale + REG_BR_PROB_BASE / 2)
1443 / REG_BR_PROB_BASE);
1444 }
1445 }
1446
1447 /* Input and merge the cgraph from each of the .o files passed to
1448 lto1. */
1449
1450 void
1451 input_cgraph (void)
1452 {
1453 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1454 struct lto_file_decl_data *file_data;
1455 unsigned int j = 0;
1456 struct cgraph_node *node;
1457
1458 cgraph_state = CGRAPH_STATE_IPA_SSA;
1459
1460 while ((file_data = file_data_vec[j++]))
1461 {
1462 const char *data;
1463 size_t len;
1464 struct lto_input_block *ib;
1465 VEC(cgraph_node_ptr, heap) *nodes;
1466 VEC(varpool_node_ptr, heap) *varpool;
1467
1468 ib = lto_create_simple_input_block (file_data, LTO_section_cgraph,
1469 &data, &len);
1470 if (!ib)
1471 fatal_error ("cannot find LTO cgraph in %s", file_data->file_name);
1472 input_profile_summary (ib, file_data);
1473 file_data->cgraph_node_encoder = lto_cgraph_encoder_new ();
1474 nodes = input_cgraph_1 (file_data, ib);
1475 lto_destroy_simple_input_block (file_data, LTO_section_cgraph,
1476 ib, data, len);
1477
1478 ib = lto_create_simple_input_block (file_data, LTO_section_varpool,
1479 &data, &len);
1480 if (!ib)
1481 fatal_error ("cannot find LTO varpool in %s", file_data->file_name);
1482 varpool = input_varpool_1 (file_data, ib);
1483 lto_destroy_simple_input_block (file_data, LTO_section_varpool,
1484 ib, data, len);
1485
1486 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1487 &data, &len);
1488 if (!ib)
1489 fatal_error("cannot find LTO section refs in %s", file_data->file_name);
1490 input_refs (ib, nodes, varpool);
1491 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1492 ib, data, len);
1493 if (flag_ltrans)
1494 input_cgraph_opt_summary (nodes);
1495 VEC_free (cgraph_node_ptr, heap, nodes);
1496 VEC_free (varpool_node_ptr, heap, varpool);
1497 }
1498
1499 merge_profile_summaries (file_data_vec);
1500
1501 /* Clear out the aux field that was used to store enough state to
1502 tell which nodes should be overwritten. */
1503 FOR_EACH_FUNCTION (node)
1504 {
1505 /* Some nodes may have been created by cgraph_node. This
1506 happens when the callgraph contains nested functions. If the
1507 node for the parent function was never emitted to the gimple
1508 file, cgraph_node will create a node for it when setting the
1509 context of the nested function. */
1510 if (node->symbol.lto_file_data)
1511 node->symbol.aux = NULL;
1512 }
1513 }
1514
1515 /* Return true when we need an optimization summary for NODE.  */
1516
1517 static int
1518 output_cgraph_opt_summary_p (struct cgraph_node *node,
1519 cgraph_node_set set ATTRIBUTE_UNUSED)
1520 {
1521 return (node->clone_of
1522 && (node->clone.tree_map
1523 || node->clone.args_to_skip
1524 || node->clone.combined_args_to_skip));
1525 }
1526
1527 /* Output optimization summary for EDGE to OB. */
1528 static void
1529 output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
1530 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
1531 {
1532 }
1533
1534 /* Output optimization summary for NODE to OB. */
1535
1536 static void
1537 output_node_opt_summary (struct output_block *ob,
1538 struct cgraph_node *node,
1539 cgraph_node_set set)
1540 {
1541 unsigned int index;
1542 bitmap_iterator bi;
1543 struct ipa_replace_map *map;
1544 struct bitpack_d bp;
1545 int i;
1546 struct cgraph_edge *e;
1547
1548 if (node->clone.args_to_skip)
1549 {
1550 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.args_to_skip));
1551 EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
1552 streamer_write_uhwi (ob, index);
1553 }
1554 else
1555 streamer_write_uhwi (ob, 0);
1556 if (node->clone.combined_args_to_skip)
1557 {
1558 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.combined_args_to_skip));
1559 EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
1560 streamer_write_uhwi (ob, index);
1561 }
1562 else
1563 streamer_write_uhwi (ob, 0);
1564 streamer_write_uhwi (ob, VEC_length (ipa_replace_map_p,
1565 node->clone.tree_map));
1566 FOR_EACH_VEC_ELT (ipa_replace_map_p, node->clone.tree_map, i, map)
1567 {
1568 int parm_num;
1569 tree parm;
1570
1571 for (parm_num = 0, parm = DECL_ARGUMENTS (node->symbol.decl); parm;
1572 parm = DECL_CHAIN (parm), parm_num++)
1573 if (map->old_tree == parm)
1574 break;
1575       /* At the moment we assume all old trees are PARM_DECLs, because we have no
1576 	 mechanism to store function-local declarations into summaries.  */
1577 gcc_assert (parm);
1578 streamer_write_uhwi (ob, parm_num);
1579 stream_write_tree (ob, map->new_tree, true);
1580 bp = bitpack_create (ob->main_stream);
1581 bp_pack_value (&bp, map->replace_p, 1);
1582 bp_pack_value (&bp, map->ref_p, 1);
1583 streamer_write_bitpack (&bp);
1584 }
1585
1586 if (cgraph_node_in_set_p (node, set))
1587 {
1588 for (e = node->callees; e; e = e->next_callee)
1589 output_edge_opt_summary (ob, e);
1590 for (e = node->indirect_calls; e; e = e->next_callee)
1591 output_edge_opt_summary (ob, e);
1592 }
1593 }
1594
1595 /* Output optimization summaries stored in callgraph.
1596 At the moment it is the clone info structure. */
1597
1598 static void
1599 output_cgraph_opt_summary (cgraph_node_set set)
1600 {
1601 struct cgraph_node *node;
1602 int i, n_nodes;
1603 lto_cgraph_encoder_t encoder;
1604 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1605 unsigned count = 0;
1606
1607 ob->cgraph_node = NULL;
1608 encoder = ob->decl_state->cgraph_node_encoder;
1609 n_nodes = lto_cgraph_encoder_size (encoder);
1610 for (i = 0; i < n_nodes; i++)
1611 if (output_cgraph_opt_summary_p (lto_cgraph_encoder_deref (encoder, i),
1612 set))
1613 count++;
1614 streamer_write_uhwi (ob, count);
1615 for (i = 0; i < n_nodes; i++)
1616 {
1617 node = lto_cgraph_encoder_deref (encoder, i);
1618 if (output_cgraph_opt_summary_p (node, set))
1619 {
1620 streamer_write_uhwi (ob, i);
1621 output_node_opt_summary (ob, node, set);
1622 }
1623 }
1624 produce_asm (ob, NULL);
1625 destroy_output_block (ob);
1626 }
1627
1628 /* Input optimization summary of EDGE.  */
1629
1630 static void
1631 input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
1632 struct lto_input_block *ib_main ATTRIBUTE_UNUSED)
1633 {
1634 }
1635
1636 /* Input optimization summary of NODE.  */
1637
1638 static void
1639 input_node_opt_summary (struct cgraph_node *node,
1640 struct lto_input_block *ib_main,
1641 struct data_in *data_in)
1642 {
1643 int i;
1644 int count;
1645 int bit;
1646 struct bitpack_d bp;
1647 struct cgraph_edge *e;
1648
1649 count = streamer_read_uhwi (ib_main);
1650 if (count)
1651 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
1652 for (i = 0; i < count; i++)
1653 {
1654 bit = streamer_read_uhwi (ib_main);
1655 bitmap_set_bit (node->clone.args_to_skip, bit);
1656 }
1657 count = streamer_read_uhwi (ib_main);
1658 if (count)
1659 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
1660 for (i = 0; i < count; i++)
1661 {
1662 bit = streamer_read_uhwi (ib_main);
1663 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
1664 }
1665 count = streamer_read_uhwi (ib_main);
1666 for (i = 0; i < count; i++)
1667 {
1668 int parm_num;
1669 tree parm;
1670 struct ipa_replace_map *map = ggc_alloc_ipa_replace_map ();
1671
1672 VEC_safe_push (ipa_replace_map_p, gc, node->clone.tree_map, map);
1673 for (parm_num = 0, parm = DECL_ARGUMENTS (node->symbol.decl); parm_num;
1674 parm = DECL_CHAIN (parm))
1675 parm_num --;
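       /* old_tree is not streamed; presumably the PARM_DECL is looked up
	  again from parm_num when the clone is materialized, which is why
	  it is left NULL below.  */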
1676 map->parm_num = streamer_read_uhwi (ib_main);
1677 map->old_tree = NULL;
1678 map->new_tree = stream_read_tree (ib_main, data_in);
1679 bp = streamer_read_bitpack (ib_main);
1680 map->replace_p = bp_unpack_value (&bp, 1);
1681 map->ref_p = bp_unpack_value (&bp, 1);
1682 }
1683 for (e = node->callees; e; e = e->next_callee)
1684 input_edge_opt_summary (e, ib_main);
1685 for (e = node->indirect_calls; e; e = e->next_callee)
1686 input_edge_opt_summary (e, ib_main);
1687 }
1688
1689 /* Read section in file FILE_DATA of length LEN with data DATA. */
1690
1691 static void
1692 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
1693 const char *data, size_t len, VEC (cgraph_node_ptr,
1694 heap) * nodes)
1695 {
1696 const struct lto_function_header *header =
1697 (const struct lto_function_header *) data;
1698 const int cfg_offset = sizeof (struct lto_function_header);
1699 const int main_offset = cfg_offset + header->cfg_size;
1700 const int string_offset = main_offset + header->main_size;
1701 struct data_in *data_in;
1702 struct lto_input_block ib_main;
1703 unsigned int i;
1704 unsigned int count;
1705
1706 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
1707 header->main_size);
1708
1709 data_in =
1710 lto_data_in_create (file_data, (const char *) data + string_offset,
1711 header->string_size, NULL);
1712 count = streamer_read_uhwi (&ib_main);
1713
1714 for (i = 0; i < count; i++)
1715 {
1716 int ref = streamer_read_uhwi (&ib_main);
1717 input_node_opt_summary (VEC_index (cgraph_node_ptr, nodes, ref),
1718 &ib_main, data_in);
1719 }
1720 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
1721 len);
1722 lto_data_in_delete (data_in);
1723 }
1724
1725 /* Input optimization summary of cgraph. */
1726
1727 static void
1728 input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes)
1729 {
1730 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1731 struct lto_file_decl_data *file_data;
1732 unsigned int j = 0;
1733
1734 while ((file_data = file_data_vec[j++]))
1735 {
1736 size_t len;
1737 const char *data =
1738 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
1739 &len);
1740
1741 if (data)
1742 input_cgraph_opt_section (file_data, data, len, nodes);
1743 }
1744 }