gcc/lto-cgraph.c
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright 2009, 2010 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "expr.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "hashtab.h"
33 #include "langhooks.h"
34 #include "basic-block.h"
35 #include "tree-flow.h"
36 #include "cgraph.h"
37 #include "function.h"
38 #include "ggc.h"
39 #include "diagnostic-core.h"
40 #include "except.h"
41 #include "vec.h"
42 #include "timevar.h"
43 #include "output.h"
44 #include "pointer-set.h"
45 #include "lto-streamer.h"
46 #include "gcov-io.h"
47
48 static void output_varpool (cgraph_node_set, varpool_node_set);
49 static void output_cgraph_opt_summary (cgraph_node_set set);
50 static void input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes);
51
52
53 /* Cgraph streaming is organized as a set of records whose type
54 is indicated by a tag. */
55 enum LTO_cgraph_tags
56 {
57 /* Must leave 0 for the stopper. */
58
59 /* Cgraph node without body available. */
60 LTO_cgraph_unavail_node = 1,
61 /* Cgraph node with function body. */
62 LTO_cgraph_analyzed_node,
63 /* Cgraph edges. */
64 LTO_cgraph_edge,
65 LTO_cgraph_indirect_edge
66 };
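/* For orientation, the main cgraph section is a flat sequence of such
   tagged records: output_cgraph below writes all node records, then all
   edge records, then a zero tag, then the toplevel asm strings, then a
   final zero; input_cgraph_1 simply dispatches on each tag until it sees
   the zero terminator.  */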
67
68 /* Create a new cgraph encoder. */
69
70 lto_cgraph_encoder_t
71 lto_cgraph_encoder_new (void)
72 {
73 lto_cgraph_encoder_t encoder = XCNEW (struct lto_cgraph_encoder_d);
74 encoder->map = pointer_map_create ();
75 encoder->nodes = NULL;
76 encoder->body = pointer_set_create ();
77 return encoder;
78 }
79
80
81 /* Delete ENCODER and its components. */
82
83 void
84 lto_cgraph_encoder_delete (lto_cgraph_encoder_t encoder)
85 {
86 VEC_free (cgraph_node_ptr, heap, encoder->nodes);
87 pointer_map_destroy (encoder->map);
88 pointer_set_destroy (encoder->body);
89 free (encoder);
90 }
91
92
93 /* Return the existing reference number of NODE in the cgraph encoder
94 ENCODER. Assign a new reference if this is the first time
95 NODE is encoded. */
96
97 int
98 lto_cgraph_encoder_encode (lto_cgraph_encoder_t encoder,
99 struct cgraph_node *node)
100 {
101 int ref;
102 void **slot;
103
104 slot = pointer_map_contains (encoder->map, node);
105 if (!slot)
106 {
107 ref = VEC_length (cgraph_node_ptr, encoder->nodes);
108 slot = pointer_map_insert (encoder->map, node);
109 *slot = (void *) (intptr_t) ref;
110 VEC_safe_push (cgraph_node_ptr, heap, encoder->nodes, node);
111 }
112 else
113 ref = (int) (intptr_t) *slot;
114
115 return ref;
116 }
117
118 #define LCC_NOT_FOUND (-1)
119
120 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
121 or LCC_NOT_FOUND if it is not there. */
122
123 int
124 lto_cgraph_encoder_lookup (lto_cgraph_encoder_t encoder,
125 struct cgraph_node *node)
126 {
127 void **slot = pointer_map_contains (encoder->map, node);
128 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
129 }
130
131
132 /* Return the cgraph node corresponding to REF using ENCODER. */
133
134 struct cgraph_node *
135 lto_cgraph_encoder_deref (lto_cgraph_encoder_t encoder, int ref)
136 {
137 if (ref == LCC_NOT_FOUND)
138 return NULL;
139
140 return VEC_index (cgraph_node_ptr, encoder->nodes, ref);
141 }
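/* A minimal usage sketch of the encoder API above, for illustration only.
   It is kept under #if 0 and the caller shown is hypothetical; only the
   functions defined in this file are used.  */
#if 0
static void
example_cgraph_encoder_use (struct cgraph_node *node)
{
  lto_cgraph_encoder_t encoder = lto_cgraph_encoder_new ();

  /* The first node encoded gets reference 0; encoding it again simply
     returns the existing reference.  */
  int ref = lto_cgraph_encoder_encode (encoder, node);
  gcc_assert (ref == 0 && ref == lto_cgraph_encoder_encode (encoder, node));

  /* lookup finds the reference without creating one; deref maps it back.  */
  gcc_assert (lto_cgraph_encoder_lookup (encoder, node) == ref);
  gcc_assert (lto_cgraph_encoder_deref (encoder, ref) == node);

  lto_cgraph_encoder_delete (encoder);
}
#endif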
142
143
144 /* Return TRUE if we should encode the body of NODE (if any). */
145
146 bool
147 lto_cgraph_encoder_encode_body_p (lto_cgraph_encoder_t encoder,
148 struct cgraph_node *node)
149 {
150 return pointer_set_contains (encoder->body, node);
151 }
152
153 /* Specify that we should encode the body of NODE. */
154
155 static void
156 lto_set_cgraph_encoder_encode_body (lto_cgraph_encoder_t encoder,
157 struct cgraph_node *node)
158 {
159 pointer_set_insert (encoder->body, node);
160 }
161
162 /* Create a new varpool encoder. */
163
164 lto_varpool_encoder_t
165 lto_varpool_encoder_new (void)
166 {
167 lto_varpool_encoder_t encoder = XCNEW (struct lto_varpool_encoder_d);
168 encoder->map = pointer_map_create ();
169 encoder->initializer = pointer_set_create ();
170 encoder->nodes = NULL;
171 return encoder;
172 }
173
174
175 /* Delete ENCODER and its components. */
176
177 void
178 lto_varpool_encoder_delete (lto_varpool_encoder_t encoder)
179 {
180 VEC_free (varpool_node_ptr, heap, encoder->nodes);
181 pointer_map_destroy (encoder->map);
182 pointer_set_destroy (encoder->initializer);
183 free (encoder);
184 }
185
186
187 /* Return the existing reference number of NODE in the varpool encoder
188 ENCODER. Assign a new reference if this is the first time
189 NODE is encoded. */
190
191 int
192 lto_varpool_encoder_encode (lto_varpool_encoder_t encoder,
193 struct varpool_node *node)
194 {
195 int ref;
196 void **slot;
197
198 slot = pointer_map_contains (encoder->map, node);
199 if (!slot)
200 {
201 ref = VEC_length (varpool_node_ptr, encoder->nodes);
202 slot = pointer_map_insert (encoder->map, node);
203 *slot = (void *) (intptr_t) ref;
204 VEC_safe_push (varpool_node_ptr, heap, encoder->nodes, node);
205 }
206 else
207 ref = (int) (intptr_t) *slot;
208
209 return ref;
210 }
211
212 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
213 or LCC_NOT_FOUND if it is not there. */
214
215 int
216 lto_varpool_encoder_lookup (lto_varpool_encoder_t encoder,
217 struct varpool_node *node)
218 {
219 void **slot = pointer_map_contains (encoder->map, node);
220 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
221 }
222
223
224 /* Return the varpool node corresponding to REF using ENCODER. */
225
226 struct varpool_node *
227 lto_varpool_encoder_deref (lto_varpool_encoder_t encoder, int ref)
228 {
229 if (ref == LCC_NOT_FOUND)
230 return NULL;
231
232 return VEC_index (varpool_node_ptr, encoder->nodes, ref);
233 }
234
235
236 /* Return TRUE if we should encode initializer of NODE (if any). */
237
238 bool
239 lto_varpool_encoder_encode_initializer_p (lto_varpool_encoder_t encoder,
240 struct varpool_node *node)
241 {
242 return pointer_set_contains (encoder->initializer, node);
243 }
244
245 /* Specify that we should encode the initializer of NODE. */
246
247 static void
248 lto_set_varpool_encoder_encode_initializer (lto_varpool_encoder_t encoder,
249 struct varpool_node *node)
250 {
251 pointer_set_insert (encoder->initializer, node);
252 }
253
254 /* Output the cgraph EDGE to OB using ENCODER. */
255
256 static void
257 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
258 lto_cgraph_encoder_t encoder)
259 {
260 unsigned int uid;
261 intptr_t ref;
262 struct bitpack_d bp;
263
264 if (edge->indirect_unknown_callee)
265 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_indirect_edge);
266 else
267 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_edge);
268
269 ref = lto_cgraph_encoder_lookup (encoder, edge->caller);
270 gcc_assert (ref != LCC_NOT_FOUND);
271 lto_output_sleb128_stream (ob->main_stream, ref);
272
273 if (!edge->indirect_unknown_callee)
274 {
275 ref = lto_cgraph_encoder_lookup (encoder, edge->callee);
276 gcc_assert (ref != LCC_NOT_FOUND);
277 lto_output_sleb128_stream (ob->main_stream, ref);
278 }
279
280 lto_output_sleb128_stream (ob->main_stream, edge->count);
281
282 bp = bitpack_create (ob->main_stream);
283 uid = (!gimple_has_body_p (edge->caller->decl)
284 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt));
285 bp_pack_value (&bp, uid, HOST_BITS_PER_INT);
286 bp_pack_value (&bp, edge->inline_failed, HOST_BITS_PER_INT);
287 bp_pack_value (&bp, edge->frequency, HOST_BITS_PER_INT);
288 bp_pack_value (&bp, edge->loop_nest, 30);
289 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
290 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
291 bp_pack_value (&bp, edge->can_throw_external, 1);
292 if (edge->indirect_unknown_callee)
293 {
294 int flags = edge->indirect_info->ecf_flags;
295 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
296 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
297 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
298 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
299 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
300 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
301 /* Flags that should not appear on indirect calls. */
302 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
303 | ECF_MAY_BE_ALLOCA
304 | ECF_SIBCALL
305 | ECF_LEAF
306 | ECF_NOVOPS)));
307 }
308 lto_output_bitpack (&bp);
309 }
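/* For reference, the edge record produced above is laid out as:
     uleb128  tag (LTO_cgraph_edge or LTO_cgraph_indirect_edge)
     sleb128  caller reference
     sleb128  callee reference (direct edges only)
     sleb128  count
     bitpack  stmt uid, inline_failed, frequency, loop_nest,
	      indirect_inlining_edge, call_stmt_cannot_inline_p,
	      can_throw_external and, for indirect edges, six ECF_* bits.
   input_edge below must unpack the fields in exactly this order.  */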
310
311 /* Return true if LIST contains references from other partitions. */
312
313 bool
314 referenced_from_other_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
315 varpool_node_set vset)
316 {
317 int i;
318 struct ipa_ref *ref;
319 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
320 {
321 if (ref->refering_type == IPA_REF_CGRAPH)
322 {
323 if (ipa_ref_refering_node (ref)->in_other_partition
324 || !cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
325 return true;
326 }
327 else
328 {
329 if (ipa_ref_refering_varpool_node (ref)->in_other_partition
330 || !varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
331 vset))
332 return true;
333 }
334 }
335 return false;
336 }
337
338 /* Return true when NODE is reachable from another partition. */
339
340 bool
341 reachable_from_other_partition_p (struct cgraph_node *node, cgraph_node_set set)
342 {
343 struct cgraph_edge *e;
344 if (!node->analyzed)
345 return false;
346 if (node->global.inlined_to)
347 return false;
348 for (e = node->callers; e; e = e->next_caller)
349 if (e->caller->in_other_partition
350 || !cgraph_node_in_set_p (e->caller, set))
351 return true;
352 return false;
353 }
354
355 /* Return true if LIST contains references from this partition. */
356
357 bool
358 referenced_from_this_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
359 varpool_node_set vset)
360 {
361 int i;
362 struct ipa_ref *ref;
363 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
364 {
365 if (ref->refering_type == IPA_REF_CGRAPH)
366 {
367 if (cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
368 return true;
369 }
370 else
371 {
372 if (varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
373 vset))
374 return true;
375 }
376 }
377 return false;
378 }
379
380 /* Return true when NODE is reachable from this partition. */
381
382 bool
383 reachable_from_this_partition_p (struct cgraph_node *node, cgraph_node_set set)
384 {
385 struct cgraph_edge *e;
386 for (e = node->callers; e; e = e->next_caller)
387 if (cgraph_node_in_set_p (e->caller, set))
388 return true;
389 return false;
390 }
391
392 /* Output the cgraph NODE to OB. ENCODER is used to find the
393 reference number of NODE->inlined_to. SET is the set of nodes we
394 are writing to the current file. If NODE is not in SET, then NODE
395 is a boundary of a cgraph_node_set and we pretend NODE just has a
396 decl and no callees. VSET is the set of varpool nodes being
397 written and is used, together with SET, to decide whether NODE is
398 referenced from other partitions. */
399
400 static void
401 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
402 lto_cgraph_encoder_t encoder, cgraph_node_set set,
403 varpool_node_set vset)
404 {
405 unsigned int tag;
406 struct bitpack_d bp;
407 bool boundary_p;
408 intptr_t ref;
409 bool in_other_partition = false;
410 struct cgraph_node *clone_of;
411
412 boundary_p = !cgraph_node_in_set_p (node, set);
413
414 if (node->analyzed && !boundary_p)
415 tag = LTO_cgraph_analyzed_node;
416 else
417 tag = LTO_cgraph_unavail_node;
418
419 lto_output_uleb128_stream (ob->main_stream, tag);
420
421 /* In WPA mode, we only output part of the call-graph. Also, we
422 fake cgraph node attributes. There are two cases that we care about.
423
424 Boundary nodes: There are nodes that are not part of SET but are
425 called from within SET. We artificially make them look like
426 externally visible nodes with no function body.
427
428 Cherry-picked nodes: These are nodes we pulled from other
429 translation units into SET during IPA-inlining. We make them look
430 like local static nodes to prevent clashes with other local statics. */
431 if (boundary_p && node->analyzed)
432 {
433 /* Inline clones can not be part of the boundary.
434 gcc_assert (!node->global.inlined_to);
435
436 FIXME: At the moment they can be, when the partition contains an inline
437 clone that is a clone of an inline clone from outside the partition. We
438 could reshape the clone tree and make another node the root, but that
439 needs a bit of extra work and will be promptly done by cgraph_remove_node
440 after reading back. */
441 in_other_partition = 1;
442 }
443
444 clone_of = node->clone_of;
445 while (clone_of
446 && (ref = lto_cgraph_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
447 if (clone_of->prev_sibling_clone)
448 clone_of = clone_of->prev_sibling_clone;
449 else
450 clone_of = clone_of->clone_of;
451
452 if (tag == LTO_cgraph_analyzed_node)
453 gcc_assert (clone_of || !node->clone_of);
454 if (!clone_of)
455 lto_output_sleb128_stream (ob->main_stream, LCC_NOT_FOUND);
456 else
457 lto_output_sleb128_stream (ob->main_stream, ref);
458
459
460 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
461 lto_output_sleb128_stream (ob->main_stream, node->count);
462 lto_output_sleb128_stream (ob->main_stream, node->count_materialization_scale);
463
464 if (tag == LTO_cgraph_analyzed_node)
465 {
466 lto_output_sleb128_stream (ob->main_stream,
467 node->local.inline_summary.estimated_self_stack_size);
468 lto_output_sleb128_stream (ob->main_stream,
469 node->local.inline_summary.self_size);
470 lto_output_sleb128_stream (ob->main_stream,
471 node->local.inline_summary.size_inlining_benefit);
472 lto_output_sleb128_stream (ob->main_stream,
473 node->local.inline_summary.self_time);
474 lto_output_sleb128_stream (ob->main_stream,
475 node->local.inline_summary.time_inlining_benefit);
476 if (node->global.inlined_to)
477 {
478 ref = lto_cgraph_encoder_lookup (encoder, node->global.inlined_to);
479 gcc_assert (ref != LCC_NOT_FOUND);
480 }
481 else
482 ref = LCC_NOT_FOUND;
483
484 lto_output_sleb128_stream (ob->main_stream, ref);
485 }
486
487 if (node->same_comdat_group && !boundary_p)
488 {
489 ref = lto_cgraph_encoder_lookup (encoder, node->same_comdat_group);
490 gcc_assert (ref != LCC_NOT_FOUND);
491 }
492 else
493 ref = LCC_NOT_FOUND;
494 lto_output_sleb128_stream (ob->main_stream, ref);
495
496 bp = bitpack_create (ob->main_stream);
497 bp_pack_value (&bp, node->local.local, 1);
498 bp_pack_value (&bp, node->local.externally_visible, 1);
499 bp_pack_value (&bp, node->local.finalized, 1);
500 bp_pack_value (&bp, node->local.inlinable, 1);
501 bp_pack_value (&bp, node->local.versionable, 1);
502 bp_pack_value (&bp, node->local.can_change_signature, 1);
503 bp_pack_value (&bp, node->local.disregard_inline_limits, 1);
504 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
505 bp_pack_value (&bp, node->local.vtable_method, 1);
506 bp_pack_value (&bp, node->needed, 1);
507 bp_pack_value (&bp, node->address_taken, 1);
508 bp_pack_value (&bp, node->abstract_and_needed, 1);
509 bp_pack_value (&bp, tag == LTO_cgraph_analyzed_node
510 && !DECL_EXTERNAL (node->decl)
511 && !DECL_COMDAT (node->decl)
512 && (reachable_from_other_partition_p (node, set)
513 || referenced_from_other_partition_p (&node->ref_list, set, vset)), 1);
514 bp_pack_value (&bp, node->lowered, 1);
515 bp_pack_value (&bp, in_other_partition, 1);
516 bp_pack_value (&bp, node->alias, 1);
517 bp_pack_value (&bp, node->finalized_by_frontend, 1);
518 bp_pack_value (&bp, node->frequency, 2);
519 bp_pack_value (&bp, node->only_called_at_startup, 1);
520 bp_pack_value (&bp, node->only_called_at_exit, 1);
521 lto_output_bitpack (&bp);
522 lto_output_uleb128_stream (ob->main_stream, node->resolution);
523
524 if (node->same_body)
525 {
526 struct cgraph_node *alias;
527 unsigned long alias_count = 1;
528 for (alias = node->same_body; alias->next; alias = alias->next)
529 alias_count++;
530 lto_output_uleb128_stream (ob->main_stream, alias_count);
531 do
532 {
533 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
534 alias->decl);
535 if (alias->thunk.thunk_p)
536 {
537 lto_output_uleb128_stream
538 (ob->main_stream,
539 1 + (alias->thunk.this_adjusting != 0) * 2
540 + (alias->thunk.virtual_offset_p != 0) * 4);
541 lto_output_uleb128_stream (ob->main_stream,
542 alias->thunk.fixed_offset);
543 lto_output_uleb128_stream (ob->main_stream,
544 alias->thunk.virtual_value);
545 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
546 alias->thunk.alias);
547 }
548 else
549 {
550 lto_output_uleb128_stream (ob->main_stream, 0);
551 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
552 alias->thunk.alias);
553 }
554 lto_output_uleb128_stream (ob->main_stream, alias->resolution);
555 alias = alias->previous;
556 }
557 while (alias);
558 }
559 else
560 lto_output_uleb128_stream (ob->main_stream, 0);
561 }
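/* The same-body alias records written above encode the thunk kind in a
   small integer: 0 means a plain same-body alias (followed only by the
   decl index of the aliased function); otherwise the value is
   1 + 2 * this_adjusting + 4 * virtual_offset_p, followed by fixed_offset,
   virtual_value and the decl index of the aliased function.  For example,
   a this-adjusting thunk with a virtual offset is written as 7.
   input_node decodes the value with (type & 2) and (type & 4).  */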
562
563 /* Output the varpool NODE to OB.
564 If NODE is not in VSET, then NODE is a boundary. */
565
566 static void
567 lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
568 lto_varpool_encoder_t varpool_encoder,
569 cgraph_node_set set, varpool_node_set vset)
570 {
571 bool boundary_p = !varpool_node_in_set_p (node, vset) && node->analyzed;
572 struct bitpack_d bp;
573 struct varpool_node *alias;
574 int count = 0;
575 int ref;
576
577 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
578 bp = bitpack_create (ob->main_stream);
579 bp_pack_value (&bp, node->externally_visible, 1);
580 bp_pack_value (&bp, node->force_output, 1);
581 bp_pack_value (&bp, node->finalized, 1);
582 bp_pack_value (&bp, node->alias, 1);
583 gcc_assert (!node->alias || !node->extra_name);
584 gcc_assert (node->finalized || !node->analyzed);
585 gcc_assert (node->needed);
586 /* Constant pool initializers can be de-unified into individual ltrans units.
587 FIXME: Alternatively, at -Os we may want to avoid generating the local
588 labels for them and share them across LTRANS partitions. */
589 if (DECL_IN_CONSTANT_POOL (node->decl)
590 && !DECL_COMDAT (node->decl))
591 {
592 bp_pack_value (&bp, 0, 1); /* used_from_other_partition. */
593 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
594 }
595 else
596 {
597 bp_pack_value (&bp, node->analyzed
598 && referenced_from_other_partition_p (&node->ref_list,
599 set, vset), 1);
600 bp_pack_value (&bp, boundary_p, 1); /* in_other_partition. */
601 }
602 /* Also emit any extra name aliases. */
603 for (alias = node->extra_name; alias; alias = alias->next)
604 count++;
605 bp_pack_value (&bp, count != 0, 1);
606 lto_output_bitpack (&bp);
607 if (node->same_comdat_group && !boundary_p)
608 {
609 ref = lto_varpool_encoder_lookup (varpool_encoder, node->same_comdat_group);
610 gcc_assert (ref != LCC_NOT_FOUND);
611 }
612 else
613 ref = LCC_NOT_FOUND;
614 lto_output_sleb128_stream (ob->main_stream, ref);
615 lto_output_uleb128_stream (ob->main_stream, node->resolution);
616
617 if (count)
618 {
619 lto_output_uleb128_stream (ob->main_stream, count);
620 for (alias = node->extra_name; alias; alias = alias->next)
621 {
622 lto_output_var_decl_index (ob->decl_state, ob->main_stream, alias->decl);
623 lto_output_uleb128_stream (ob->main_stream, alias->resolution);
624 }
625 }
626 }
627
628 /* Output the reference REF to OB, using ENCODER for cgraph nodes
629 and VARPOOL_ENCODER for varpool nodes. */
630
631 static void
632 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
633 lto_cgraph_encoder_t encoder,
634 lto_varpool_encoder_t varpool_encoder)
635 {
636 struct bitpack_d bp;
637 bp = bitpack_create (ob->main_stream);
638 bp_pack_value (&bp, ref->refered_type, 1);
639 bp_pack_value (&bp, ref->use, 2);
640 lto_output_bitpack (&bp);
641 if (ref->refered_type == IPA_REF_CGRAPH)
642 {
643 int nref = lto_cgraph_encoder_lookup (encoder, ipa_ref_node (ref));
644 gcc_assert (nref != LCC_NOT_FOUND);
645 lto_output_sleb128_stream (ob->main_stream, nref);
646 }
647 else
648 {
649 int nref = lto_varpool_encoder_lookup (varpool_encoder,
650 ipa_ref_varpool_node (ref));
651 gcc_assert (nref != LCC_NOT_FOUND);
652 lto_output_sleb128_stream (ob->main_stream, nref);
653 }
654 }
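/* For reference, each ipa_ref record written above is a 3-bit bitpack
   (1 bit for the referred type, 2 bits for the use kind) followed by a
   sleb128 index into the cgraph or varpool encoder, matching what
   input_ref expects.  */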
655
656 /* Stream out profile_summary to OB. */
657
658 static void
659 output_profile_summary (struct lto_simple_output_block *ob)
660 {
661 if (profile_info)
662 {
663 /* We do not output num, sum_all and run_max, as they are not used by
664 GCC profile feedback and they are difficult to merge from multiple
665 units. */
666 gcc_assert (profile_info->runs);
667 lto_output_uleb128_stream (ob->main_stream, profile_info->runs);
668 lto_output_uleb128_stream (ob->main_stream, profile_info->sum_max);
669 }
670 else
671 lto_output_uleb128_stream (ob->main_stream, 0);
672 }
673
674 /* Add NODE to the encoder as well as the nodes it is cloned from.
675 Do it in an order where a clone always follows the node it was cloned from. */
676
677 static void
678 add_node_to (lto_cgraph_encoder_t encoder, struct cgraph_node *node,
679 bool include_body)
680 {
681 if (node->clone_of)
682 add_node_to (encoder, node->clone_of, include_body);
683 else if (include_body)
684 lto_set_cgraph_encoder_encode_body (encoder, node);
685 lto_cgraph_encoder_encode (encoder, node);
686 }
687
688 /* Add all references in LIST to encoders. */
689
690 static void
691 add_references (lto_cgraph_encoder_t encoder,
692 lto_varpool_encoder_t varpool_encoder,
693 struct ipa_ref_list *list)
694 {
695 int i;
696 struct ipa_ref *ref;
697 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
698 if (ref->refered_type == IPA_REF_CGRAPH)
699 add_node_to (encoder, ipa_ref_node (ref), false);
700 else
701 {
702 struct varpool_node *vnode = ipa_ref_varpool_node (ref);
703 lto_varpool_encoder_encode (varpool_encoder, vnode);
704 }
705 }
706
707 /* Output all callees or indirect outgoing edges. EDGE must be the first such
708 edge. */
709
710 static void
711 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
712 struct lto_simple_output_block *ob,
713 lto_cgraph_encoder_t encoder)
714 {
715 if (!edge)
716 return;
717
718 /* Output edges in backward direction, so the reconstructed callgraph matches
719 the original order and it is easy to associate call sites in the IPA pass summaries. */
720 while (edge->next_callee)
721 edge = edge->next_callee;
722 for (; edge; edge = edge->prev_callee)
723 lto_output_edge (ob, edge, encoder);
724 }
725
726 /* Output the references of the cgraph and varpool nodes in SET and VSET. */
727
728 static void
729 output_refs (cgraph_node_set set, varpool_node_set vset,
730 lto_cgraph_encoder_t encoder,
731 lto_varpool_encoder_t varpool_encoder)
732 {
733 cgraph_node_set_iterator csi;
734 varpool_node_set_iterator vsi;
735 struct lto_simple_output_block *ob;
736 int count;
737 struct ipa_ref *ref;
738 int i;
739
740 ob = lto_create_simple_output_block (LTO_section_refs);
741
742 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
743 {
744 struct cgraph_node *node = csi_node (csi);
745
746 count = ipa_ref_list_nreferences (&node->ref_list);
747 if (count)
748 {
749 lto_output_uleb128_stream (ob->main_stream, count);
750 lto_output_uleb128_stream (ob->main_stream,
751 lto_cgraph_encoder_lookup (encoder, node));
752 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
753 lto_output_ref (ob, ref, encoder, varpool_encoder);
754 }
755 }
756
757 lto_output_uleb128_stream (ob->main_stream, 0);
758
759 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
760 {
761 struct varpool_node *node = vsi_node (vsi);
762
763 count = ipa_ref_list_nreferences (&node->ref_list);
764 if (count)
765 {
766 lto_output_uleb128_stream (ob->main_stream, count);
767 lto_output_uleb128_stream (ob->main_stream,
768 lto_varpool_encoder_lookup (varpool_encoder,
769 node));
770 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
771 lto_output_ref (ob, ref, encoder, varpool_encoder);
772 }
773 }
774
775 lto_output_uleb128_stream (ob->main_stream, 0);
776
777 lto_destroy_simple_output_block (ob);
778 }
779
780 /* Find out all cgraph and varpool nodes we want to encode in the current unit
781 and insert them into the encoders. */
782 void
783 compute_ltrans_boundary (struct lto_out_decl_state *state,
784 cgraph_node_set set, varpool_node_set vset)
785 {
786 struct cgraph_node *node;
787 cgraph_node_set_iterator csi;
788 varpool_node_set_iterator vsi;
789 struct cgraph_edge *edge;
790 int i;
791 lto_cgraph_encoder_t encoder;
792 lto_varpool_encoder_t varpool_encoder;
793
794 encoder = state->cgraph_node_encoder = lto_cgraph_encoder_new ();
795 varpool_encoder = state->varpool_node_encoder = lto_varpool_encoder_new ();
796
797 /* Go over all the nodes in SET and assign references. */
798 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
799 {
800 node = csi_node (csi);
801 add_node_to (encoder, node, true);
802 add_references (encoder, varpool_encoder, &node->ref_list);
803 }
804 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
805 {
806 struct varpool_node *vnode = vsi_node (vsi);
807 gcc_assert (!vnode->alias);
808 lto_varpool_encoder_encode (varpool_encoder, vnode);
809 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
810 add_references (encoder, varpool_encoder, &vnode->ref_list);
811 }
812 /* Also pickle the initializer of every referenced readonly variable
813 to help folding. Constant pool variables are not shared, so we must
814 pickle those too. */
815 for (i = 0; i < lto_varpool_encoder_size (varpool_encoder); i++)
816 {
817 struct varpool_node *vnode = lto_varpool_encoder_deref (varpool_encoder, i);
818 if (DECL_INITIAL (vnode->decl)
819 && !lto_varpool_encoder_encode_initializer_p (varpool_encoder,
820 vnode)
821 && const_value_known_p (vnode->decl))
822 {
823 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
824 add_references (encoder, varpool_encoder, &vnode->ref_list);
825 }
826 }
827
828 /* Go over all the nodes again to include callees that are not in
829 SET. */
830 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
831 {
832 node = csi_node (csi);
833 for (edge = node->callees; edge; edge = edge->next_callee)
834 {
835 struct cgraph_node *callee = edge->callee;
836 if (!cgraph_node_in_set_p (callee, set))
837 {
838 /* We should have moved all the inlines. */
839 gcc_assert (!callee->global.inlined_to);
840 add_node_to (encoder, callee, false);
841 }
842 }
843 }
844 }
845
846 /* Output the part of the cgraph in SET. */
847
848 void
849 output_cgraph (cgraph_node_set set, varpool_node_set vset)
850 {
851 struct cgraph_node *node;
852 struct lto_simple_output_block *ob;
853 cgraph_node_set_iterator csi;
854 int i, n_nodes;
855 lto_cgraph_encoder_t encoder;
856 lto_varpool_encoder_t varpool_encoder;
857 struct cgraph_asm_node *can;
858 static bool asm_nodes_output = false;
859
860 if (flag_wpa)
861 output_cgraph_opt_summary (set);
862
863 ob = lto_create_simple_output_block (LTO_section_cgraph);
864
865 output_profile_summary (ob);
866
867 /* An encoder for cgraph nodes should have been created by
868 ipa_write_summaries_1. */
869 gcc_assert (ob->decl_state->cgraph_node_encoder);
870 gcc_assert (ob->decl_state->varpool_node_encoder);
871 encoder = ob->decl_state->cgraph_node_encoder;
872 varpool_encoder = ob->decl_state->varpool_node_encoder;
873
874 /* Write out the nodes. We must output a node before its clones,
875 otherwise at the time a clone is read back there would be nothing
876 to clone from. */
877 n_nodes = lto_cgraph_encoder_size (encoder);
878 for (i = 0; i < n_nodes; i++)
879 {
880 node = lto_cgraph_encoder_deref (encoder, i);
881 lto_output_node (ob, node, encoder, set, vset);
882 }
883
884 /* Go over the nodes in SET again to write edges. */
885 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
886 {
887 node = csi_node (csi);
888 output_outgoing_cgraph_edges (node->callees, ob, encoder);
889 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
890 }
891
892 lto_output_uleb128_stream (ob->main_stream, 0);
893
894 /* Emit toplevel asms.
895 When doing WPA we must output every asm just once. Since we do not partition asm
896 nodes at all, output them to the first output. This is kind of a hack, but it
897 should work well. */
898 if (!asm_nodes_output)
899 {
900 asm_nodes_output = true;
901 for (can = cgraph_asm_nodes; can; can = can->next)
902 {
903 int len = TREE_STRING_LENGTH (can->asm_str);
904 lto_output_uleb128_stream (ob->main_stream, len);
905 for (i = 0; i < len; ++i)
906 lto_output_1_stream (ob->main_stream,
907 TREE_STRING_POINTER (can->asm_str)[i]);
908 }
909 }
910
911 lto_output_uleb128_stream (ob->main_stream, 0);
912
913 lto_destroy_simple_output_block (ob);
914 output_varpool (set, vset);
915 output_refs (set, vset, encoder, varpool_encoder);
916 }
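/* To summarize, output_cgraph above produces the LTO_section_cgraph
   section in the following order:

     profile summary (runs and sum_max, or a single 0)
     one record per encoded cgraph node
     one record per outgoing edge of every node in SET
     0 terminator
     toplevel asm strings (uleb128 length plus bytes each)
     0 terminator

   followed by the separate LTO_section_varpool and LTO_section_refs
   sections written by output_varpool and output_refs.  input_cgraph
   reads the three sections back in the same order.  */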
917
918 /* Overwrite the information in NODE based on FILE_DATA, TAG, BP and the
919 summary values STACK_SIZE, SELF_TIME, TIME_INLINING_BENEFIT, SELF_SIZE,
920 SIZE_INLINING_BENEFIT and RESOLUTION. This is called either to initialize
921 NODE or to replace the values in it, for instance because the first
922 time we saw it, the function body was not available but now it
923 is. BP is a bitpack with all the bitflags for NODE read from the stream. */
924
925 static void
926 input_overwrite_node (struct lto_file_decl_data *file_data,
927 struct cgraph_node *node,
928 enum LTO_cgraph_tags tag,
929 struct bitpack_d *bp,
930 unsigned int stack_size,
931 unsigned int self_time,
932 unsigned int time_inlining_benefit,
933 unsigned int self_size,
934 unsigned int size_inlining_benefit,
935 enum ld_plugin_symbol_resolution resolution)
936 {
937 node->aux = (void *) tag;
938 node->local.inline_summary.estimated_self_stack_size = stack_size;
939 node->local.inline_summary.self_time = self_time;
940 node->local.inline_summary.time_inlining_benefit = time_inlining_benefit;
941 node->local.inline_summary.self_size = self_size;
942 node->local.inline_summary.size_inlining_benefit = size_inlining_benefit;
943 node->global.time = self_time;
944 node->global.size = self_size;
945 node->global.estimated_stack_size = stack_size;
946 node->global.estimated_growth = INT_MIN;
947 node->local.lto_file_data = file_data;
948
949 node->local.local = bp_unpack_value (bp, 1);
950 node->local.externally_visible = bp_unpack_value (bp, 1);
951 node->local.finalized = bp_unpack_value (bp, 1);
952 node->local.inlinable = bp_unpack_value (bp, 1);
953 node->local.versionable = bp_unpack_value (bp, 1);
954 node->local.can_change_signature = bp_unpack_value (bp, 1);
955 node->local.disregard_inline_limits = bp_unpack_value (bp, 1);
956 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
957 node->local.vtable_method = bp_unpack_value (bp, 1);
958 node->needed = bp_unpack_value (bp, 1);
959 node->address_taken = bp_unpack_value (bp, 1);
960 node->abstract_and_needed = bp_unpack_value (bp, 1);
961 node->reachable_from_other_partition = bp_unpack_value (bp, 1);
962 node->lowered = bp_unpack_value (bp, 1);
963 node->analyzed = tag == LTO_cgraph_analyzed_node;
964 node->in_other_partition = bp_unpack_value (bp, 1);
965 if (node->in_other_partition
966 /* Avoid updating decl when we are seeing just inline clone.
967 When inlining function that has functions already inlined into it,
968 we produce clones of inline clones.
969
970 WPA partitioning might put each clone into different unit and
971 we might end up streaming inline clone from other partition
972 to support clone we are interested in. */
973 && (!node->clone_of
974 || node->clone_of->decl != node->decl))
975 {
976 DECL_EXTERNAL (node->decl) = 1;
977 TREE_STATIC (node->decl) = 0;
978 }
979 node->alias = bp_unpack_value (bp, 1);
980 node->finalized_by_frontend = bp_unpack_value (bp, 1);
981 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
982 node->only_called_at_startup = bp_unpack_value (bp, 1);
983 node->only_called_at_exit = bp_unpack_value (bp, 1);
984 node->resolution = resolution;
985 }
986
987 /* Output the part of the varpool in VSET. SET is used to compute boundaries. */
988
989 static void
990 output_varpool (cgraph_node_set set, varpool_node_set vset)
991 {
992 struct lto_simple_output_block *ob = lto_create_simple_output_block (LTO_section_varpool);
993 lto_varpool_encoder_t varpool_encoder = ob->decl_state->varpool_node_encoder;
994 int len = lto_varpool_encoder_size (varpool_encoder), i;
995
996 lto_output_uleb128_stream (ob->main_stream, len);
997
998 /* Write out the nodes in the order assigned by the varpool encoder, so
999 that the reader can associate each record with the corresponding node
1000 index. */
1001 for (i = 0; i < len; i++)
1002 {
1003 lto_output_varpool_node (ob, lto_varpool_encoder_deref (varpool_encoder, i),
1004 varpool_encoder,
1005 set, vset);
1006 }
1007
1008 lto_destroy_simple_output_block (ob);
1009 }
1010
1011 /* Read a cgraph node from input_block IB. TAG is the node's tag just read.
1012 Return the node read or overwritten. */
1013
1014 static struct cgraph_node *
1015 input_node (struct lto_file_decl_data *file_data,
1016 struct lto_input_block *ib,
1017 enum LTO_cgraph_tags tag,
1018 VEC(cgraph_node_ptr, heap) *nodes)
1019 {
1020 tree fn_decl;
1021 struct cgraph_node *node;
1022 struct bitpack_d bp;
1023 int stack_size = 0;
1024 unsigned decl_index;
1025 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
1026 int self_time = 0;
1027 int self_size = 0;
1028 int time_inlining_benefit = 0;
1029 int size_inlining_benefit = 0;
1030 unsigned long same_body_count = 0;
1031 int clone_ref;
1032 enum ld_plugin_symbol_resolution resolution;
1033
1034 clone_ref = lto_input_sleb128 (ib);
1035
1036 decl_index = lto_input_uleb128 (ib);
1037 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1038
1039 if (clone_ref != LCC_NOT_FOUND)
1040 {
1041 node = cgraph_clone_node (VEC_index (cgraph_node_ptr, nodes, clone_ref), fn_decl,
1042 0, CGRAPH_FREQ_BASE, 0, false, NULL);
1043 }
1044 else
1045 node = cgraph_node (fn_decl);
1046
1047 node->count = lto_input_sleb128 (ib);
1048 node->count_materialization_scale = lto_input_sleb128 (ib);
1049
1050 if (tag == LTO_cgraph_analyzed_node)
1051 {
1052 stack_size = lto_input_sleb128 (ib);
1053 self_size = lto_input_sleb128 (ib);
1054 size_inlining_benefit = lto_input_sleb128 (ib);
1055 self_time = lto_input_sleb128 (ib);
1056 time_inlining_benefit = lto_input_sleb128 (ib);
1057
1058 ref = lto_input_sleb128 (ib);
1059 }
1060
1061 ref2 = lto_input_sleb128 (ib);
1062
1063 /* Make sure that we have not read this node before. Nodes that
1064 have already been read will have their tag stored in the 'aux'
1065 field. Since built-in functions can be referenced in multiple
1066 functions, they are expected to be read more than once. */
1067 if (node->aux && !DECL_IS_BUILTIN (node->decl))
1068 internal_error ("bytecode stream: found multiple instances of cgraph "
1069 "node %d", node->uid);
1070
1071 bp = lto_input_bitpack (ib);
1072 resolution = (enum ld_plugin_symbol_resolution)lto_input_uleb128 (ib);
1073 input_overwrite_node (file_data, node, tag, &bp, stack_size, self_time,
1074 time_inlining_benefit, self_size,
1075 size_inlining_benefit, resolution);
1076
1077 /* Store a reference for now, and fix up later to be a pointer. */
1078 node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
1079
1080 /* Store a reference for now, and fix up later to be a pointer. */
1081 node->same_comdat_group = (cgraph_node_ptr) (intptr_t) ref2;
1082
1083 same_body_count = lto_input_uleb128 (ib);
1084 while (same_body_count-- > 0)
1085 {
1086 tree alias_decl;
1087 int type;
1088 struct cgraph_node *alias;
1089 decl_index = lto_input_uleb128 (ib);
1090 alias_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1091 type = lto_input_uleb128 (ib);
1092 if (!type)
1093 {
1094 tree real_alias;
1095 decl_index = lto_input_uleb128 (ib);
1096 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1097 alias = cgraph_same_body_alias (alias_decl, real_alias);
1098 }
1099 else
1100 {
1101 HOST_WIDE_INT fixed_offset = lto_input_uleb128 (ib);
1102 HOST_WIDE_INT virtual_value = lto_input_uleb128 (ib);
1103 tree real_alias;
1104 decl_index = lto_input_uleb128 (ib);
1105 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1106 alias = cgraph_add_thunk (alias_decl, fn_decl, type & 2, fixed_offset,
1107 virtual_value,
1108 (type & 4) ? size_int (virtual_value) : NULL_TREE,
1109 real_alias);
1110 }
1111 alias->resolution = (enum ld_plugin_symbol_resolution)lto_input_uleb128 (ib);
1112 }
1113 return node;
1114 }
1115
1116 /* Read a varpool node from input_block IB using the info in FILE_DATA.
1117 Return the node read or overwritten. */
1118
1119 static struct varpool_node *
1120 input_varpool_node (struct lto_file_decl_data *file_data,
1121 struct lto_input_block *ib)
1122 {
1123 int decl_index;
1124 tree var_decl;
1125 struct varpool_node *node;
1126 struct bitpack_d bp;
1127 bool aliases_p;
1128 int count;
1129 int ref = LCC_NOT_FOUND;
1130
1131 decl_index = lto_input_uleb128 (ib);
1132 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1133 node = varpool_node (var_decl);
1134 node->lto_file_data = file_data;
1135
1136 bp = lto_input_bitpack (ib);
1137 node->externally_visible = bp_unpack_value (&bp, 1);
1138 node->force_output = bp_unpack_value (&bp, 1);
1139 node->finalized = bp_unpack_value (&bp, 1);
1140 node->alias = bp_unpack_value (&bp, 1);
1141 node->analyzed = node->finalized;
1142 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1143 node->in_other_partition = bp_unpack_value (&bp, 1);
1144 if (node->in_other_partition)
1145 {
1146 DECL_EXTERNAL (node->decl) = 1;
1147 TREE_STATIC (node->decl) = 0;
1148 }
1149 aliases_p = bp_unpack_value (&bp, 1);
1150 if (node->finalized)
1151 varpool_mark_needed_node (node);
1152 ref = lto_input_sleb128 (ib);
1153 /* Store a reference for now, and fix up later to be a pointer. */
1154 node->same_comdat_group = (struct varpool_node *) (intptr_t) ref;
1155 node->resolution = (enum ld_plugin_symbol_resolution)lto_input_uleb128 (ib);
1156 if (aliases_p)
1157 {
1158 count = lto_input_uleb128 (ib);
1159 for (; count > 0; count --)
1160 {
1161 tree decl = lto_file_decl_data_get_var_decl (file_data,
1162 lto_input_uleb128 (ib));
1163 struct varpool_node *alias;
1164 alias = varpool_extra_name_alias (decl, var_decl);
1165 alias->resolution = (enum ld_plugin_symbol_resolution)lto_input_uleb128 (ib);
1166 }
1167 }
1168 return node;
1169 }
1170
1171 /* Read an ipa_ref from input_block IB and attach it to REFERING_NODE or
1172 REFERING_VARPOOL_NODE, using NODES and VARPOOL_NODES to resolve indices. */
1173
1174 static void
1175 input_ref (struct lto_input_block *ib,
1176 struct cgraph_node *refering_node,
1177 struct varpool_node *refering_varpool_node,
1178 VEC(cgraph_node_ptr, heap) *nodes,
1179 VEC(varpool_node_ptr, heap) *varpool_nodes)
1180 {
1181 struct cgraph_node *node = NULL;
1182 struct varpool_node *varpool_node = NULL;
1183 struct bitpack_d bp;
1184 enum ipa_ref_type type;
1185 enum ipa_ref_use use;
1186
1187 bp = lto_input_bitpack (ib);
1188 type = (enum ipa_ref_type) bp_unpack_value (&bp, 1);
1189 use = (enum ipa_ref_use) bp_unpack_value (&bp, 2);
1190 if (type == IPA_REF_CGRAPH)
1191 node = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1192 else
1193 varpool_node = VEC_index (varpool_node_ptr, varpool_nodes, lto_input_sleb128 (ib));
1194 ipa_record_reference (refering_node, refering_varpool_node,
1195 node, varpool_node, use, NULL);
1196 }
1197
1198 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1199 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1200 edge being read is indirect (in the sense that it has
1201 indirect_unknown_callee set). */
1202
1203 static void
1204 input_edge (struct lto_input_block *ib, VEC(cgraph_node_ptr, heap) *nodes,
1205 bool indirect)
1206 {
1207 struct cgraph_node *caller, *callee;
1208 struct cgraph_edge *edge;
1209 unsigned int stmt_id;
1210 gcov_type count;
1211 int freq;
1212 unsigned int nest;
1213 cgraph_inline_failed_t inline_failed;
1214 struct bitpack_d bp;
1215 int ecf_flags = 0;
1216
1217 caller = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1218 if (caller == NULL || caller->decl == NULL_TREE)
1219 internal_error ("bytecode stream: no caller found while reading edge");
1220
1221 if (!indirect)
1222 {
1223 callee = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1224 if (callee == NULL || callee->decl == NULL_TREE)
1225 internal_error ("bytecode stream: no callee found while reading edge");
1226 }
1227 else
1228 callee = NULL;
1229
1230 count = (gcov_type) lto_input_sleb128 (ib);
1231
1232 bp = lto_input_bitpack (ib);
1233 stmt_id = (unsigned int) bp_unpack_value (&bp, HOST_BITS_PER_INT);
1234 inline_failed = (cgraph_inline_failed_t) bp_unpack_value (&bp,
1235 HOST_BITS_PER_INT);
1236 freq = (int) bp_unpack_value (&bp, HOST_BITS_PER_INT);
1237 nest = (unsigned) bp_unpack_value (&bp, 30);
1238
1239 if (indirect)
1240 edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq, nest);
1241 else
1242 edge = cgraph_create_edge (caller, callee, NULL, count, freq, nest);
1243
1244 edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
1245 edge->lto_stmt_uid = stmt_id;
1246 edge->inline_failed = inline_failed;
1247 edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
1248 edge->can_throw_external = bp_unpack_value (&bp, 1);
1249 if (indirect)
1250 {
1251 if (bp_unpack_value (&bp, 1))
1252 ecf_flags |= ECF_CONST;
1253 if (bp_unpack_value (&bp, 1))
1254 ecf_flags |= ECF_PURE;
1255 if (bp_unpack_value (&bp, 1))
1256 ecf_flags |= ECF_NORETURN;
1257 if (bp_unpack_value (&bp, 1))
1258 ecf_flags |= ECF_MALLOC;
1259 if (bp_unpack_value (&bp, 1))
1260 ecf_flags |= ECF_NOTHROW;
1261 if (bp_unpack_value (&bp, 1))
1262 ecf_flags |= ECF_RETURNS_TWICE;
1263 edge->indirect_info->ecf_flags = ecf_flags;
1264 }
1265 }
1266
1267
1268 /* Read a cgraph from IB using the info in FILE_DATA. */
1269
1270 static VEC(cgraph_node_ptr, heap) *
1271 input_cgraph_1 (struct lto_file_decl_data *file_data,
1272 struct lto_input_block *ib)
1273 {
1274 enum LTO_cgraph_tags tag;
1275 VEC(cgraph_node_ptr, heap) *nodes = NULL;
1276 struct cgraph_node *node;
1277 unsigned i;
1278 unsigned HOST_WIDE_INT len;
1279
1280 tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
1281 while (tag)
1282 {
1283 if (tag == LTO_cgraph_edge)
1284 input_edge (ib, nodes, false);
1285 else if (tag == LTO_cgraph_indirect_edge)
1286 input_edge (ib, nodes, true);
1287 else
1288 {
1289 node = input_node (file_data, ib, tag, nodes);
1290 if (node == NULL || node->decl == NULL_TREE)
1291 internal_error ("bytecode stream: found empty cgraph node");
1292 VEC_safe_push (cgraph_node_ptr, heap, nodes, node);
1293 lto_cgraph_encoder_encode (file_data->cgraph_node_encoder, node);
1294 }
1295
1296 tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
1297 }
1298
1299 /* Input toplevel asms. */
1300 len = lto_input_uleb128 (ib);
1301 while (len)
1302 {
1303 char *str = (char *)xmalloc (len + 1);
1304 for (i = 0; i < len; ++i)
1305 str[i] = lto_input_1_unsigned (ib);
1306 cgraph_add_asm_node (build_string (len, str));
1307 free (str);
1308
1309 len = lto_input_uleb128 (ib);
1310 }
1311 /* AUX pointers should all be non-zero for nodes read from the stream. */
1312 #ifdef ENABLE_CHECKING
1313 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1314 gcc_assert (node->aux);
1315 #endif
1316 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1317 {
1318 int ref = (int) (intptr_t) node->global.inlined_to;
1319
1320 /* We share the declaration of builtins, so we may read the same node twice. */
1321 if (!node->aux)
1322 continue;
1323 node->aux = NULL;
1324
1325 /* Fixup inlined_to from reference to pointer. */
1326 if (ref != LCC_NOT_FOUND)
1327 node->global.inlined_to = VEC_index (cgraph_node_ptr, nodes, ref);
1328 else
1329 node->global.inlined_to = NULL;
1330
1331 ref = (int) (intptr_t) node->same_comdat_group;
1332
1333 /* Fixup same_comdat_group from reference to pointer. */
1334 if (ref != LCC_NOT_FOUND)
1335 node->same_comdat_group = VEC_index (cgraph_node_ptr, nodes, ref);
1336 else
1337 node->same_comdat_group = NULL;
1338 }
1339 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1340 node->aux = (void *)1;
1341 return nodes;
1342 }
1343
1344 /* Read a varpool from IB using the info in FILE_DATA. */
1345
1346 static VEC(varpool_node_ptr, heap) *
1347 input_varpool_1 (struct lto_file_decl_data *file_data,
1348 struct lto_input_block *ib)
1349 {
1350 unsigned HOST_WIDE_INT len;
1351 VEC(varpool_node_ptr, heap) *varpool = NULL;
1352 int i;
1353 struct varpool_node *node;
1354
1355 len = lto_input_uleb128 (ib);
1356 while (len)
1357 {
1358 VEC_safe_push (varpool_node_ptr, heap, varpool,
1359 input_varpool_node (file_data, ib));
1360 len--;
1361 }
1362 #ifdef ENABLE_CHECKING
1363 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1364 gcc_assert (!node->aux);
1365 #endif
1366 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1367 {
1368 int ref = (int) (intptr_t) node->same_comdat_group;
1369 /* We share the declaration of builtins, so we may read the same node twice. */
1370 if (node->aux)
1371 continue;
1372 node->aux = (void *)1;
1373
1374 /* Fixup same_comdat_group from reference to pointer. */
1375 if (ref != LCC_NOT_FOUND)
1376 node->same_comdat_group = VEC_index (varpool_node_ptr, varpool, ref);
1377 else
1378 node->same_comdat_group = NULL;
1379 }
1380 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1381 node->aux = NULL;
1382 return varpool;
1383 }
1384
1385 /* Input ipa_refs. */
1386
1387 static void
1388 input_refs (struct lto_input_block *ib,
1389 VEC(cgraph_node_ptr, heap) *nodes,
1390 VEC(varpool_node_ptr, heap) *varpool)
1391 {
1392 int count;
1393 int idx;
1394 while (true)
1395 {
1396 struct cgraph_node *node;
1397 count = lto_input_uleb128 (ib);
1398 if (!count)
1399 break;
1400 idx = lto_input_uleb128 (ib);
1401 node = VEC_index (cgraph_node_ptr, nodes, idx);
1402 while (count)
1403 {
1404 input_ref (ib, node, NULL, nodes, varpool);
1405 count--;
1406 }
1407 }
1408 while (true)
1409 {
1410 struct varpool_node *node;
1411 count = lto_input_uleb128 (ib);
1412 if (!count)
1413 break;
1414 node = VEC_index (varpool_node_ptr, varpool, lto_input_uleb128 (ib));
1415 while (count)
1416 {
1417 input_ref (ib, NULL, node, nodes, varpool);
1418 count--;
1419 }
1420 }
1421 }
1422
1423
1424 static struct gcov_ctr_summary lto_gcov_summary;
1425
1426 /* Input profile_info from IB. */
1427 static void
1428 input_profile_summary (struct lto_input_block *ib,
1429 struct lto_file_decl_data *file_data)
1430 {
1431 unsigned int runs = lto_input_uleb128 (ib);
1432 if (runs)
1433 {
1434 file_data->profile_info.runs = runs;
1435 file_data->profile_info.sum_max = lto_input_uleb128 (ib);
1436 }
1437
1438 }
1439
1440 /* Rescale profile summaries to the same number of runs in the whole unit. */
1441
1442 static void
1443 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1444 {
1445 struct lto_file_decl_data *file_data;
1446 unsigned int j;
1447 gcov_unsigned_t max_runs = 0;
1448 struct cgraph_node *node;
1449 struct cgraph_edge *edge;
1450
1451 /* Find the unit with the maximal number of runs. If we ever get serious
1452 about roundoff errors, we might also consider computing the least common
1453 multiple. */
1454 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1455 if (max_runs < file_data->profile_info.runs)
1456 max_runs = file_data->profile_info.runs;
1457
1458 if (!max_runs)
1459 return;
1460
1461 /* Simple overflow check. We probably don't need to support that many train
1462 runs. Such a large value probably implies data corruption anyway. */
1463 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1464 {
1465 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1466 INT_MAX / REG_BR_PROB_BASE);
1467 return;
1468 }
1469
1470 profile_info = &lto_gcov_summary;
1471 lto_gcov_summary.runs = max_runs;
1472 lto_gcov_summary.sum_max = 0;
1473
1474 /* Rescale all units to the maximal number of runs.
1475 sum_max cannot be easily merged, as we have no idea which files come from
1476 the same run. We do not use the info anyway, so leave it 0. */
1477 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1478 if (file_data->profile_info.runs)
1479 {
1480 int scale = ((REG_BR_PROB_BASE * max_runs
1481 + file_data->profile_info.runs / 2)
1482 / file_data->profile_info.runs);
1483 lto_gcov_summary.sum_max = MAX (lto_gcov_summary.sum_max,
1484 (file_data->profile_info.sum_max
1485 * scale
1486 + REG_BR_PROB_BASE / 2)
1487 / REG_BR_PROB_BASE);
1488 }
1489
1490 /* Watch roundoff errors. */
1491 if (lto_gcov_summary.sum_max < max_runs)
1492 lto_gcov_summary.sum_max = max_runs;
1493
1494 /* If merging already happened at WPA time, we are done. */
1495 if (flag_ltrans)
1496 return;
1497
1498 /* Now compute the count_materialization_scale of each node. During
1499 LTRANS the scales were already computed at WPA time, which is why we
1500 returned early above instead of recomputing them here. */
1501 for (node = cgraph_nodes; node; node = node->next)
1502 if (node->local.lto_file_data->profile_info.runs)
1503 {
1504 int scale;
1505
1506 scale =
1507 ((node->count_materialization_scale * max_runs
1508 + node->local.lto_file_data->profile_info.runs / 2)
1509 / node->local.lto_file_data->profile_info.runs);
1510 node->count_materialization_scale = scale;
1511 if (scale < 0)
1512 fatal_error ("Profile information in %s corrupted",
1513 file_data->file_name);
1514
1515 if (scale == REG_BR_PROB_BASE)
1516 continue;
1517 for (edge = node->callees; edge; edge = edge->next_callee)
1518 edge->count = ((edge->count * scale + REG_BR_PROB_BASE / 2)
1519 / REG_BR_PROB_BASE);
1520 node->count = ((node->count * scale + REG_BR_PROB_BASE / 2)
1521 / REG_BR_PROB_BASE);
1522 }
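/* A worked example of the fixed-point rescaling above, assuming
   REG_BR_PROB_BASE is 10000 (its usual value): a unit trained with
   runs = 2 while max_runs = 6 gets

     scale = (10000 * 6 + 2 / 2) / 2 = 60001 / 2 = 30000,

   i.e. a factor of 3.0, and a counter of 100 in that unit is then
   rescaled to

     (100 * 30000 + 10000 / 2) / 10000 = 3005000 / 10000 = 300.  */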
1523 }
1524
1525 /* Input and merge the cgraph from each of the .o files passed to
1526 lto1. */
1527
1528 void
1529 input_cgraph (void)
1530 {
1531 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1532 struct lto_file_decl_data *file_data;
1533 unsigned int j = 0;
1534 struct cgraph_node *node;
1535
1536 while ((file_data = file_data_vec[j++]))
1537 {
1538 const char *data;
1539 size_t len;
1540 struct lto_input_block *ib;
1541 VEC(cgraph_node_ptr, heap) *nodes;
1542 VEC(varpool_node_ptr, heap) *varpool;
1543
1544 ib = lto_create_simple_input_block (file_data, LTO_section_cgraph,
1545 &data, &len);
1546 if (!ib)
1547 fatal_error ("cannot find LTO cgraph in %s", file_data->file_name);
1548 input_profile_summary (ib, file_data);
1549 file_data->cgraph_node_encoder = lto_cgraph_encoder_new ();
1550 nodes = input_cgraph_1 (file_data, ib);
1551 lto_destroy_simple_input_block (file_data, LTO_section_cgraph,
1552 ib, data, len);
1553
1554 ib = lto_create_simple_input_block (file_data, LTO_section_varpool,
1555 &data, &len);
1556 if (!ib)
1557 fatal_error ("cannot find LTO varpool in %s", file_data->file_name);
1558 varpool = input_varpool_1 (file_data, ib);
1559 lto_destroy_simple_input_block (file_data, LTO_section_varpool,
1560 ib, data, len);
1561
1562 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1563 &data, &len);
1564 if (!ib)
1565 fatal_error("cannot find LTO section refs in %s", file_data->file_name);
1566 input_refs (ib, nodes, varpool);
1567 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1568 ib, data, len);
1569 if (flag_ltrans)
1570 input_cgraph_opt_summary (nodes);
1571 VEC_free (cgraph_node_ptr, heap, nodes);
1572 VEC_free (varpool_node_ptr, heap, varpool);
1573 }
1574 merge_profile_summaries (file_data_vec);
1575
1576
1577 /* Clear out the aux field that was used to store enough state to
1578 tell which nodes should be overwritten. */
1579 for (node = cgraph_nodes; node; node = node->next)
1580 {
1581 /* Some nodes may have been created by cgraph_node. This
1582 happens when the callgraph contains nested functions. If the
1583 node for the parent function was never emitted to the gimple
1584 file, cgraph_node will create a node for it when setting the
1585 context of the nested function. */
1586 if (node->local.lto_file_data)
1587 node->aux = NULL;
1588 }
1589 }
1590
1591 /* Return true when we need an optimization summary for NODE. */
1592
1593 static int
1594 output_cgraph_opt_summary_p (struct cgraph_node *node, cgraph_node_set set)
1595 {
1596 struct cgraph_edge *e;
1597
1598 if (cgraph_node_in_set_p (node, set))
1599 {
1600 for (e = node->callees; e; e = e->next_callee)
1601 if (e->indirect_info
1602 && e->indirect_info->thunk_delta != 0)
1603 return true;
1604
1605 for (e = node->indirect_calls; e; e = e->next_callee)
1606 if (e->indirect_info->thunk_delta != 0)
1607 return true;
1608 }
1609
1610 return (node->clone_of
1611 && (node->clone.tree_map
1612 || node->clone.args_to_skip
1613 || node->clone.combined_args_to_skip));
1614 }
1615
1616 /* Output optimization summary for EDGE to OB. */
1617 static void
1618 output_edge_opt_summary (struct output_block *ob,
1619 struct cgraph_edge *edge)
1620 {
1621 if (edge->indirect_info)
1622 lto_output_sleb128_stream (ob->main_stream,
1623 edge->indirect_info->thunk_delta);
1624 else
1625 lto_output_sleb128_stream (ob->main_stream, 0);
1626 }
1627
1628 /* Output optimization summary for NODE to OB. */
1629
1630 static void
1631 output_node_opt_summary (struct output_block *ob,
1632 struct cgraph_node *node,
1633 cgraph_node_set set)
1634 {
1635 unsigned int index;
1636 bitmap_iterator bi;
1637 struct ipa_replace_map *map;
1638 struct bitpack_d bp;
1639 int i;
1640 struct cgraph_edge *e;
1641
1642 lto_output_uleb128_stream (ob->main_stream,
1643 bitmap_count_bits (node->clone.args_to_skip));
1644 EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
1645 lto_output_uleb128_stream (ob->main_stream, index);
1646 lto_output_uleb128_stream (ob->main_stream,
1647 bitmap_count_bits (node->clone.combined_args_to_skip));
1648 EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
1649 lto_output_uleb128_stream (ob->main_stream, index);
1650 lto_output_uleb128_stream (ob->main_stream,
1651 VEC_length (ipa_replace_map_p, node->clone.tree_map));
1652 FOR_EACH_VEC_ELT (ipa_replace_map_p, node->clone.tree_map, i, map)
1653 {
1654 int parm_num;
1655 tree parm;
1656
1657 for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm;
1658 parm = DECL_CHAIN (parm), parm_num++)
1659 if (map->old_tree == parm)
1660 break;
1661 /* At the moment we assume all old trees to be PARM_DECLs, because we have no
1662 mechanism to store function local declarations into summaries. */
1663 gcc_assert (parm);
1664 lto_output_uleb128_stream (ob->main_stream, parm_num);
1665 lto_output_tree (ob, map->new_tree, true);
1666 bp = bitpack_create (ob->main_stream);
1667 bp_pack_value (&bp, map->replace_p, 1);
1668 bp_pack_value (&bp, map->ref_p, 1);
1669 lto_output_bitpack (&bp);
1670 }
1671
1672 if (cgraph_node_in_set_p (node, set))
1673 {
1674 for (e = node->callees; e; e = e->next_callee)
1675 output_edge_opt_summary (ob, e);
1676 for (e = node->indirect_calls; e; e = e->next_callee)
1677 output_edge_opt_summary (ob, e);
1678 }
1679 }
1680
1681 /* Output the optimization summaries stored in the callgraph.
1682 At the moment this is the clone info structure. */
1683
1684 static void
1685 output_cgraph_opt_summary (cgraph_node_set set)
1686 {
1687 struct cgraph_node *node;
1688 int i, n_nodes;
1689 lto_cgraph_encoder_t encoder;
1690 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1691 unsigned count = 0;
1692
1693 ob->cgraph_node = NULL;
1694 encoder = ob->decl_state->cgraph_node_encoder;
1695 n_nodes = lto_cgraph_encoder_size (encoder);
1696 for (i = 0; i < n_nodes; i++)
1697 if (output_cgraph_opt_summary_p (lto_cgraph_encoder_deref (encoder, i),
1698 set))
1699 count++;
1700 lto_output_uleb128_stream (ob->main_stream, count);
1701 for (i = 0; i < n_nodes; i++)
1702 {
1703 node = lto_cgraph_encoder_deref (encoder, i);
1704 if (output_cgraph_opt_summary_p (node, set))
1705 {
1706 lto_output_uleb128_stream (ob->main_stream, i);
1707 output_node_opt_summary (ob, node, set);
1708 }
1709 }
1710 produce_asm (ob, NULL);
1711 destroy_output_block (ob);
1712 }
1713
1714 /* Input the optimization summary of EDGE. */
1715
1716 static void
1717 input_edge_opt_summary (struct cgraph_edge *edge,
1718 struct lto_input_block *ib_main)
1719 {
1720 HOST_WIDE_INT thunk_delta;
1721 thunk_delta = lto_input_sleb128 (ib_main);
1722 if (thunk_delta != 0)
1723 {
1724 gcc_assert (!edge->indirect_info);
1725 edge->indirect_info = cgraph_allocate_init_indirect_info ();
1726 edge->indirect_info->thunk_delta = thunk_delta;
1727 }
1728 }
1729
1730 /* Input the optimization summary of NODE. */
1731
1732 static void
1733 input_node_opt_summary (struct cgraph_node *node,
1734 struct lto_input_block *ib_main,
1735 struct data_in *data_in)
1736 {
1737 int i;
1738 int count;
1739 int bit;
1740 struct bitpack_d bp;
1741 struct cgraph_edge *e;
1742
1743 count = lto_input_uleb128 (ib_main);
1744 if (count)
1745 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
1746 for (i = 0; i < count; i++)
1747 {
1748 bit = lto_input_uleb128 (ib_main);
1749 bitmap_set_bit (node->clone.args_to_skip, bit);
1750 }
1751 count = lto_input_uleb128 (ib_main);
1752 if (count)
1753 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
1754 for (i = 0; i < count; i++)
1755 {
1756 bit = lto_input_uleb128 (ib_main);
1757 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
1758 }
1759 count = lto_input_uleb128 (ib_main);
1760 for (i = 0; i < count; i++)
1761 {
1762 int parm_num;
1763 tree parm;
1764 struct ipa_replace_map *map = ggc_alloc_ipa_replace_map ();
1765
1766 VEC_safe_push (ipa_replace_map_p, gc, node->clone.tree_map, map);
1767 for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm_num;
1768 parm = DECL_CHAIN (parm))
1769 parm_num --;
1770 map->parm_num = lto_input_uleb128 (ib_main);
1771 map->old_tree = NULL;
1772 map->new_tree = lto_input_tree (ib_main, data_in);
1773 bp = lto_input_bitpack (ib_main);
1774 map->replace_p = bp_unpack_value (&bp, 1);
1775 map->ref_p = bp_unpack_value (&bp, 1);
1776 }
1777 for (e = node->callees; e; e = e->next_callee)
1778 input_edge_opt_summary (e, ib_main);
1779 for (e = node->indirect_calls; e; e = e->next_callee)
1780 input_edge_opt_summary (e, ib_main);
1781 }
1782
1783 /* Read a cgraph optimization summary section in file FILE_DATA of length LEN with data DATA. */
1784
1785 static void
1786 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
1787 const char *data, size_t len, VEC (cgraph_node_ptr,
1788 heap) * nodes)
1789 {
1790 const struct lto_function_header *header =
1791 (const struct lto_function_header *) data;
1792 const int32_t cfg_offset = sizeof (struct lto_function_header);
1793 const int32_t main_offset = cfg_offset + header->cfg_size;
1794 const int32_t string_offset = main_offset + header->main_size;
1795 struct data_in *data_in;
1796 struct lto_input_block ib_main;
1797 unsigned int i;
1798 unsigned int count;
1799
1800 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
1801 header->main_size);
1802
1803 data_in =
1804 lto_data_in_create (file_data, (const char *) data + string_offset,
1805 header->string_size, NULL);
1806 count = lto_input_uleb128 (&ib_main);
1807
1808 for (i = 0; i < count; i++)
1809 {
1810 int ref = lto_input_uleb128 (&ib_main);
1811 input_node_opt_summary (VEC_index (cgraph_node_ptr, nodes, ref),
1812 &ib_main, data_in);
1813 }
1814 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
1815 len);
1816 lto_data_in_delete (data_in);
1817 }
1818
1819 /* Input optimization summary of cgraph. */
1820
1821 static void
1822 input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes)
1823 {
1824 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1825 struct lto_file_decl_data *file_data;
1826 unsigned int j = 0;
1827
1828 while ((file_data = file_data_vec[j++]))
1829 {
1830 size_t len;
1831 const char *data =
1832 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
1833 &len);
1834
1835 if (data)
1836 input_cgraph_opt_section (file_data, data, len, nodes);
1837 }
1838 }