gcc/lto-cgraph.c
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright 2009, 2010 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "expr.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "hashtab.h"
33 #include "langhooks.h"
34 #include "basic-block.h"
35 #include "tree-flow.h"
36 #include "cgraph.h"
37 #include "function.h"
38 #include "ggc.h"
39 #include "diagnostic-core.h"
40 #include "except.h"
41 #include "vec.h"
42 #include "timevar.h"
43 #include "output.h"
44 #include "pointer-set.h"
45 #include "lto-streamer.h"
46 #include "gcov-io.h"
47
48 static void output_varpool (cgraph_node_set, varpool_node_set);
49 static void output_cgraph_opt_summary (cgraph_node_set set);
50 static void input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes);
51
52
53 /* Cgraph streaming is organized as a set of records whose type
54 is indicated by a tag. */
55 enum LTO_cgraph_tags
56 {
57 /* Must leave 0 for the stopper. */
58
59 /* Cgraph node without body available. */
60 LTO_cgraph_unavail_node = 1,
61 /* Cgraph node with function body. */
62 LTO_cgraph_analyzed_node,
63 /* Cgraph edges. */
64 LTO_cgraph_edge,
65 LTO_cgraph_indirect_edge
66 };
67
68 /* Create a new cgraph encoder. */
69
70 lto_cgraph_encoder_t
71 lto_cgraph_encoder_new (void)
72 {
73 lto_cgraph_encoder_t encoder = XCNEW (struct lto_cgraph_encoder_d);
74 encoder->map = pointer_map_create ();
75 encoder->nodes = NULL;
76 encoder->body = pointer_set_create ();
77 return encoder;
78 }
79
80
81 /* Delete ENCODER and its components. */
82
83 void
84 lto_cgraph_encoder_delete (lto_cgraph_encoder_t encoder)
85 {
86 VEC_free (cgraph_node_ptr, heap, encoder->nodes);
87 pointer_map_destroy (encoder->map);
88 pointer_set_destroy (encoder->body);
89 free (encoder);
90 }
91
92
93 /* Return the existing reference number of NODE in the cgraph encoder
94 ENCODER. Assign a new reference if this is the first time
95 NODE is encoded. */
96
97 int
98 lto_cgraph_encoder_encode (lto_cgraph_encoder_t encoder,
99 struct cgraph_node *node)
100 {
101 int ref;
102 void **slot;
103
104 slot = pointer_map_contains (encoder->map, node);
105 if (!slot)
106 {
107 ref = VEC_length (cgraph_node_ptr, encoder->nodes);
108 slot = pointer_map_insert (encoder->map, node);
109 *slot = (void *) (intptr_t) ref;
110 VEC_safe_push (cgraph_node_ptr, heap, encoder->nodes, node);
111 }
112 else
113 ref = (int) (intptr_t) *slot;
114
115 return ref;
116 }
117
118 #define LCC_NOT_FOUND (-1)
119
120 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
121 or LCC_NOT_FOUND if it is not there. */
122
123 int
124 lto_cgraph_encoder_lookup (lto_cgraph_encoder_t encoder,
125 struct cgraph_node *node)
126 {
127 void **slot = pointer_map_contains (encoder->map, node);
128 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
129 }
130
131
132 /* Return the cgraph node corresponding to REF using ENCODER. */
133
134 struct cgraph_node *
135 lto_cgraph_encoder_deref (lto_cgraph_encoder_t encoder, int ref)
136 {
137 if (ref == LCC_NOT_FOUND)
138 return NULL;
139
140 return VEC_index (cgraph_node_ptr, encoder->nodes, ref);
141 }
142
143
144 /* Return TRUE if we should encode the body of NODE (if any). */
145
146 bool
147 lto_cgraph_encoder_encode_body_p (lto_cgraph_encoder_t encoder,
148 struct cgraph_node *node)
149 {
150 return pointer_set_contains (encoder->body, node);
151 }
152
153 /* Specify that we should encode the body of NODE (if any). */
154
155 static void
156 lto_set_cgraph_encoder_encode_body (lto_cgraph_encoder_t encoder,
157 struct cgraph_node *node)
158 {
159 pointer_set_insert (encoder->body, node);
160 }
161
162 /* Create a new varpool encoder. */
163
164 lto_varpool_encoder_t
165 lto_varpool_encoder_new (void)
166 {
167 lto_varpool_encoder_t encoder = XCNEW (struct lto_varpool_encoder_d);
168 encoder->map = pointer_map_create ();
169 encoder->initializer = pointer_set_create ();
170 encoder->nodes = NULL;
171 return encoder;
172 }
173
174
175 /* Delete ENCODER and its components. */
176
177 void
178 lto_varpool_encoder_delete (lto_varpool_encoder_t encoder)
179 {
180 VEC_free (varpool_node_ptr, heap, encoder->nodes);
181 pointer_map_destroy (encoder->map);
182 pointer_set_destroy (encoder->initializer);
183 free (encoder);
184 }
185
186
187 /* Return the existing reference number of NODE in the varpool encoder
188 ENCODER. Assign a new reference if this is the first time
189 NODE is encoded. */
190
191 int
192 lto_varpool_encoder_encode (lto_varpool_encoder_t encoder,
193 struct varpool_node *node)
194 {
195 int ref;
196 void **slot;
197
198 slot = pointer_map_contains (encoder->map, node);
199 if (!slot)
200 {
201 ref = VEC_length (varpool_node_ptr, encoder->nodes);
202 slot = pointer_map_insert (encoder->map, node);
203 *slot = (void *) (intptr_t) ref;
204 VEC_safe_push (varpool_node_ptr, heap, encoder->nodes, node);
205 }
206 else
207 ref = (int) (intptr_t) *slot;
208
209 return ref;
210 }
211
212 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
213 or LCC_NOT_FOUND if it is not there. */
214
215 int
216 lto_varpool_encoder_lookup (lto_varpool_encoder_t encoder,
217 struct varpool_node *node)
218 {
219 void **slot = pointer_map_contains (encoder->map, node);
220 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
221 }
222
223
224 /* Return the varpool node corresponding to REF using ENCODER. */
225
226 struct varpool_node *
227 lto_varpool_encoder_deref (lto_varpool_encoder_t encoder, int ref)
228 {
229 if (ref == LCC_NOT_FOUND)
230 return NULL;
231
232 return VEC_index (varpool_node_ptr, encoder->nodes, ref);
233 }
234
235
236 /* Return TRUE if we should encode initializer of NODE (if any). */
237
238 bool
239 lto_varpool_encoder_encode_initializer_p (lto_varpool_encoder_t encoder,
240 struct varpool_node *node)
241 {
242 return pointer_set_contains (encoder->initializer, node);
243 }
244
245 /* Specify that we should encode the initializer of NODE (if any). */
246
247 static void
248 lto_set_varpool_encoder_encode_initializer (lto_varpool_encoder_t encoder,
249 struct varpool_node *node)
250 {
251 pointer_set_insert (encoder->initializer, node);
252 }
253
254 /* Output the cgraph EDGE to OB using ENCODER. */
255
256 static void
257 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
258 lto_cgraph_encoder_t encoder)
259 {
260 unsigned int uid;
261 intptr_t ref;
262 struct bitpack_d bp;
263
264 if (edge->indirect_unknown_callee)
265 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_indirect_edge);
266 else
267 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_edge);
268
269 ref = lto_cgraph_encoder_lookup (encoder, edge->caller);
270 gcc_assert (ref != LCC_NOT_FOUND);
271 lto_output_sleb128_stream (ob->main_stream, ref);
272
273 if (!edge->indirect_unknown_callee)
274 {
275 ref = lto_cgraph_encoder_lookup (encoder, edge->callee);
276 gcc_assert (ref != LCC_NOT_FOUND);
277 lto_output_sleb128_stream (ob->main_stream, ref);
278 }
279
280 lto_output_sleb128_stream (ob->main_stream, edge->count);
281
282 bp = bitpack_create (ob->main_stream);
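/* When the caller's body is not available (for instance at WPA time), use
   the statement uid remembered from the input stream; otherwise use the uid
   of the call statement itself.  */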
283 uid = (!gimple_has_body_p (edge->caller->decl)
284 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt));
285 bp_pack_value (&bp, uid, HOST_BITS_PER_INT);
286 bp_pack_value (&bp, edge->inline_failed, HOST_BITS_PER_INT);
287 bp_pack_value (&bp, edge->frequency, HOST_BITS_PER_INT);
288 bp_pack_value (&bp, edge->loop_nest, 30);
289 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
290 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
291 bp_pack_value (&bp, edge->can_throw_external, 1);
292 if (edge->indirect_unknown_callee)
293 {
294 int flags = edge->indirect_info->ecf_flags;
295 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
296 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
297 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
298 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
299 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
300 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
301 /* Flags that should not appear on indirect calls. */
302 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
303 | ECF_MAY_BE_ALLOCA
304 | ECF_SIBCALL
305 | ECF_LEAF
306 | ECF_NOVOPS)));
307 }
308 lto_output_bitpack (&bp);
309 }
310
311 /* Return true if LIST contains references from other partitions. */
312
313 bool
314 referenced_from_other_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
315 varpool_node_set vset)
316 {
317 int i;
318 struct ipa_ref *ref;
319 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
320 {
321 if (ref->refering_type == IPA_REF_CGRAPH)
322 {
323 if (ipa_ref_refering_node (ref)->in_other_partition
324 || !cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
325 return true;
326 }
327 else
328 {
329 if (ipa_ref_refering_varpool_node (ref)->in_other_partition
330 || !varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
331 vset))
332 return true;
333 }
334 }
335 return false;
336 }
337
338 /* Return true when NODE is reachable from another partition. */
339
340 bool
341 reachable_from_other_partition_p (struct cgraph_node *node, cgraph_node_set set)
342 {
343 struct cgraph_edge *e;
344 if (!node->analyzed)
345 return false;
346 if (node->global.inlined_to)
347 return false;
348 for (e = node->callers; e; e = e->next_caller)
349 if (e->caller->in_other_partition
350 || !cgraph_node_in_set_p (e->caller, set))
351 return true;
352 return false;
353 }
354
355 /* Return true if LIST contains references from this partition. */
356
357 bool
358 referenced_from_this_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
359 varpool_node_set vset)
360 {
361 int i;
362 struct ipa_ref *ref;
363 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
364 {
365 if (ref->refering_type == IPA_REF_CGRAPH)
366 {
367 if (cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
368 return true;
369 }
370 else
371 {
372 if (varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
373 vset))
374 return true;
375 }
376 }
377 return false;
378 }
379
380 /* Return true when NODE is reachable from this partition. */
381
382 bool
383 reachable_from_this_partition_p (struct cgraph_node *node, cgraph_node_set set)
384 {
385 struct cgraph_edge *e;
386 if (!node->analyzed)
387 return false;
388 if (node->global.inlined_to)
389 return false;
390 for (e = node->callers; e; e = e->next_caller)
391 if (cgraph_node_in_set_p (e->caller, set))
392 return true;
393 return false;
394 }
395
396 /* Output the cgraph NODE to OB. ENCODER is used to find the
397 reference number of NODE->inlined_to. SET is the set of nodes we
398 are writing to the current file. If NODE is not in SET, then NODE
399 is a boundary of a cgraph_node_set and we pretend NODE just has a
400 decl and no callees. ENCODER is also used to look up nodes that
401 have already been written, which is how we determine whether NODE
402 is a clone of a previously written node. */
403
404 static void
405 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
406 lto_cgraph_encoder_t encoder, cgraph_node_set set,
407 varpool_node_set vset)
408 {
409 unsigned int tag;
410 struct bitpack_d bp;
411 bool boundary_p;
412 intptr_t ref;
413 bool in_other_partition = false;
414 struct cgraph_node *clone_of;
415
416 boundary_p = !cgraph_node_in_set_p (node, set);
417
418 if (node->analyzed && !boundary_p)
419 tag = LTO_cgraph_analyzed_node;
420 else
421 tag = LTO_cgraph_unavail_node;
422
423 lto_output_uleb128_stream (ob->main_stream, tag);
424
425 /* In WPA mode, we only output part of the call-graph. Also, we
426 fake cgraph node attributes. There are two cases we care about:
427
428 Boundary nodes: There are nodes that are not part of SET but are
429 called from within SET. We artificially make them look like
430 externally visible nodes with no function body.
431
432 Cherry-picked nodes: These are nodes we pulled from other
433 translation units into SET during IPA-inlining. We make them
434 local static nodes to prevent clashes with other local statics. */
435 if (boundary_p && node->analyzed)
436 {
437 /* Inline clones can not be part of the boundary.
438 gcc_assert (!node->global.inlined_to);
439
440 FIXME: At the moment they can be, when a partition contains an inline
441 clone that is a clone of an inline clone from outside the partition. We
442 could reshape the clone tree and make another node the root, but it
443 needs a bit of extra work and will be promptly done by cgraph_remove_node
444 after reading back. */
445 in_other_partition = 1;
446 }
447
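/* Pick the clone origin to stream: walk through earlier sibling clones and
   then up the clone_of chain until we find a node that is in the encoder,
   or run out of candidates.  */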
448 clone_of = node->clone_of;
449 while (clone_of
450 && (ref = lto_cgraph_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
451 if (clone_of->prev_sibling_clone)
452 clone_of = clone_of->prev_sibling_clone;
453 else
454 clone_of = clone_of->clone_of;
455
456 if (tag == LTO_cgraph_analyzed_node)
457 gcc_assert (clone_of || !node->clone_of);
458 if (!clone_of)
459 lto_output_sleb128_stream (ob->main_stream, LCC_NOT_FOUND);
460 else
461 lto_output_sleb128_stream (ob->main_stream, ref);
462
463
464 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
465 lto_output_sleb128_stream (ob->main_stream, node->count);
466 lto_output_sleb128_stream (ob->main_stream, node->count_materialization_scale);
467
468 if (tag == LTO_cgraph_analyzed_node)
469 {
470 lto_output_sleb128_stream (ob->main_stream,
471 node->local.inline_summary.estimated_self_stack_size);
472 lto_output_sleb128_stream (ob->main_stream,
473 node->local.inline_summary.self_size);
474 lto_output_sleb128_stream (ob->main_stream,
475 node->local.inline_summary.size_inlining_benefit);
476 lto_output_sleb128_stream (ob->main_stream,
477 node->local.inline_summary.self_time);
478 lto_output_sleb128_stream (ob->main_stream,
479 node->local.inline_summary.time_inlining_benefit);
480 if (node->global.inlined_to)
481 {
482 ref = lto_cgraph_encoder_lookup (encoder, node->global.inlined_to);
483 gcc_assert (ref != LCC_NOT_FOUND);
484 }
485 else
486 ref = LCC_NOT_FOUND;
487
488 lto_output_sleb128_stream (ob->main_stream, ref);
489 }
490
491 if (node->same_comdat_group && !boundary_p)
492 {
493 ref = lto_cgraph_encoder_lookup (encoder, node->same_comdat_group);
494 gcc_assert (ref != LCC_NOT_FOUND);
495 }
496 else
497 ref = LCC_NOT_FOUND;
498 lto_output_sleb128_stream (ob->main_stream, ref);
499
500 bp = bitpack_create (ob->main_stream);
501 bp_pack_value (&bp, node->local.local, 1);
502 bp_pack_value (&bp, node->local.externally_visible, 1);
503 bp_pack_value (&bp, node->local.finalized, 1);
504 bp_pack_value (&bp, node->local.inlinable, 1);
505 bp_pack_value (&bp, node->local.versionable, 1);
506 bp_pack_value (&bp, node->local.disregard_inline_limits, 1);
507 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
508 bp_pack_value (&bp, node->local.vtable_method, 1);
509 bp_pack_value (&bp, node->needed, 1);
510 bp_pack_value (&bp, node->address_taken, 1);
511 bp_pack_value (&bp, node->abstract_and_needed, 1);
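/* This bit is read back by input_overwrite_node as
   reachable_from_other_partition: set for an analyzed, non-external,
   non-COMDAT node that is reachable or referenced from outside SET.  */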
512 bp_pack_value (&bp, tag == LTO_cgraph_analyzed_node
513 && !DECL_EXTERNAL (node->decl)
514 && !DECL_COMDAT (node->decl)
515 && (reachable_from_other_partition_p (node, set)
516 || referenced_from_other_partition_p (&node->ref_list, set, vset)), 1);
517 bp_pack_value (&bp, node->lowered, 1);
518 bp_pack_value (&bp, in_other_partition, 1);
519 bp_pack_value (&bp, node->alias, 1);
520 bp_pack_value (&bp, node->finalized_by_frontend, 1);
521 bp_pack_value (&bp, node->frequency, 2);
522 bp_pack_value (&bp, node->only_called_at_startup, 1);
523 bp_pack_value (&bp, node->only_called_at_exit, 1);
524 lto_output_bitpack (&bp);
525 lto_output_uleb128_stream (ob->main_stream, node->resolution);
526
527 if (node->same_body)
528 {
529 struct cgraph_node *alias;
530 unsigned long alias_count = 1;
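/* Count the same-body aliases; the loop leaves ALIAS at the last element so
   the do-while below can stream the chain from last to first via the
   previous pointers.  */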
531 for (alias = node->same_body; alias->next; alias = alias->next)
532 alias_count++;
533 lto_output_uleb128_stream (ob->main_stream, alias_count);
534 do
535 {
536 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
537 alias->decl);
538 if (alias->thunk.thunk_p)
539 {
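/* Encode the thunk flags in one uleb128: bit 0 marks a thunk (so the value
   is nonzero), bit 1 is this_adjusting and bit 2 is virtual_offset_p.  A
   plain same-body alias streams 0 in the else branch below.  */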
540 lto_output_uleb128_stream
541 (ob->main_stream,
542 1 + (alias->thunk.this_adjusting != 0) * 2
543 + (alias->thunk.virtual_offset_p != 0) * 4);
544 lto_output_uleb128_stream (ob->main_stream,
545 alias->thunk.fixed_offset);
546 lto_output_uleb128_stream (ob->main_stream,
547 alias->thunk.virtual_value);
548 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
549 alias->thunk.alias);
550 }
551 else
552 {
553 lto_output_uleb128_stream (ob->main_stream, 0);
554 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
555 alias->thunk.alias);
556 }
557 lto_output_uleb128_stream (ob->main_stream, alias->resolution);
558 alias = alias->previous;
559 }
560 while (alias);
561 }
562 else
563 lto_output_uleb128_stream (ob->main_stream, 0);
564 }
565
566 /* Output the varpool NODE to OB.
567 If NODE is not in SET, then NODE is a boundary. */
568
569 static void
570 lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
571 lto_varpool_encoder_t varpool_encoder,
572 cgraph_node_set set, varpool_node_set vset)
573 {
574 bool boundary_p = !varpool_node_in_set_p (node, vset) && node->analyzed;
575 struct bitpack_d bp;
576 struct varpool_node *alias;
577 int count = 0;
578 int ref;
579
580 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
581 bp = bitpack_create (ob->main_stream);
582 bp_pack_value (&bp, node->externally_visible, 1);
583 bp_pack_value (&bp, node->force_output, 1);
584 bp_pack_value (&bp, node->finalized, 1);
585 bp_pack_value (&bp, node->alias, 1);
586 gcc_assert (!node->alias || !node->extra_name);
587 gcc_assert (node->finalized || !node->analyzed);
588 gcc_assert (node->needed);
589 /* Constant pool initializers can be de-unified into individual ltrans units.
590 FIXME: Alternatively, at -Os we may want to avoid generating the local
591 labels for them and share them across LTRANS partitions. */
592 if (DECL_IN_CONSTANT_POOL (node->decl)
593 && !DECL_COMDAT (node->decl))
594 {
595 bp_pack_value (&bp, 0, 1); /* used_from_other_partition. */
596 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
597 }
598 else
599 {
600 bp_pack_value (&bp, node->analyzed
601 && referenced_from_other_partition_p (&node->ref_list,
602 set, vset), 1);
603 bp_pack_value (&bp, boundary_p, 1); /* in_other_partition. */
604 }
605 /* Also emit any extra name aliases. */
606 for (alias = node->extra_name; alias; alias = alias->next)
607 count++;
608 bp_pack_value (&bp, count != 0, 1);
609 lto_output_bitpack (&bp);
610 if (node->same_comdat_group && !boundary_p)
611 {
612 ref = lto_varpool_encoder_lookup (varpool_encoder, node->same_comdat_group);
613 gcc_assert (ref != LCC_NOT_FOUND);
614 }
615 else
616 ref = LCC_NOT_FOUND;
617 lto_output_sleb128_stream (ob->main_stream, ref);
618 lto_output_uleb128_stream (ob->main_stream, node->resolution);
619
620 if (count)
621 {
622 lto_output_uleb128_stream (ob->main_stream, count);
623 for (alias = node->extra_name; alias; alias = alias->next)
624 {
625 lto_output_var_decl_index (ob->decl_state, ob->main_stream, alias->decl);
626 lto_output_uleb128_stream (ob->main_stream, alias->resolution);
627 }
628 }
629 }
630
631 /* Output the IPA reference REF to OB. ENCODER and VARPOOL_ENCODER are
632 used to look up the referenced cgraph and varpool nodes. */
633
634 static void
635 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
636 lto_cgraph_encoder_t encoder,
637 lto_varpool_encoder_t varpool_encoder)
638 {
639 struct bitpack_d bp;
640 bp = bitpack_create (ob->main_stream);
641 bp_pack_value (&bp, ref->refered_type, 1);
642 bp_pack_value (&bp, ref->use, 2);
643 lto_output_bitpack (&bp);
644 if (ref->refered_type == IPA_REF_CGRAPH)
645 {
646 int nref = lto_cgraph_encoder_lookup (encoder, ipa_ref_node (ref));
647 gcc_assert (nref != LCC_NOT_FOUND);
648 lto_output_sleb128_stream (ob->main_stream, nref);
649 }
650 else
651 {
652 int nref = lto_varpool_encoder_lookup (varpool_encoder,
653 ipa_ref_varpool_node (ref));
654 gcc_assert (nref != LCC_NOT_FOUND);
655 lto_output_sleb128_stream (ob->main_stream, nref);
656 }
657 }
658
659 /* Stream out profile_summary to OB. */
660
661 static void
662 output_profile_summary (struct lto_simple_output_block *ob)
663 {
664 if (profile_info)
665 {
666 /* We do not output num, sum_all and run_max; they are not used by
667 GCC profile feedback and they are difficult to merge from multiple
668 units. */
669 gcc_assert (profile_info->runs);
670 lto_output_uleb128_stream (ob->main_stream, profile_info->runs);
671 lto_output_uleb128_stream (ob->main_stream, profile_info->sum_max);
672 }
673 else
674 lto_output_uleb128_stream (ob->main_stream, 0);
675 }
676
677 /* Add NODE into ENCODER as well as the nodes it is cloned from.
678 Do it in such a way that the nodes NODE is cloned from appear first. */
679
680 static void
681 add_node_to (lto_cgraph_encoder_t encoder, struct cgraph_node *node,
682 bool include_body)
683 {
684 if (node->clone_of)
685 add_node_to (encoder, node->clone_of, include_body);
686 else if (include_body)
687 lto_set_cgraph_encoder_encode_body (encoder, node);
688 lto_cgraph_encoder_encode (encoder, node);
689 }
690
691 /* Add all references in LIST to encoders. */
692
693 static void
694 add_references (lto_cgraph_encoder_t encoder,
695 lto_varpool_encoder_t varpool_encoder,
696 struct ipa_ref_list *list)
697 {
698 int i;
699 struct ipa_ref *ref;
700 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
701 if (ref->refered_type == IPA_REF_CGRAPH)
702 add_node_to (encoder, ipa_ref_node (ref), false);
703 else
704 {
705 struct varpool_node *vnode = ipa_ref_varpool_node (ref);
706 lto_varpool_encoder_encode (varpool_encoder, vnode);
707 }
708 }
709
710 /* Output all callees or indirect outgoing edges. EDGE must be the first such
711 edge. */
712
713 static void
714 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
715 struct lto_simple_output_block *ob,
716 lto_cgraph_encoder_t encoder)
717 {
718 if (!edge)
719 return;
720
721 /* Output edges in backward direction, so the reconstructed callgraph matches
722 and it is easy to associate call sites with the IPA pass summaries. */
723 while (edge->next_callee)
724 edge = edge->next_callee;
725 for (; edge; edge = edge->prev_callee)
726 lto_output_edge (ob, edge, encoder);
727 }
728
729 /* Output the IPA reference lists of the cgraph nodes in SET and the varpool nodes in VSET. */
730
731 static void
732 output_refs (cgraph_node_set set, varpool_node_set vset,
733 lto_cgraph_encoder_t encoder,
734 lto_varpool_encoder_t varpool_encoder)
735 {
736 cgraph_node_set_iterator csi;
737 varpool_node_set_iterator vsi;
738 struct lto_simple_output_block *ob;
739 int count;
740 struct ipa_ref *ref;
741 int i;
742
743 ob = lto_create_simple_output_block (LTO_section_refs);
744
745 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
746 {
747 struct cgraph_node *node = csi_node (csi);
748
749 count = ipa_ref_list_nreferences (&node->ref_list);
750 if (count)
751 {
752 lto_output_uleb128_stream (ob->main_stream, count);
753 lto_output_uleb128_stream (ob->main_stream,
754 lto_cgraph_encoder_lookup (encoder, node));
755 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
756 lto_output_ref (ob, ref, encoder, varpool_encoder);
757 }
758 }
759
760 lto_output_uleb128_stream (ob->main_stream, 0);
761
762 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
763 {
764 struct varpool_node *node = vsi_node (vsi);
765
766 count = ipa_ref_list_nreferences (&node->ref_list);
767 if (count)
768 {
769 lto_output_uleb128_stream (ob->main_stream, count);
770 lto_output_uleb128_stream (ob->main_stream,
771 lto_varpool_encoder_lookup (varpool_encoder,
772 node));
773 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
774 lto_output_ref (ob, ref, encoder, varpool_encoder);
775 }
776 }
777
778 lto_output_uleb128_stream (ob->main_stream, 0);
779
780 lto_destroy_simple_output_block (ob);
781 }
782
783 /* Find out all cgraph and varpool nodes we want to encode in the current unit
784 and insert them into the encoders. */
785 void
786 compute_ltrans_boundary (struct lto_out_decl_state *state,
787 cgraph_node_set set, varpool_node_set vset)
788 {
789 struct cgraph_node *node;
790 cgraph_node_set_iterator csi;
791 varpool_node_set_iterator vsi;
792 struct cgraph_edge *edge;
793 int i;
794 lto_cgraph_encoder_t encoder;
795 lto_varpool_encoder_t varpool_encoder;
796
797 encoder = state->cgraph_node_encoder = lto_cgraph_encoder_new ();
798 varpool_encoder = state->varpool_node_encoder = lto_varpool_encoder_new ();
799
800 /* Go over all the nodes in SET and assign references. */
801 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
802 {
803 node = csi_node (csi);
804 add_node_to (encoder, node, true);
805 add_references (encoder, varpool_encoder, &node->ref_list);
806 }
807 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
808 {
809 struct varpool_node *vnode = vsi_node (vsi);
810 gcc_assert (!vnode->alias);
811 lto_varpool_encoder_encode (varpool_encoder, vnode);
812 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
813 add_references (encoder, varpool_encoder, &vnode->ref_list);
814 }
815 /* Also pickle in the initializers of all referenced read-only variables
816 to help folding. Constant pool variables are not shared, so we must
817 pickle those too. */
818 for (i = 0; i < lto_varpool_encoder_size (varpool_encoder); i++)
819 {
820 struct varpool_node *vnode = lto_varpool_encoder_deref (varpool_encoder, i);
821 if (DECL_INITIAL (vnode->decl)
822 && !lto_varpool_encoder_encode_initializer_p (varpool_encoder,
823 vnode)
824 && const_value_known_p (vnode->decl))
825 {
826 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
827 add_references (encoder, varpool_encoder, &vnode->ref_list);
828 }
829 }
830
831 /* Go over all the nodes again to include callees that are not in
832 SET. */
833 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
834 {
835 node = csi_node (csi);
836 for (edge = node->callees; edge; edge = edge->next_callee)
837 {
838 struct cgraph_node *callee = edge->callee;
839 if (!cgraph_node_in_set_p (callee, set))
840 {
841 /* We should have moved all the inlines. */
842 gcc_assert (!callee->global.inlined_to);
843 add_node_to (encoder, callee, false);
844 }
845 }
846 }
847 }
848
849 /* Output the part of the cgraph in SET. */
850
851 void
852 output_cgraph (cgraph_node_set set, varpool_node_set vset)
853 {
854 struct cgraph_node *node;
855 struct lto_simple_output_block *ob;
856 cgraph_node_set_iterator csi;
857 int i, n_nodes;
858 lto_cgraph_encoder_t encoder;
859 lto_varpool_encoder_t varpool_encoder;
860 struct cgraph_asm_node *can;
861 static bool asm_nodes_output = false;
862
863 if (flag_wpa)
864 output_cgraph_opt_summary (set);
865
866 ob = lto_create_simple_output_block (LTO_section_cgraph);
867
868 output_profile_summary (ob);
869
870 /* An encoder for cgraph nodes should have been created by
871 ipa_write_summaries_1. */
872 gcc_assert (ob->decl_state->cgraph_node_encoder);
873 gcc_assert (ob->decl_state->varpool_node_encoder);
874 encoder = ob->decl_state->cgraph_node_encoder;
875 varpool_encoder = ob->decl_state->varpool_node_encoder;
876
877 /* Write out the nodes. We must first output a node and then its clones;
878 otherwise, when reading back a clone, there would be nothing to clone
879 from. */
880 n_nodes = lto_cgraph_encoder_size (encoder);
881 for (i = 0; i < n_nodes; i++)
882 {
883 node = lto_cgraph_encoder_deref (encoder, i);
884 lto_output_node (ob, node, encoder, set, vset);
885 }
886
887 /* Go over the nodes in SET again to write edges. */
888 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
889 {
890 node = csi_node (csi);
891 output_outgoing_cgraph_edges (node->callees, ob, encoder);
892 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
893 }
894
895 lto_output_uleb128_stream (ob->main_stream, 0);
896
897 /* Emit toplevel asms.
898 When doing WPA we must output every asm just once. Since we do not partition asm
899 nodes at all, output them to the first partition. This is a kind of hack, but it
900 should work well. */
901 if (!asm_nodes_output)
902 {
903 asm_nodes_output = true;
904 for (can = cgraph_asm_nodes; can; can = can->next)
905 {
906 int len = TREE_STRING_LENGTH (can->asm_str);
907 lto_output_uleb128_stream (ob->main_stream, len);
908 for (i = 0; i < len; ++i)
909 lto_output_1_stream (ob->main_stream,
910 TREE_STRING_POINTER (can->asm_str)[i]);
911 }
912 }
913
914 lto_output_uleb128_stream (ob->main_stream, 0);
915
916 lto_destroy_simple_output_block (ob);
917 output_varpool (set, vset);
918 output_refs (set, vset, encoder, varpool_encoder);
919 }
920
921 /* Overwrite the information in NODE based on FILE_DATA, TAG, FLAGS,
922 STACK_SIZE, SELF_TIME and SELF_SIZE. This is called either to initialize
923 NODE or to replace the values in it, for instance because the first
924 time we saw it, the function body was not available but now it
925 is. BP is a bitpack with all the bitflags for NODE read from the
926 stream. */
927
928 static void
929 input_overwrite_node (struct lto_file_decl_data *file_data,
930 struct cgraph_node *node,
931 enum LTO_cgraph_tags tag,
932 struct bitpack_d *bp,
933 unsigned int stack_size,
934 unsigned int self_time,
935 unsigned int time_inlining_benefit,
936 unsigned int self_size,
937 unsigned int size_inlining_benefit,
938 enum ld_plugin_symbol_resolution resolution)
939 {
940 node->aux = (void *) tag;
941 node->local.inline_summary.estimated_self_stack_size = stack_size;
942 node->local.inline_summary.self_time = self_time;
943 node->local.inline_summary.time_inlining_benefit = time_inlining_benefit;
944 node->local.inline_summary.self_size = self_size;
945 node->local.inline_summary.size_inlining_benefit = size_inlining_benefit;
946 node->global.time = self_time;
947 node->global.size = self_size;
948 node->global.estimated_stack_size = stack_size;
949 node->global.estimated_growth = INT_MIN;
950 node->local.lto_file_data = file_data;
951
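/* The flags below must be unpacked in exactly the order in which
   lto_output_node packed them.  */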
952 node->local.local = bp_unpack_value (bp, 1);
953 node->local.externally_visible = bp_unpack_value (bp, 1);
954 node->local.finalized = bp_unpack_value (bp, 1);
955 node->local.inlinable = bp_unpack_value (bp, 1);
956 node->local.versionable = bp_unpack_value (bp, 1);
957 node->local.disregard_inline_limits = bp_unpack_value (bp, 1);
958 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
959 node->local.vtable_method = bp_unpack_value (bp, 1);
960 node->needed = bp_unpack_value (bp, 1);
961 node->address_taken = bp_unpack_value (bp, 1);
962 node->abstract_and_needed = bp_unpack_value (bp, 1);
963 node->reachable_from_other_partition = bp_unpack_value (bp, 1);
964 node->lowered = bp_unpack_value (bp, 1);
965 node->analyzed = tag == LTO_cgraph_analyzed_node;
966 node->in_other_partition = bp_unpack_value (bp, 1);
967 if (node->in_other_partition
968 /* Avoid updating the decl when we are seeing just an inline clone.
969 When inlining a function that has functions already inlined into it,
970 we produce clones of inline clones.
971
972 WPA partitioning might put each clone into a different unit and
973 we might end up streaming an inline clone from another partition
974 to support the clone we are interested in. */
975 && (!node->clone_of
976 || node->clone_of->decl != node->decl))
977 {
978 DECL_EXTERNAL (node->decl) = 1;
979 TREE_STATIC (node->decl) = 0;
980 }
981 node->alias = bp_unpack_value (bp, 1);
982 node->finalized_by_frontend = bp_unpack_value (bp, 1);
983 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
984 node->only_called_at_startup = bp_unpack_value (bp, 1);
985 node->only_called_at_exit = bp_unpack_value (bp, 1);
986 node->resolution = resolution;
987 }
988
989 /* Output the part of the varpool in SET and VSET. */
990
991 static void
992 output_varpool (cgraph_node_set set, varpool_node_set vset)
993 {
994 struct lto_simple_output_block *ob = lto_create_simple_output_block (LTO_section_varpool);
995 lto_varpool_encoder_t varpool_encoder = ob->decl_state->varpool_node_encoder;
996 int len = lto_varpool_encoder_size (varpool_encoder), i;
997
998 lto_output_uleb128_stream (ob->main_stream, len);
999
1000 /* Write out the nodes in the order they were encoded, so that the indices
1001 used for same_comdat_group and IPA references resolve to the same nodes
1002 on the reading side. */
1003 for (i = 0; i < len; i++)
1004 {
1005 lto_output_varpool_node (ob, lto_varpool_encoder_deref (varpool_encoder, i),
1006 varpool_encoder,
1007 set, vset);
1008 }
1009
1010 lto_destroy_simple_output_block (ob);
1011 }
1012
1013 /* Read a node from input_block IB. TAG is the node's tag just read.
1014 Return the node read or overwritten. */
1015
1016 static struct cgraph_node *
1017 input_node (struct lto_file_decl_data *file_data,
1018 struct lto_input_block *ib,
1019 enum LTO_cgraph_tags tag,
1020 VEC(cgraph_node_ptr, heap) *nodes)
1021 {
1022 tree fn_decl;
1023 struct cgraph_node *node;
1024 struct bitpack_d bp;
1025 int stack_size = 0;
1026 unsigned decl_index;
1027 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
1028 int self_time = 0;
1029 int self_size = 0;
1030 int time_inlining_benefit = 0;
1031 int size_inlining_benefit = 0;
1032 unsigned long same_body_count = 0;
1033 int clone_ref;
1034 enum ld_plugin_symbol_resolution resolution;
1035
1036 clone_ref = lto_input_sleb128 (ib);
1037
1038 decl_index = lto_input_uleb128 (ib);
1039 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1040
1041 if (clone_ref != LCC_NOT_FOUND)
1042 {
1043 node = cgraph_clone_node (VEC_index (cgraph_node_ptr, nodes, clone_ref), fn_decl,
1044 0, CGRAPH_FREQ_BASE, 0, false, NULL);
1045 }
1046 else
1047 node = cgraph_node (fn_decl);
1048
1049 node->count = lto_input_sleb128 (ib);
1050 node->count_materialization_scale = lto_input_sleb128 (ib);
1051
1052 if (tag == LTO_cgraph_analyzed_node)
1053 {
1054 stack_size = lto_input_sleb128 (ib);
1055 self_size = lto_input_sleb128 (ib);
1056 size_inlining_benefit = lto_input_sleb128 (ib);
1057 self_time = lto_input_sleb128 (ib);
1058 time_inlining_benefit = lto_input_sleb128 (ib);
1059
1060 ref = lto_input_sleb128 (ib);
1061 }
1062
1063 ref2 = lto_input_sleb128 (ib);
1064
1065 /* Make sure that we have not read this node before. Nodes that
1066 have already been read will have their tag stored in the 'aux'
1067 field. Since built-in functions can be referenced in multiple
1068 functions, they are expected to be read more than once. */
1069 if (node->aux && !DECL_IS_BUILTIN (node->decl))
1070 internal_error ("bytecode stream: found multiple instances of cgraph "
1071 "node %d", node->uid);
1072
1073 bp = lto_input_bitpack (ib);
1074 resolution = (enum ld_plugin_symbol_resolution)lto_input_uleb128 (ib);
1075 input_overwrite_node (file_data, node, tag, &bp, stack_size, self_time,
1076 time_inlining_benefit, self_size,
1077 size_inlining_benefit, resolution);
1078
1079 /* Store a reference for now, and fix up later to be a pointer. */
1080 node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
1081
1082 /* Store a reference for now, and fix up later to be a pointer. */
1083 node->same_comdat_group = (cgraph_node_ptr) (intptr_t) ref2;
1084
1085 same_body_count = lto_input_uleb128 (ib);
1086 while (same_body_count-- > 0)
1087 {
1088 tree alias_decl;
1089 int type;
1090 struct cgraph_node *alias;
1091 decl_index = lto_input_uleb128 (ib);
1092 alias_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1093 type = lto_input_uleb128 (ib);
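/* TYPE mirrors the encoding in lto_output_node: 0 means a plain same-body
   alias; otherwise bit 1 is this_adjusting and bit 2 is virtual_offset_p of
   a thunk.  */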
1094 if (!type)
1095 {
1096 tree real_alias;
1097 decl_index = lto_input_uleb128 (ib);
1098 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1099 alias = cgraph_same_body_alias (alias_decl, real_alias);
1100 }
1101 else
1102 {
1103 HOST_WIDE_INT fixed_offset = lto_input_uleb128 (ib);
1104 HOST_WIDE_INT virtual_value = lto_input_uleb128 (ib);
1105 tree real_alias;
1106 decl_index = lto_input_uleb128 (ib);
1107 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1108 alias = cgraph_add_thunk (alias_decl, fn_decl, type & 2, fixed_offset,
1109 virtual_value,
1110 (type & 4) ? size_int (virtual_value) : NULL_TREE,
1111 real_alias);
1112 }
1113 alias->resolution = (enum ld_plugin_symbol_resolution)lto_input_uleb128 (ib);
1114 }
1115 return node;
1116 }
1117
1118 /* Read a varpool node from input_block IB.
1119 Return the node read or overwritten. */
1120
1121 static struct varpool_node *
1122 input_varpool_node (struct lto_file_decl_data *file_data,
1123 struct lto_input_block *ib)
1124 {
1125 int decl_index;
1126 tree var_decl;
1127 struct varpool_node *node;
1128 struct bitpack_d bp;
1129 bool aliases_p;
1130 int count;
1131 int ref = LCC_NOT_FOUND;
1132
1133 decl_index = lto_input_uleb128 (ib);
1134 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1135 node = varpool_node (var_decl);
1136 node->lto_file_data = file_data;
1137
1138 bp = lto_input_bitpack (ib);
1139 node->externally_visible = bp_unpack_value (&bp, 1);
1140 node->force_output = bp_unpack_value (&bp, 1);
1141 node->finalized = bp_unpack_value (&bp, 1);
1142 node->alias = bp_unpack_value (&bp, 1);
1143 node->analyzed = node->finalized;
1144 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1145 node->in_other_partition = bp_unpack_value (&bp, 1);
1146 if (node->in_other_partition)
1147 {
1148 DECL_EXTERNAL (node->decl) = 1;
1149 TREE_STATIC (node->decl) = 0;
1150 }
1151 aliases_p = bp_unpack_value (&bp, 1);
1152 if (node->finalized)
1153 varpool_mark_needed_node (node);
1154 ref = lto_input_sleb128 (ib);
1155 /* Store a reference for now, and fix up later to be a pointer. */
1156 node->same_comdat_group = (struct varpool_node *) (intptr_t) ref;
1157 node->resolution = (enum ld_plugin_symbol_resolution)lto_input_uleb128 (ib);
1158 if (aliases_p)
1159 {
1160 count = lto_input_uleb128 (ib);
1161 for (; count > 0; count --)
1162 {
1163 tree decl = lto_file_decl_data_get_var_decl (file_data,
1164 lto_input_uleb128 (ib));
1165 struct varpool_node *alias;
1166 alias = varpool_extra_name_alias (decl, var_decl);
1167 alias->resolution = (enum ld_plugin_symbol_resolution)lto_input_uleb128 (ib);
1168 }
1169 }
1170 return node;
1171 }
1172
1173 /* Read an IPA reference from input_block IB and record it for
1174 REFERING_NODE or REFERING_VARPOOL_NODE. */
1175
1176 static void
1177 input_ref (struct lto_input_block *ib,
1178 struct cgraph_node *refering_node,
1179 struct varpool_node *refering_varpool_node,
1180 VEC(cgraph_node_ptr, heap) *nodes,
1181 VEC(varpool_node_ptr, heap) *varpool_nodes)
1182 {
1183 struct cgraph_node *node = NULL;
1184 struct varpool_node *varpool_node = NULL;
1185 struct bitpack_d bp;
1186 enum ipa_ref_type type;
1187 enum ipa_ref_use use;
1188
1189 bp = lto_input_bitpack (ib);
1190 type = (enum ipa_ref_type) bp_unpack_value (&bp, 1);
1191 use = (enum ipa_ref_use) bp_unpack_value (&bp, 2);
1192 if (type == IPA_REF_CGRAPH)
1193 node = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1194 else
1195 varpool_node = VEC_index (varpool_node_ptr, varpool_nodes, lto_input_sleb128 (ib));
1196 ipa_record_reference (refering_node, refering_varpool_node,
1197 node, varpool_node, use, NULL);
1198 }
1199
1200 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1201 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1202 edge being read is indirect (in the sense that it has
1203 indirect_unknown_callee set). */
1204
1205 static void
1206 input_edge (struct lto_input_block *ib, VEC(cgraph_node_ptr, heap) *nodes,
1207 bool indirect)
1208 {
1209 struct cgraph_node *caller, *callee;
1210 struct cgraph_edge *edge;
1211 unsigned int stmt_id;
1212 gcov_type count;
1213 int freq;
1214 unsigned int nest;
1215 cgraph_inline_failed_t inline_failed;
1216 struct bitpack_d bp;
1217 int ecf_flags = 0;
1218
1219 caller = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1220 if (caller == NULL || caller->decl == NULL_TREE)
1221 internal_error ("bytecode stream: no caller found while reading edge");
1222
1223 if (!indirect)
1224 {
1225 callee = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1226 if (callee == NULL || callee->decl == NULL_TREE)
1227 internal_error ("bytecode stream: no callee found while reading edge");
1228 }
1229 else
1230 callee = NULL;
1231
1232 count = (gcov_type) lto_input_sleb128 (ib);
1233
1234 bp = lto_input_bitpack (ib);
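/* Unpack the fields in the same order in which lto_output_edge packed
   them.  */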
1235 stmt_id = (unsigned int) bp_unpack_value (&bp, HOST_BITS_PER_INT);
1236 inline_failed = (cgraph_inline_failed_t) bp_unpack_value (&bp,
1237 HOST_BITS_PER_INT);
1238 freq = (int) bp_unpack_value (&bp, HOST_BITS_PER_INT);
1239 nest = (unsigned) bp_unpack_value (&bp, 30);
1240
1241 if (indirect)
1242 edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq, nest);
1243 else
1244 edge = cgraph_create_edge (caller, callee, NULL, count, freq, nest);
1245
1246 edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
1247 edge->lto_stmt_uid = stmt_id;
1248 edge->inline_failed = inline_failed;
1249 edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
1250 edge->can_throw_external = bp_unpack_value (&bp, 1);
1251 if (indirect)
1252 {
1253 if (bp_unpack_value (&bp, 1))
1254 ecf_flags |= ECF_CONST;
1255 if (bp_unpack_value (&bp, 1))
1256 ecf_flags |= ECF_PURE;
1257 if (bp_unpack_value (&bp, 1))
1258 ecf_flags |= ECF_NORETURN;
1259 if (bp_unpack_value (&bp, 1))
1260 ecf_flags |= ECF_MALLOC;
1261 if (bp_unpack_value (&bp, 1))
1262 ecf_flags |= ECF_NOTHROW;
1263 if (bp_unpack_value (&bp, 1))
1264 ecf_flags |= ECF_RETURNS_TWICE;
1265 edge->indirect_info->ecf_flags = ecf_flags;
1266 }
1267 }
1268
1269
1270 /* Read a cgraph from IB using the info in FILE_DATA. */
1271
1272 static VEC(cgraph_node_ptr, heap) *
1273 input_cgraph_1 (struct lto_file_decl_data *file_data,
1274 struct lto_input_block *ib)
1275 {
1276 enum LTO_cgraph_tags tag;
1277 VEC(cgraph_node_ptr, heap) *nodes = NULL;
1278 struct cgraph_node *node;
1279 unsigned i;
1280 unsigned HOST_WIDE_INT len;
1281
1282 tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
1283 while (tag)
1284 {
1285 if (tag == LTO_cgraph_edge)
1286 input_edge (ib, nodes, false);
1287 else if (tag == LTO_cgraph_indirect_edge)
1288 input_edge (ib, nodes, true);
1289 else
1290 {
1291 node = input_node (file_data, ib, tag,nodes);
1292 if (node == NULL || node->decl == NULL_TREE)
1293 internal_error ("bytecode stream: found empty cgraph node");
1294 VEC_safe_push (cgraph_node_ptr, heap, nodes, node);
1295 lto_cgraph_encoder_encode (file_data->cgraph_node_encoder, node);
1296 }
1297
1298 tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
1299 }
1300
1301 /* Input toplevel asms. */
1302 len = lto_input_uleb128 (ib);
1303 while (len)
1304 {
1305 char *str = (char *)xmalloc (len + 1);
1306 for (i = 0; i < len; ++i)
1307 str[i] = lto_input_1_unsigned (ib);
1308 cgraph_add_asm_node (build_string (len, str));
1309 free (str);
1310
1311 len = lto_input_uleb128 (ib);
1312 }
1313 /* AUX pointers should be all non-zero for nodes read from the stream. */
1314 #ifdef ENABLE_CHECKING
1315 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1316 gcc_assert (node->aux);
1317 #endif
1318 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1319 {
1320 int ref = (int) (intptr_t) node->global.inlined_to;
1321
1322 /* We share declarations of builtins, so we may read the same node twice. */
1323 if (!node->aux)
1324 continue;
1325 node->aux = NULL;
1326
1327 /* Fixup inlined_to from reference to pointer. */
1328 if (ref != LCC_NOT_FOUND)
1329 node->global.inlined_to = VEC_index (cgraph_node_ptr, nodes, ref);
1330 else
1331 node->global.inlined_to = NULL;
1332
1333 ref = (int) (intptr_t) node->same_comdat_group;
1334
1335 /* Fixup same_comdat_group from reference to pointer. */
1336 if (ref != LCC_NOT_FOUND)
1337 node->same_comdat_group = VEC_index (cgraph_node_ptr, nodes, ref);
1338 else
1339 node->same_comdat_group = NULL;
1340 }
1341 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1342 node->aux = (void *)1;
1343 return nodes;
1344 }
1345
1346 /* Read a varpool from IB using the info in FILE_DATA. */
1347
1348 static VEC(varpool_node_ptr, heap) *
1349 input_varpool_1 (struct lto_file_decl_data *file_data,
1350 struct lto_input_block *ib)
1351 {
1352 unsigned HOST_WIDE_INT len;
1353 VEC(varpool_node_ptr, heap) *varpool = NULL;
1354 int i;
1355 struct varpool_node *node;
1356
1357 len = lto_input_uleb128 (ib);
1358 while (len)
1359 {
1360 VEC_safe_push (varpool_node_ptr, heap, varpool,
1361 input_varpool_node (file_data, ib));
1362 len--;
1363 }
1364 #ifdef ENABLE_CHECKING
1365 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1366 gcc_assert (!node->aux);
1367 #endif
1368 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1369 {
1370 int ref = (int) (intptr_t) node->same_comdat_group;
1371 /* We share declarations of builtins, so we may read the same node twice. */
1372 if (node->aux)
1373 continue;
1374 node->aux = (void *)1;
1375
1376 /* Fixup same_comdat_group from reference to pointer. */
1377 if (ref != LCC_NOT_FOUND)
1378 node->same_comdat_group = VEC_index (varpool_node_ptr, varpool, ref);
1379 else
1380 node->same_comdat_group = NULL;
1381 }
1382 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1383 node->aux = NULL;
1384 return varpool;
1385 }
1386
1387 /* Input ipa_refs. */
1388
1389 static void
1390 input_refs (struct lto_input_block *ib,
1391 VEC(cgraph_node_ptr, heap) *nodes,
1392 VEC(varpool_node_ptr, heap) *varpool)
1393 {
1394 int count;
1395 int idx;
1396 while (true)
1397 {
1398 struct cgraph_node *node;
1399 count = lto_input_uleb128 (ib);
1400 if (!count)
1401 break;
1402 idx = lto_input_uleb128 (ib);
1403 node = VEC_index (cgraph_node_ptr, nodes, idx);
1404 while (count)
1405 {
1406 input_ref (ib, node, NULL, nodes, varpool);
1407 count--;
1408 }
1409 }
1410 while (true)
1411 {
1412 struct varpool_node *node;
1413 count = lto_input_uleb128 (ib);
1414 if (!count)
1415 break;
1416 node = VEC_index (varpool_node_ptr, varpool, lto_input_uleb128 (ib));
1417 while (count)
1418 {
1419 input_ref (ib, NULL, node, nodes, varpool);
1420 count--;
1421 }
1422 }
1423 }
1424
1425
1426 static struct gcov_ctr_summary lto_gcov_summary;
1427
1428 /* Input profile_info from IB. */
1429 static void
1430 input_profile_summary (struct lto_input_block *ib,
1431 struct lto_file_decl_data *file_data)
1432 {
1433 unsigned int runs = lto_input_uleb128 (ib);
1434 if (runs)
1435 {
1436 file_data->profile_info.runs = runs;
1437 file_data->profile_info.sum_max = lto_input_uleb128 (ib);
1438 }
1439
1440 }
1441
1442 /* Rescale profile summaries to the same number of runs in the whole unit. */
1443
1444 static void
1445 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1446 {
1447 struct lto_file_decl_data *file_data;
1448 unsigned int j;
1449 gcov_unsigned_t max_runs = 0;
1450 struct cgraph_node *node;
1451 struct cgraph_edge *edge;
1452
1453 /* Find the unit with the maximal number of runs. If we ever get serious about
1454 roundoff errors, we might also consider computing the least common
1455 multiple. */
1456 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1457 if (max_runs < file_data->profile_info.runs)
1458 max_runs = file_data->profile_info.runs;
1459
1460 if (!max_runs)
1461 return;
1462
1463 /* Simple overflow check. We probably don't need to support that many train
1464 runs. Such a large value probably implies data corruption anyway. */
1465 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1466 {
1467 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1468 INT_MAX / REG_BR_PROB_BASE);
1469 return;
1470 }
1471
1472 profile_info = &lto_gcov_summary;
1473 lto_gcov_summary.runs = max_runs;
1474 lto_gcov_summary.sum_max = 0;
1475
1476 /* Rescale all units to the maximal number of runs.
1477 sum_max can not be easily merged, as we have no idea what files come from
1478 the same run. We do not use the info anyway, so leave it 0. */
1479 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1480 if (file_data->profile_info.runs)
1481 {
1482 int scale = ((REG_BR_PROB_BASE * max_runs
1483 + file_data->profile_info.runs / 2)
1484 / file_data->profile_info.runs);
1485 lto_gcov_summary.sum_max = MAX (lto_gcov_summary.sum_max,
1486 (file_data->profile_info.sum_max
1487 * scale
1488 + REG_BR_PROB_BASE / 2)
1489 / REG_BR_PROB_BASE);
1490 }
1491
1492 /* Watch roundoff errors. */
1493 if (lto_gcov_summary.sum_max < max_runs)
1494 lto_gcov_summary.sum_max = max_runs;
1495
1496 /* If merging already happened at WPA time, we are done. */
1497 if (flag_ltrans)
1498 return;
1499
1500 /* Now compute count_materialization_scale of each node.
1501 During LTRANS we already have values of count_materialization_scale
1502 computed, so just update them. */
1503 for (node = cgraph_nodes; node; node = node->next)
1504 if (node->local.lto_file_data->profile_info.runs)
1505 {
1506 int scale;
1507
1508 scale =
1509 ((node->count_materialization_scale * max_runs
1510 + node->local.lto_file_data->profile_info.runs / 2)
1511 / node->local.lto_file_data->profile_info.runs);
1512 node->count_materialization_scale = scale;
1513 if (scale < 0)
1514 fatal_error ("Profile information in %s corrupted",
1515 file_data->file_name);
1516
1517 if (scale == REG_BR_PROB_BASE)
1518 continue;
1519 for (edge = node->callees; edge; edge = edge->next_callee)
1520 edge->count = ((edge->count * scale + REG_BR_PROB_BASE / 2)
1521 / REG_BR_PROB_BASE);
1522 node->count = ((node->count * scale + REG_BR_PROB_BASE / 2)
1523 / REG_BR_PROB_BASE);
1524 }
1525 }
1526
1527 /* Input and merge the cgraph from each of the .o files passed to
1528 lto1. */
1529
1530 void
1531 input_cgraph (void)
1532 {
1533 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1534 struct lto_file_decl_data *file_data;
1535 unsigned int j = 0;
1536 struct cgraph_node *node;
1537
1538 while ((file_data = file_data_vec[j++]))
1539 {
1540 const char *data;
1541 size_t len;
1542 struct lto_input_block *ib;
1543 VEC(cgraph_node_ptr, heap) *nodes;
1544 VEC(varpool_node_ptr, heap) *varpool;
1545
1546 ib = lto_create_simple_input_block (file_data, LTO_section_cgraph,
1547 &data, &len);
1548 if (!ib)
1549 fatal_error ("cannot find LTO cgraph in %s", file_data->file_name);
1550 input_profile_summary (ib, file_data);
1551 file_data->cgraph_node_encoder = lto_cgraph_encoder_new ();
1552 nodes = input_cgraph_1 (file_data, ib);
1553 lto_destroy_simple_input_block (file_data, LTO_section_cgraph,
1554 ib, data, len);
1555
1556 ib = lto_create_simple_input_block (file_data, LTO_section_varpool,
1557 &data, &len);
1558 if (!ib)
1559 fatal_error ("cannot find LTO varpool in %s", file_data->file_name);
1560 varpool = input_varpool_1 (file_data, ib);
1561 lto_destroy_simple_input_block (file_data, LTO_section_varpool,
1562 ib, data, len);
1563
1564 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1565 &data, &len);
1566 if (!ib)
1567 fatal_error("cannot find LTO section refs in %s", file_data->file_name);
1568 input_refs (ib, nodes, varpool);
1569 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1570 ib, data, len);
1571 if (flag_ltrans)
1572 input_cgraph_opt_summary (nodes);
1573 VEC_free (cgraph_node_ptr, heap, nodes);
1574 VEC_free (varpool_node_ptr, heap, varpool);
1575 }
1576 merge_profile_summaries (file_data_vec);
1577
1578
1579 /* Clear out the aux field that was used to store enough state to
1580 tell which nodes should be overwritten. */
1581 for (node = cgraph_nodes; node; node = node->next)
1582 {
1583 /* Some nodes may have been created by cgraph_node. This
1584 happens when the callgraph contains nested functions. If the
1585 node for the parent function was never emitted to the gimple
1586 file, cgraph_node will create a node for it when setting the
1587 context of the nested function. */
1588 if (node->local.lto_file_data)
1589 node->aux = NULL;
1590 }
1591 }
1592
1593 /* True when we need optimization summary for NODE. */
1594
1595 static int
1596 output_cgraph_opt_summary_p (struct cgraph_node *node, cgraph_node_set set)
1597 {
1598 struct cgraph_edge *e;
1599
1600 if (cgraph_node_in_set_p (node, set))
1601 {
1602 for (e = node->callees; e; e = e->next_callee)
1603 if (e->indirect_info
1604 && e->indirect_info->thunk_delta != 0)
1605 return true;
1606
1607 for (e = node->indirect_calls; e; e = e->next_callee)
1608 if (e->indirect_info->thunk_delta != 0)
1609 return true;
1610 }
1611
1612 return (node->clone_of
1613 && (node->clone.tree_map
1614 || node->clone.args_to_skip
1615 || node->clone.combined_args_to_skip));
1616 }
1617
1618 /* Output optimization summary for EDGE to OB. */
1619 static void
1620 output_edge_opt_summary (struct output_block *ob,
1621 struct cgraph_edge *edge)
1622 {
1623 if (edge->indirect_info)
1624 lto_output_sleb128_stream (ob->main_stream,
1625 edge->indirect_info->thunk_delta);
1626 else
1627 lto_output_sleb128_stream (ob->main_stream, 0);
1628 }
1629
1630 /* Output optimization summary for NODE to OB. */
1631
1632 static void
1633 output_node_opt_summary (struct output_block *ob,
1634 struct cgraph_node *node,
1635 cgraph_node_set set)
1636 {
1637 unsigned int index;
1638 bitmap_iterator bi;
1639 struct ipa_replace_map *map;
1640 struct bitpack_d bp;
1641 int i;
1642 struct cgraph_edge *e;
1643
1644 lto_output_uleb128_stream (ob->main_stream,
1645 bitmap_count_bits (node->clone.args_to_skip));
1646 EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
1647 lto_output_uleb128_stream (ob->main_stream, index);
1648 lto_output_uleb128_stream (ob->main_stream,
1649 bitmap_count_bits (node->clone.combined_args_to_skip));
1650 EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
1651 lto_output_uleb128_stream (ob->main_stream, index);
1652 lto_output_uleb128_stream (ob->main_stream,
1653 VEC_length (ipa_replace_map_p, node->clone.tree_map));
1654 FOR_EACH_VEC_ELT (ipa_replace_map_p, node->clone.tree_map, i, map)
1655 {
1656 int parm_num;
1657 tree parm;
1658
1659 for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm;
1660 parm = DECL_CHAIN (parm), parm_num++)
1661 if (map->old_tree == parm)
1662 break;
1663 /* At the moment we assume all old trees to be PARM_DECLs, because we have no
1664 mechanism to store function local declarations into summaries. */
1665 gcc_assert (parm);
1666 lto_output_uleb128_stream (ob->main_stream, parm_num);
1667 lto_output_tree (ob, map->new_tree, true);
1668 bp = bitpack_create (ob->main_stream);
1669 bp_pack_value (&bp, map->replace_p, 1);
1670 bp_pack_value (&bp, map->ref_p, 1);
1671 lto_output_bitpack (&bp);
1672 }
1673
1674 if (cgraph_node_in_set_p (node, set))
1675 {
1676 for (e = node->callees; e; e = e->next_callee)
1677 output_edge_opt_summary (ob, e);
1678 for (e = node->indirect_calls; e; e = e->next_callee)
1679 output_edge_opt_summary (ob, e);
1680 }
1681 }
1682
1683 /* Output optimization summaries stored in callgraph.
1684 At the moment it is the clone info structure. */
1685
1686 static void
1687 output_cgraph_opt_summary (cgraph_node_set set)
1688 {
1689 struct cgraph_node *node;
1690 int i, n_nodes;
1691 lto_cgraph_encoder_t encoder;
1692 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1693 unsigned count = 0;
1694
1695 ob->cgraph_node = NULL;
1696 encoder = ob->decl_state->cgraph_node_encoder;
1697 n_nodes = lto_cgraph_encoder_size (encoder);
1698 for (i = 0; i < n_nodes; i++)
1699 if (output_cgraph_opt_summary_p (lto_cgraph_encoder_deref (encoder, i),
1700 set))
1701 count++;
1702 lto_output_uleb128_stream (ob->main_stream, count);
1703 for (i = 0; i < n_nodes; i++)
1704 {
1705 node = lto_cgraph_encoder_deref (encoder, i);
1706 if (output_cgraph_opt_summary_p (node, set))
1707 {
1708 lto_output_uleb128_stream (ob->main_stream, i);
1709 output_node_opt_summary (ob, node, set);
1710 }
1711 }
1712 produce_asm (ob, NULL);
1713 destroy_output_block (ob);
1714 }
1715
1716 /* Input optimisation summary of EDGE. */
1717
1718 static void
1719 input_edge_opt_summary (struct cgraph_edge *edge,
1720 struct lto_input_block *ib_main)
1721 {
1722 HOST_WIDE_INT thunk_delta;
1723 thunk_delta = lto_input_sleb128 (ib_main);
1724 if (thunk_delta != 0)
1725 {
1726 gcc_assert (!edge->indirect_info);
1727 edge->indirect_info = cgraph_allocate_init_indirect_info ();
1728 edge->indirect_info->thunk_delta = thunk_delta;
1729 }
1730 }
1731
1732 /* Input optimisation summary of NODE. */
1733
1734 static void
1735 input_node_opt_summary (struct cgraph_node *node,
1736 struct lto_input_block *ib_main,
1737 struct data_in *data_in)
1738 {
1739 int i;
1740 int count;
1741 int bit;
1742 struct bitpack_d bp;
1743 struct cgraph_edge *e;
1744
1745 count = lto_input_uleb128 (ib_main);
1746 if (count)
1747 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
1748 for (i = 0; i < count; i++)
1749 {
1750 bit = lto_input_uleb128 (ib_main);
1751 bitmap_set_bit (node->clone.args_to_skip, bit);
1752 }
1753 count = lto_input_uleb128 (ib_main);
1754 if (count)
1755 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
1756 for (i = 0; i < count; i++)
1757 {
1758 bit = lto_input_uleb128 (ib_main);
1759 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
1760 }
1761 count = lto_input_uleb128 (ib_main);
1762 for (i = 0; i < count; i++)
1763 {
1764 int parm_num;
1765 tree parm;
1766 struct ipa_replace_map *map = ggc_alloc_ipa_replace_map ();
1767
1768 VEC_safe_push (ipa_replace_map_p, gc, node->clone.tree_map, map);
1769 for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm_num;
1770 parm = DECL_CHAIN (parm))
1771 parm_num --;
1772 map->parm_num = lto_input_uleb128 (ib_main);
1773 map->old_tree = NULL;
1774 map->new_tree = lto_input_tree (ib_main, data_in);
1775 bp = lto_input_bitpack (ib_main);
1776 map->replace_p = bp_unpack_value (&bp, 1);
1777 map->ref_p = bp_unpack_value (&bp, 1);
1778 }
1779 for (e = node->callees; e; e = e->next_callee)
1780 input_edge_opt_summary (e, ib_main);
1781 for (e = node->indirect_calls; e; e = e->next_callee)
1782 input_edge_opt_summary (e, ib_main);
1783 }
1784
1785 /* Read section in file FILE_DATA of length LEN with data DATA. */
1786
1787 static void
1788 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
1789 const char *data, size_t len, VEC (cgraph_node_ptr,
1790 heap) * nodes)
1791 {
1792 const struct lto_function_header *header =
1793 (const struct lto_function_header *) data;
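/* The section layout is a lto_function_header followed by the (possibly
   empty) CFG stream, the main stream and the string table; compute the
   offset of each part.  */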
1794 const int32_t cfg_offset = sizeof (struct lto_function_header);
1795 const int32_t main_offset = cfg_offset + header->cfg_size;
1796 const int32_t string_offset = main_offset + header->main_size;
1797 struct data_in *data_in;
1798 struct lto_input_block ib_main;
1799 unsigned int i;
1800 unsigned int count;
1801
1802 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
1803 header->main_size);
1804
1805 data_in =
1806 lto_data_in_create (file_data, (const char *) data + string_offset,
1807 header->string_size, NULL);
1808 count = lto_input_uleb128 (&ib_main);
1809
1810 for (i = 0; i < count; i++)
1811 {
1812 int ref = lto_input_uleb128 (&ib_main);
1813 input_node_opt_summary (VEC_index (cgraph_node_ptr, nodes, ref),
1814 &ib_main, data_in);
1815 }
1816 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
1817 len);
1818 lto_data_in_delete (data_in);
1819 }
1820
1821 /* Input optimization summary of cgraph. */
1822
1823 static void
1824 input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes)
1825 {
1826 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1827 struct lto_file_decl_data *file_data;
1828 unsigned int j = 0;
1829
1830 while ((file_data = file_data_vec[j++]))
1831 {
1832 size_t len;
1833 const char *data =
1834 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
1835 &len);
1836
1837 if (data)
1838 input_cgraph_opt_section (file_data, data, len, nodes);
1839 }
1840 }