1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright 2009, 2010 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "toplev.h"
28 #include "tree.h"
29 #include "expr.h"
30 #include "flags.h"
31 #include "params.h"
32 #include "input.h"
33 #include "hashtab.h"
34 #include "langhooks.h"
35 #include "basic-block.h"
36 #include "tree-flow.h"
37 #include "cgraph.h"
38 #include "function.h"
39 #include "ggc.h"
40 #include "diagnostic-core.h"
41 #include "except.h"
42 #include "vec.h"
43 #include "timevar.h"
44 #include "output.h"
45 #include "pointer-set.h"
46 #include "lto-streamer.h"
47 #include "gcov-io.h"
48
49 static void output_varpool (cgraph_node_set, varpool_node_set);
50 static void output_cgraph_opt_summary (void);
51 static void input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes);
52
53
/* Cgraph streaming is organized as a set of records whose type
   is indicated by a tag.  */
56 enum LTO_cgraph_tags
57 {
58 /* Must leave 0 for the stopper. */
59
60 /* Cgraph node without body available. */
61 LTO_cgraph_unavail_node = 1,
62 /* Cgraph node with function body. */
63 LTO_cgraph_analyzed_node,
64 /* Cgraph edges. */
65 LTO_cgraph_edge,
66 LTO_cgraph_indirect_edge
67 };
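
/* On disk the cgraph section starts with the profile summary, followed by
   a sequence of these records terminated by a zero tag: all node records
   come first (so a clone is always preceded by the node it is cloned from),
   then the edge records of each node.  The section ends with the toplevel
   asm strings, each length-prefixed and terminated by a zero length.  */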
68
69 /* Create a new cgraph encoder. */
70
71 lto_cgraph_encoder_t
72 lto_cgraph_encoder_new (void)
73 {
74 lto_cgraph_encoder_t encoder = XCNEW (struct lto_cgraph_encoder_d);
75 encoder->map = pointer_map_create ();
76 encoder->nodes = NULL;
77 encoder->body = pointer_set_create ();
78 return encoder;
79 }
80
81
82 /* Delete ENCODER and its components. */
83
84 void
85 lto_cgraph_encoder_delete (lto_cgraph_encoder_t encoder)
86 {
87 VEC_free (cgraph_node_ptr, heap, encoder->nodes);
88 pointer_map_destroy (encoder->map);
89 pointer_set_destroy (encoder->body);
90 free (encoder);
91 }
92
93
/* Return the existing reference number of NODE in ENCODER.  Assign a new
   reference if this is the first time NODE is encoded.  */
97
98 int
99 lto_cgraph_encoder_encode (lto_cgraph_encoder_t encoder,
100 struct cgraph_node *node)
101 {
102 int ref;
103 void **slot;
104
105 slot = pointer_map_contains (encoder->map, node);
106 if (!slot)
107 {
108 ref = VEC_length (cgraph_node_ptr, encoder->nodes);
109 slot = pointer_map_insert (encoder->map, node);
110 *slot = (void *) (intptr_t) ref;
111 VEC_safe_push (cgraph_node_ptr, heap, encoder->nodes, node);
112 }
113 else
114 ref = (int) (intptr_t) *slot;
115
116 return ref;
117 }
118
119 #define LCC_NOT_FOUND (-1)
120
121 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
122 or LCC_NOT_FOUND if it is not there. */
123
124 int
125 lto_cgraph_encoder_lookup (lto_cgraph_encoder_t encoder,
126 struct cgraph_node *node)
127 {
128 void **slot = pointer_map_contains (encoder->map, node);
129 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
130 }
131
132
133 /* Return the cgraph node corresponding to REF using ENCODER. */
134
135 struct cgraph_node *
136 lto_cgraph_encoder_deref (lto_cgraph_encoder_t encoder, int ref)
137 {
138 if (ref == LCC_NOT_FOUND)
139 return NULL;
140
141 return VEC_index (cgraph_node_ptr, encoder->nodes, ref);
142 }
143
144
/* Return TRUE if we should encode the body of NODE (if any).  */
146
147 bool
148 lto_cgraph_encoder_encode_body_p (lto_cgraph_encoder_t encoder,
149 struct cgraph_node *node)
150 {
151 return pointer_set_contains (encoder->body, node);
152 }
153
/* Specify that we should encode the body of NODE (if any).  */
155
156 static void
157 lto_set_cgraph_encoder_encode_body (lto_cgraph_encoder_t encoder,
158 struct cgraph_node *node)
159 {
160 pointer_set_insert (encoder->body, node);
161 }
162
163 /* Create a new varpool encoder. */
164
165 lto_varpool_encoder_t
166 lto_varpool_encoder_new (void)
167 {
168 lto_varpool_encoder_t encoder = XCNEW (struct lto_varpool_encoder_d);
169 encoder->map = pointer_map_create ();
170 encoder->initializer = pointer_set_create ();
171 encoder->nodes = NULL;
172 return encoder;
173 }
174
175
176 /* Delete ENCODER and its components. */
177
178 void
179 lto_varpool_encoder_delete (lto_varpool_encoder_t encoder)
180 {
181 VEC_free (varpool_node_ptr, heap, encoder->nodes);
182 pointer_map_destroy (encoder->map);
183 pointer_set_destroy (encoder->initializer);
184 free (encoder);
185 }
186
187
/* Return the existing reference number of NODE in ENCODER.  Assign a new
   reference if this is the first time NODE is encoded.  */
191
192 int
193 lto_varpool_encoder_encode (lto_varpool_encoder_t encoder,
194 struct varpool_node *node)
195 {
196 int ref;
197 void **slot;
198
199 slot = pointer_map_contains (encoder->map, node);
200 if (!slot)
201 {
202 ref = VEC_length (varpool_node_ptr, encoder->nodes);
203 slot = pointer_map_insert (encoder->map, node);
204 *slot = (void *) (intptr_t) ref;
205 VEC_safe_push (varpool_node_ptr, heap, encoder->nodes, node);
206 }
207 else
208 ref = (int) (intptr_t) *slot;
209
210 return ref;
211 }
212
213 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
214 or LCC_NOT_FOUND if it is not there. */
215
216 int
217 lto_varpool_encoder_lookup (lto_varpool_encoder_t encoder,
218 struct varpool_node *node)
219 {
220 void **slot = pointer_map_contains (encoder->map, node);
221 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
222 }
223
224
225 /* Return the varpool node corresponding to REF using ENCODER. */
226
227 struct varpool_node *
228 lto_varpool_encoder_deref (lto_varpool_encoder_t encoder, int ref)
229 {
230 if (ref == LCC_NOT_FOUND)
231 return NULL;
232
233 return VEC_index (varpool_node_ptr, encoder->nodes, ref);
234 }
235
236
237 /* Return TRUE if we should encode initializer of NODE (if any). */
238
239 bool
240 lto_varpool_encoder_encode_initializer_p (lto_varpool_encoder_t encoder,
241 struct varpool_node *node)
242 {
243 return pointer_set_contains (encoder->initializer, node);
244 }
245
/* Specify that we should encode the initializer of NODE (if any).  */
247
248 static void
249 lto_set_varpool_encoder_encode_initializer (lto_varpool_encoder_t encoder,
250 struct varpool_node *node)
251 {
252 pointer_set_insert (encoder->initializer, node);
253 }
254
255 /* Output the cgraph EDGE to OB using ENCODER. */
256
257 static void
258 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
259 lto_cgraph_encoder_t encoder)
260 {
261 unsigned int uid;
262 intptr_t ref;
263 struct bitpack_d bp;
264
265 if (edge->indirect_unknown_callee)
266 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_indirect_edge);
267 else
268 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_edge);
269
270 ref = lto_cgraph_encoder_lookup (encoder, edge->caller);
271 gcc_assert (ref != LCC_NOT_FOUND);
272 lto_output_sleb128_stream (ob->main_stream, ref);
273
274 if (!edge->indirect_unknown_callee)
275 {
276 ref = lto_cgraph_encoder_lookup (encoder, edge->callee);
277 gcc_assert (ref != LCC_NOT_FOUND);
278 lto_output_sleb128_stream (ob->main_stream, ref);
279 }
280
281 lto_output_sleb128_stream (ob->main_stream, edge->count);
282
283 bp = bitpack_create (ob->main_stream);
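  /* If the caller has no gimple body here (as in WPA, where bodies are not
     read in), use the statement uid recorded on the edge; otherwise take
     the uid from the call statement itself.  */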
284 uid = (!gimple_has_body_p (edge->caller->decl)
285 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt));
286 bp_pack_value (&bp, uid, HOST_BITS_PER_INT);
287 bp_pack_value (&bp, edge->inline_failed, HOST_BITS_PER_INT);
288 bp_pack_value (&bp, edge->frequency, HOST_BITS_PER_INT);
289 bp_pack_value (&bp, edge->loop_nest, 30);
290 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
291 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
292 bp_pack_value (&bp, edge->can_throw_external, 1);
293 if (edge->indirect_unknown_callee)
294 {
295 int flags = edge->indirect_info->ecf_flags;
296 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
297 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
298 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
299 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
300 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
301 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
302 /* Flags that should not appear on indirect calls. */
303 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
304 | ECF_MAY_BE_ALLOCA
305 | ECF_SIBCALL
306 | ECF_NOVOPS)));
307 }
308 lto_output_bitpack (&bp);
309 }
310
/* Return true if LIST contains references from other partitions.  */
312
313 bool
314 referenced_from_other_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
315 varpool_node_set vset)
316 {
317 int i;
318 struct ipa_ref *ref;
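  /* Walk everything that refers to the owner of LIST; a single referring
     node or variable outside SET/VSET makes the answer true.  */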
319 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
320 {
321 if (ref->refering_type == IPA_REF_CGRAPH)
322 {
323 if (ipa_ref_refering_node (ref)->in_other_partition
324 || !cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
325 return true;
326 }
327 else
328 {
329 if (ipa_ref_refering_varpool_node (ref)->in_other_partition
330 || !varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
331 vset))
332 return true;
333 }
334 }
335 return false;
336 }
337
/* Return true when NODE is reachable from another partition.  */
339
340 bool
341 reachable_from_other_partition_p (struct cgraph_node *node, cgraph_node_set set)
342 {
343 struct cgraph_edge *e;
344 if (!node->analyzed)
345 return false;
346 if (node->global.inlined_to)
347 return false;
348 for (e = node->callers; e; e = e->next_caller)
349 if (e->caller->in_other_partition
350 || !cgraph_node_in_set_p (e->caller, set))
351 return true;
352 return false;
353 }
354
/* Return true if LIST contains references from this partition, i.e. from
   nodes in SET or variables in VSET.  */
356
357 bool
358 referenced_from_this_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
359 varpool_node_set vset)
360 {
361 int i;
362 struct ipa_ref *ref;
363 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
364 {
365 if (ref->refering_type == IPA_REF_CGRAPH)
366 {
367 if (cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
368 return true;
369 }
370 else
371 {
372 if (varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
373 vset))
374 return true;
375 }
376 }
377 return false;
378 }
379
/* Return true when NODE is reachable from this partition (some caller of
   NODE is in SET).  */
381
382 bool
383 reachable_from_this_partition_p (struct cgraph_node *node, cgraph_node_set set)
384 {
385 struct cgraph_edge *e;
386 if (!node->analyzed)
387 return false;
388 if (node->global.inlined_to)
389 return false;
390 for (e = node->callers; e; e = e->next_caller)
391 if (cgraph_node_in_set_p (e->caller, set))
392 return true;
393 return false;
394 }
395
/* Output the cgraph NODE to OB.  ENCODER is used to find the
   reference number of NODE->inlined_to.  SET is the set of nodes we
   are writing to the current file.  If NODE is not in SET, then NODE
   is a boundary of a cgraph_node_set and we pretend NODE just has a
   decl and no callees.  VSET is the set of varpool nodes being written,
   used to decide whether NODE is referenced from another partition.  */
403
404 static void
405 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
406 lto_cgraph_encoder_t encoder, cgraph_node_set set,
407 varpool_node_set vset)
408 {
409 unsigned int tag;
410 struct bitpack_d bp;
411 bool boundary_p;
412 intptr_t ref;
413 bool in_other_partition = false;
414 struct cgraph_node *clone_of;
415
416 boundary_p = !cgraph_node_in_set_p (node, set);
417
418 if (node->analyzed && !boundary_p)
419 tag = LTO_cgraph_analyzed_node;
420 else
421 tag = LTO_cgraph_unavail_node;
422
423 lto_output_uleb128_stream (ob->main_stream, tag);
424
  /* In WPA mode, we only output part of the call-graph.  Also, we
     fake cgraph node attributes.  There are two cases we care about:

     Boundary nodes: nodes that are not part of SET but are called
     from within SET.  We artificially make them look like externally
     visible nodes with no function body.

     Cherry-picked nodes: nodes we pulled from other translation units
     into SET during IPA-inlining.  We make them local static nodes to
     prevent clashes with other local statics.  */
435 if (boundary_p && node->analyzed)
436 {
      /* Inline clones cannot be part of a boundary.
	 gcc_assert (!node->global.inlined_to);

	 FIXME: At the moment they can be, when the partition contains an
	 inline clone that is a clone of an inline clone from outside the
	 partition.  We could reshape the clone tree and make another node
	 the root, but that needs a bit of extra work and will be promptly
	 done by cgraph_remove_node after reading back.  */
445 in_other_partition = 1;
446 }
447
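  /* Stream a reference to a node in the clone tree (walking sibling clones
     and then the clone origin) that is present in the encoder; the reader
     materializes this node by cloning it.  If none is found, stream
     LCC_NOT_FOUND and the node is looked up by its decl instead.  */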
448 clone_of = node->clone_of;
  while (clone_of
	 && (ref = lto_cgraph_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
451 if (clone_of->prev_sibling_clone)
452 clone_of = clone_of->prev_sibling_clone;
453 else
454 clone_of = clone_of->clone_of;
455 if (!clone_of)
456 lto_output_sleb128_stream (ob->main_stream, LCC_NOT_FOUND);
457 else
458 lto_output_sleb128_stream (ob->main_stream, ref);
459
460
461 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
462 lto_output_sleb128_stream (ob->main_stream, node->count);
463
464 if (tag == LTO_cgraph_analyzed_node)
465 {
466 lto_output_sleb128_stream (ob->main_stream,
467 node->local.inline_summary.estimated_self_stack_size);
468 lto_output_sleb128_stream (ob->main_stream,
469 node->local.inline_summary.self_size);
470 lto_output_sleb128_stream (ob->main_stream,
471 node->local.inline_summary.size_inlining_benefit);
472 lto_output_sleb128_stream (ob->main_stream,
473 node->local.inline_summary.self_time);
474 lto_output_sleb128_stream (ob->main_stream,
475 node->local.inline_summary.time_inlining_benefit);
476 if (node->global.inlined_to)
477 {
478 ref = lto_cgraph_encoder_lookup (encoder, node->global.inlined_to);
479 gcc_assert (ref != LCC_NOT_FOUND);
480 }
481 else
482 ref = LCC_NOT_FOUND;
483
484 lto_output_sleb128_stream (ob->main_stream, ref);
485 }
486
487 if (node->same_comdat_group && !boundary_p)
488 {
489 ref = lto_cgraph_encoder_lookup (encoder, node->same_comdat_group);
490 gcc_assert (ref != LCC_NOT_FOUND);
491 }
492 else
493 ref = LCC_NOT_FOUND;
494 lto_output_sleb128_stream (ob->main_stream, ref);
495
496 bp = bitpack_create (ob->main_stream);
497 bp_pack_value (&bp, node->local.local, 1);
498 bp_pack_value (&bp, node->local.externally_visible, 1);
499 bp_pack_value (&bp, node->local.finalized, 1);
500 bp_pack_value (&bp, node->local.inlinable, 1);
501 bp_pack_value (&bp, node->local.versionable, 1);
502 bp_pack_value (&bp, node->local.disregard_inline_limits, 1);
503 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
504 bp_pack_value (&bp, node->local.vtable_method, 1);
505 bp_pack_value (&bp, node->needed, 1);
506 bp_pack_value (&bp, node->address_taken, 1);
507 bp_pack_value (&bp, node->abstract_and_needed, 1);
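  /* This bit becomes reachable_from_other_partition on the reading side:
     the node is analyzed here, is not external or comdat, and is called or
     referenced from outside SET/VSET.  */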
508 bp_pack_value (&bp, tag == LTO_cgraph_analyzed_node
509 && !DECL_EXTERNAL (node->decl)
510 && !DECL_COMDAT (node->decl)
511 && (reachable_from_other_partition_p (node, set)
512 || referenced_from_other_partition_p (&node->ref_list, set, vset)), 1);
513 bp_pack_value (&bp, node->lowered, 1);
514 bp_pack_value (&bp, in_other_partition, 1);
515 bp_pack_value (&bp, node->alias, 1);
516 bp_pack_value (&bp, node->finalized_by_frontend, 1);
517 bp_pack_value (&bp, node->frequency, 2);
518 lto_output_bitpack (&bp);
519
520 if (node->same_body)
521 {
522 struct cgraph_node *alias;
523 unsigned long alias_count = 1;
524 for (alias = node->same_body; alias->next; alias = alias->next)
525 alias_count++;
526 lto_output_uleb128_stream (ob->main_stream, alias_count);
527 do
528 {
529 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
530 alias->decl);
531 if (alias->thunk.thunk_p)
532 {
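	  /* Encode the thunk kind in one integer: bit 0 is always set to
	     distinguish thunks from plain same-body aliases (which stream 0),
	     bit 1 is this_adjusting and bit 2 says whether a virtual offset
	     is used.  input_node decodes it the same way.  */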
533 lto_output_uleb128_stream
534 (ob->main_stream,
535 1 + (alias->thunk.this_adjusting != 0) * 2
536 + (alias->thunk.virtual_offset_p != 0) * 4);
537 lto_output_uleb128_stream (ob->main_stream,
538 alias->thunk.fixed_offset);
539 lto_output_uleb128_stream (ob->main_stream,
540 alias->thunk.virtual_value);
541 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
542 alias->thunk.alias);
543 }
544 else
545 {
546 lto_output_uleb128_stream (ob->main_stream, 0);
547 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
548 alias->thunk.alias);
549 }
550 alias = alias->previous;
551 }
552 while (alias);
553 }
554 else
555 lto_output_uleb128_stream (ob->main_stream, 0);
556 }
557
/* Output the varpool NODE to OB.
   If NODE is not in VSET, then NODE is a boundary.  */
560
561 static void
562 lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
563 lto_varpool_encoder_t varpool_encoder,
564 cgraph_node_set set, varpool_node_set vset)
565 {
566 bool boundary_p = !varpool_node_in_set_p (node, vset) && node->analyzed;
567 struct bitpack_d bp;
568 struct varpool_node *alias;
569 int count = 0;
570 int ref;
571
572 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
573 bp = bitpack_create (ob->main_stream);
574 bp_pack_value (&bp, node->externally_visible, 1);
575 bp_pack_value (&bp, node->force_output, 1);
576 bp_pack_value (&bp, node->finalized, 1);
577 bp_pack_value (&bp, node->alias, 1);
578 gcc_assert (!node->alias || !node->extra_name);
579 gcc_assert (node->finalized || !node->analyzed);
580 gcc_assert (node->needed);
  /* Constant pool initializers can be de-unified into individual ltrans
     units.
     FIXME: Alternatively, at -Os we may want to avoid generating the local
     labels for them and instead share them across LTRANS partitions.  */
584 if (DECL_IN_CONSTANT_POOL (node->decl)
585 && !DECL_COMDAT (node->decl))
586 {
      bp_pack_value (&bp, 0, 1); /* used_from_other_partition.  */
588 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
589 }
590 else
591 {
592 bp_pack_value (&bp, node->analyzed
593 && referenced_from_other_partition_p (&node->ref_list,
594 set, vset), 1);
595 bp_pack_value (&bp, boundary_p, 1); /* in_other_partition. */
596 }
597 /* Also emit any extra name aliases. */
598 for (alias = node->extra_name; alias; alias = alias->next)
599 count++;
600 bp_pack_value (&bp, count != 0, 1);
601 lto_output_bitpack (&bp);
602 if (node->same_comdat_group && !boundary_p)
603 {
604 ref = lto_varpool_encoder_lookup (varpool_encoder, node->same_comdat_group);
605 gcc_assert (ref != LCC_NOT_FOUND);
606 }
607 else
608 ref = LCC_NOT_FOUND;
609 lto_output_sleb128_stream (ob->main_stream, ref);
610
611 if (count)
612 {
613 lto_output_uleb128_stream (ob->main_stream, count);
614 for (alias = node->extra_name; alias; alias = alias->next)
615 lto_output_var_decl_index (ob->decl_state, ob->main_stream, alias->decl);
616 }
617 }
618
/* Output the reference REF to OB, using ENCODER and VARPOOL_ENCODER to look
   up the reference numbers of the referred-to nodes.  */
621
622 static void
623 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
624 lto_cgraph_encoder_t encoder,
625 lto_varpool_encoder_t varpool_encoder)
626 {
627 struct bitpack_d bp;
628 bp = bitpack_create (ob->main_stream);
629 bp_pack_value (&bp, ref->refered_type, 1);
630 bp_pack_value (&bp, ref->use, 2);
631 lto_output_bitpack (&bp);
632 if (ref->refered_type == IPA_REF_CGRAPH)
633 {
634 int nref = lto_cgraph_encoder_lookup (encoder, ipa_ref_node (ref));
635 gcc_assert (nref != LCC_NOT_FOUND);
636 lto_output_sleb128_stream (ob->main_stream, nref);
637 }
638 else
639 {
640 int nref = lto_varpool_encoder_lookup (varpool_encoder,
641 ipa_ref_varpool_node (ref));
642 gcc_assert (nref != LCC_NOT_FOUND);
643 lto_output_sleb128_stream (ob->main_stream, nref);
644 }
645 }
646
647 /* Stream out profile_summary to OB. */
648
649 static void
650 output_profile_summary (struct lto_simple_output_block *ob)
651 {
652 if (profile_info)
653 {
654 /* We do not output num, it is not terribly useful. */
655 gcc_assert (profile_info->runs);
656 lto_output_uleb128_stream (ob->main_stream, profile_info->runs);
657 lto_output_sleb128_stream (ob->main_stream, profile_info->sum_all);
658 lto_output_sleb128_stream (ob->main_stream, profile_info->run_max);
659 lto_output_sleb128_stream (ob->main_stream, profile_info->sum_max);
660 }
661 else
662 lto_output_uleb128_stream (ob->main_stream, 0);
663 }
664
/* Add NODE to ENCODER as well as the nodes it is cloned from.
   Do it in a way so that the nodes we clone from appear first.  */
667
668 static void
669 add_node_to (lto_cgraph_encoder_t encoder, struct cgraph_node *node,
670 bool include_body)
671 {
672 if (node->clone_of)
673 add_node_to (encoder, node->clone_of, include_body);
674 else if (include_body)
675 lto_set_cgraph_encoder_encode_body (encoder, node);
676 lto_cgraph_encoder_encode (encoder, node);
677 }
678
679 /* Add all references in LIST to encoders. */
680
681 static void
682 add_references (lto_cgraph_encoder_t encoder,
683 lto_varpool_encoder_t varpool_encoder,
684 struct ipa_ref_list *list)
685 {
686 int i;
687 struct ipa_ref *ref;
688 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
689 if (ref->refered_type == IPA_REF_CGRAPH)
690 add_node_to (encoder, ipa_ref_node (ref), false);
691 else
692 {
693 struct varpool_node *vnode = ipa_ref_varpool_node (ref);
694 lto_varpool_encoder_encode (varpool_encoder, vnode);
695 }
696 }
697
698 /* Output all callees or indirect outgoing edges. EDGE must be the first such
699 edge. */
700
701 static void
702 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
703 struct lto_simple_output_block *ob,
704 lto_cgraph_encoder_t encoder)
705 {
706 if (!edge)
707 return;
708
  /* Output edges in backward direction, so the reconstructed callgraph
     matches and it is easy to associate call sites with the IPA pass
     summaries.  */
711 while (edge->next_callee)
712 edge = edge->next_callee;
713 for (; edge; edge = edge->prev_callee)
714 lto_output_edge (ob, edge, encoder);
715 }
716
/* Output the references of the cgraph nodes in SET and the varpool nodes
   in VSET.  */
718
719 static void
720 output_refs (cgraph_node_set set, varpool_node_set vset,
721 lto_cgraph_encoder_t encoder,
722 lto_varpool_encoder_t varpool_encoder)
723 {
724 cgraph_node_set_iterator csi;
725 varpool_node_set_iterator vsi;
726 struct lto_simple_output_block *ob;
727 int count;
728 struct ipa_ref *ref;
729 int i;
730
731 ob = lto_create_simple_output_block (LTO_section_refs);
732
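  /* For every node that has references, stream the reference count, the
     node's index in the encoder and then the references themselves; a zero
     count terminates the cgraph part and the same layout follows for the
     varpool nodes.  */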
733 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
734 {
735 struct cgraph_node *node = csi_node (csi);
736
737 count = ipa_ref_list_nreferences (&node->ref_list);
738 if (count)
739 {
740 lto_output_uleb128_stream (ob->main_stream, count);
741 lto_output_uleb128_stream (ob->main_stream,
742 lto_cgraph_encoder_lookup (encoder, node));
743 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
744 lto_output_ref (ob, ref, encoder, varpool_encoder);
745 }
746 }
747
748 lto_output_uleb128_stream (ob->main_stream, 0);
749
750 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
751 {
752 struct varpool_node *node = vsi_node (vsi);
753
754 count = ipa_ref_list_nreferences (&node->ref_list);
755 if (count)
756 {
757 lto_output_uleb128_stream (ob->main_stream, count);
758 lto_output_uleb128_stream (ob->main_stream,
759 lto_varpool_encoder_lookup (varpool_encoder,
760 node));
761 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
762 lto_output_ref (ob, ref, encoder, varpool_encoder);
763 }
764 }
765
766 lto_output_uleb128_stream (ob->main_stream, 0);
767
768 lto_destroy_simple_output_block (ob);
769 }
770
/* Find out all the cgraph and varpool nodes we want to encode in the current
   unit and insert them into the encoders.  */
773 void
774 compute_ltrans_boundary (struct lto_out_decl_state *state,
775 cgraph_node_set set, varpool_node_set vset)
776 {
777 struct cgraph_node *node;
778 cgraph_node_set_iterator csi;
779 varpool_node_set_iterator vsi;
780 struct cgraph_edge *edge;
781 int i;
782 lto_cgraph_encoder_t encoder;
783 lto_varpool_encoder_t varpool_encoder;
784
785 encoder = state->cgraph_node_encoder = lto_cgraph_encoder_new ();
786 varpool_encoder = state->varpool_node_encoder = lto_varpool_encoder_new ();
787
788 /* Go over all the nodes in SET and assign references. */
789 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
790 {
791 node = csi_node (csi);
792 add_node_to (encoder, node, true);
793 add_references (encoder, varpool_encoder, &node->ref_list);
794 }
795 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
796 {
797 struct varpool_node *vnode = vsi_node (vsi);
798 gcc_assert (!vnode->alias);
799 lto_varpool_encoder_encode (varpool_encoder, vnode);
800 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
801 add_references (encoder, varpool_encoder, &vnode->ref_list);
802 }
  /* Also pickle the initializers of all referenced read-only variables
     to help folding.  Constant pool variables are not shared, so we must
     pickle those too.  */
806 for (i = 0; i < lto_varpool_encoder_size (varpool_encoder); i++)
807 {
808 struct varpool_node *vnode = lto_varpool_encoder_deref (varpool_encoder, i);
809 if (DECL_INITIAL (vnode->decl)
810 && !lto_varpool_encoder_encode_initializer_p (varpool_encoder,
811 vnode)
812 && (DECL_IN_CONSTANT_POOL (vnode->decl)
813 || TREE_READONLY (vnode->decl)))
814 {
815 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
816 add_references (encoder, varpool_encoder, &vnode->ref_list);
817 }
818 }
819
820 /* Go over all the nodes again to include callees that are not in
821 SET. */
822 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
823 {
824 node = csi_node (csi);
825 for (edge = node->callees; edge; edge = edge->next_callee)
826 {
827 struct cgraph_node *callee = edge->callee;
828 if (!cgraph_node_in_set_p (callee, set))
829 {
830 /* We should have moved all the inlines. */
831 gcc_assert (!callee->global.inlined_to);
832 add_node_to (encoder, callee, false);
833 }
834 }
835 }
836 }
837
838 /* Output the part of the cgraph in SET. */
839
840 void
841 output_cgraph (cgraph_node_set set, varpool_node_set vset)
842 {
843 struct cgraph_node *node;
844 struct lto_simple_output_block *ob;
845 cgraph_node_set_iterator csi;
846 int i, n_nodes;
847 lto_cgraph_encoder_t encoder;
848 lto_varpool_encoder_t varpool_encoder;
849 struct cgraph_asm_node *can;
850 static bool asm_nodes_output = false;
851
852 if (flag_wpa)
853 output_cgraph_opt_summary ();
854
855 ob = lto_create_simple_output_block (LTO_section_cgraph);
856
857 output_profile_summary (ob);
858
859 /* An encoder for cgraph nodes should have been created by
860 ipa_write_summaries_1. */
861 gcc_assert (ob->decl_state->cgraph_node_encoder);
862 gcc_assert (ob->decl_state->varpool_node_encoder);
863 encoder = ob->decl_state->cgraph_node_encoder;
864 varpool_encoder = ob->decl_state->varpool_node_encoder;
865
  /* Write out the nodes.  We must output a node before its clones,
     otherwise, when reading a clone back, there would be nothing to clone
     from.  */
869 n_nodes = lto_cgraph_encoder_size (encoder);
870 for (i = 0; i < n_nodes; i++)
871 {
872 node = lto_cgraph_encoder_deref (encoder, i);
873 lto_output_node (ob, node, encoder, set, vset);
874 }
875
876 /* Go over the nodes in SET again to write edges. */
877 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
878 {
879 node = csi_node (csi);
880 output_outgoing_cgraph_edges (node->callees, ob, encoder);
881 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
882 }
883
884 lto_output_uleb128_stream (ob->main_stream, 0);
885
  /* Emit toplevel asms.
     When doing WPA we must output every asm just once.  Since we do not
     partition asm nodes at all, output them to the first output.  This is
     a kind of hack, but it should work well.  */
890 if (!asm_nodes_output)
891 {
892 asm_nodes_output = true;
893 for (can = cgraph_asm_nodes; can; can = can->next)
894 {
895 int len = TREE_STRING_LENGTH (can->asm_str);
896 lto_output_uleb128_stream (ob->main_stream, len);
897 for (i = 0; i < len; ++i)
898 lto_output_1_stream (ob->main_stream,
899 TREE_STRING_POINTER (can->asm_str)[i]);
900 }
901 }
902
903 lto_output_uleb128_stream (ob->main_stream, 0);
904
905 lto_destroy_simple_output_block (ob);
906 output_varpool (set, vset);
907 output_refs (set, vset, encoder, varpool_encoder);
908 }
909
/* Overwrite the information in NODE based on FILE_DATA, TAG, STACK_SIZE,
   SELF_TIME, TIME_INLINING_BENEFIT, SELF_SIZE and SIZE_INLINING_BENEFIT.
   This is called either to initialize NODE or to replace the values in it,
   for instance because the first time we saw it, the function body was not
   available but now it is.  BP is a bitpack with all the bitflags for NODE
   read from the stream.  */
916
917 static void
918 input_overwrite_node (struct lto_file_decl_data *file_data,
919 struct cgraph_node *node,
920 enum LTO_cgraph_tags tag,
921 struct bitpack_d *bp,
922 unsigned int stack_size,
923 unsigned int self_time,
924 unsigned int time_inlining_benefit,
925 unsigned int self_size,
926 unsigned int size_inlining_benefit)
927 {
928 node->aux = (void *) tag;
929 node->local.inline_summary.estimated_self_stack_size = stack_size;
930 node->local.inline_summary.self_time = self_time;
931 node->local.inline_summary.time_inlining_benefit = time_inlining_benefit;
932 node->local.inline_summary.self_size = self_size;
933 node->local.inline_summary.size_inlining_benefit = size_inlining_benefit;
934 node->global.time = self_time;
935 node->global.size = self_size;
936 node->global.estimated_stack_size = stack_size;
937 node->global.estimated_growth = INT_MIN;
938 node->local.lto_file_data = file_data;
939
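  /* The bp_unpack_value calls below must match, in order and width, the
     bp_pack_value calls in lto_output_node.  */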
940 node->local.local = bp_unpack_value (bp, 1);
941 node->local.externally_visible = bp_unpack_value (bp, 1);
942 node->local.finalized = bp_unpack_value (bp, 1);
943 node->local.inlinable = bp_unpack_value (bp, 1);
944 node->local.versionable = bp_unpack_value (bp, 1);
945 node->local.disregard_inline_limits = bp_unpack_value (bp, 1);
946 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
947 node->local.vtable_method = bp_unpack_value (bp, 1);
948 node->needed = bp_unpack_value (bp, 1);
949 node->address_taken = bp_unpack_value (bp, 1);
950 node->abstract_and_needed = bp_unpack_value (bp, 1);
951 node->reachable_from_other_partition = bp_unpack_value (bp, 1);
952 node->lowered = bp_unpack_value (bp, 1);
953 node->analyzed = tag == LTO_cgraph_analyzed_node;
954 node->in_other_partition = bp_unpack_value (bp, 1);
955 node->alias = bp_unpack_value (bp, 1);
956 node->finalized_by_frontend = bp_unpack_value (bp, 1);
957 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
958 }
959
/* Output the part of the varpool in SET and VSET.  */
961
962 static void
963 output_varpool (cgraph_node_set set, varpool_node_set vset)
964 {
965 struct lto_simple_output_block *ob = lto_create_simple_output_block (LTO_section_varpool);
966 lto_varpool_encoder_t varpool_encoder = ob->decl_state->varpool_node_encoder;
967 int len = lto_varpool_encoder_size (varpool_encoder), i;
968
969 lto_output_uleb128_stream (ob->main_stream, len);
970
  /* Write out the nodes in the order recorded in the encoder.  */
974 for (i = 0; i < len; i++)
975 {
976 lto_output_varpool_node (ob, lto_varpool_encoder_deref (varpool_encoder, i),
977 varpool_encoder,
978 set, vset);
979 }
980
981 lto_destroy_simple_output_block (ob);
982 }
983
/* Read a cgraph node from input_block IB.  TAG is the node's tag just read.
   Return the node read or overwritten.  */
986
987 static struct cgraph_node *
988 input_node (struct lto_file_decl_data *file_data,
989 struct lto_input_block *ib,
990 enum LTO_cgraph_tags tag,
991 VEC(cgraph_node_ptr, heap) *nodes)
992 {
993 tree fn_decl;
994 struct cgraph_node *node;
995 struct bitpack_d bp;
996 int stack_size = 0;
997 unsigned decl_index;
998 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
999 int self_time = 0;
1000 int self_size = 0;
1001 int time_inlining_benefit = 0;
1002 int size_inlining_benefit = 0;
1003 unsigned long same_body_count = 0;
1004 int clone_ref;
1005
1006 clone_ref = lto_input_sleb128 (ib);
1007
1008 decl_index = lto_input_uleb128 (ib);
1009 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1010
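  /* If the node was streamed as a clone, materialize it by cloning the
     already-read origin node; otherwise get (or create) the node for
     FN_DECL directly.  */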
1011 if (clone_ref != LCC_NOT_FOUND)
1012 {
1013 node = cgraph_clone_node (VEC_index (cgraph_node_ptr, nodes, clone_ref), fn_decl,
1014 0, CGRAPH_FREQ_BASE, 0, false, NULL);
1015 }
1016 else
1017 node = cgraph_node (fn_decl);
1018
1019 node->count = lto_input_sleb128 (ib);
1020
1021 if (tag == LTO_cgraph_analyzed_node)
1022 {
1023 stack_size = lto_input_sleb128 (ib);
1024 self_size = lto_input_sleb128 (ib);
1025 size_inlining_benefit = lto_input_sleb128 (ib);
1026 self_time = lto_input_sleb128 (ib);
1027 time_inlining_benefit = lto_input_sleb128 (ib);
1028
1029 ref = lto_input_sleb128 (ib);
1030 }
1031
1032 ref2 = lto_input_sleb128 (ib);
1033
1034 /* Make sure that we have not read this node before. Nodes that
1035 have already been read will have their tag stored in the 'aux'
1036 field. Since built-in functions can be referenced in multiple
1037 functions, they are expected to be read more than once. */
1038 if (node->aux && !DECL_IS_BUILTIN (node->decl))
1039 internal_error ("bytecode stream: found multiple instances of cgraph "
1040 "node %d", node->uid);
1041
1042 bp = lto_input_bitpack (ib);
1043 input_overwrite_node (file_data, node, tag, &bp, stack_size, self_time,
1044 time_inlining_benefit, self_size,
1045 size_inlining_benefit);
1046
1047 /* Store a reference for now, and fix up later to be a pointer. */
1048 node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
1049
1050 /* Store a reference for now, and fix up later to be a pointer. */
1051 node->same_comdat_group = (cgraph_node_ptr) (intptr_t) ref2;
1052
1053 same_body_count = lto_input_uleb128 (ib);
1054 while (same_body_count-- > 0)
1055 {
1056 tree alias_decl;
1057 int type;
1058 decl_index = lto_input_uleb128 (ib);
1059 alias_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1060 type = lto_input_uleb128 (ib);
1061 if (!type)
1062 {
1063 tree real_alias;
1064 decl_index = lto_input_uleb128 (ib);
1065 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1066 cgraph_same_body_alias (alias_decl, real_alias);
1067 }
1068 else
1069 {
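	  /* A thunk: bit 1 of TYPE is this_adjusting and bit 2 says whether
	     a virtual offset (equal to VIRTUAL_VALUE) is used; this mirrors
	     the encoding in lto_output_node.  */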
1070 HOST_WIDE_INT fixed_offset = lto_input_uleb128 (ib);
1071 HOST_WIDE_INT virtual_value = lto_input_uleb128 (ib);
1072 tree real_alias;
1073 decl_index = lto_input_uleb128 (ib);
1074 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1075 cgraph_add_thunk (alias_decl, fn_decl, type & 2, fixed_offset,
1076 virtual_value,
1077 (type & 4) ? size_int (virtual_value) : NULL_TREE,
1078 real_alias);
1079 }
1080 }
1081 return node;
1082 }
1083
/* Read a varpool node from input_block IB.
   Return the node read or overwritten.  */
1086
1087 static struct varpool_node *
1088 input_varpool_node (struct lto_file_decl_data *file_data,
1089 struct lto_input_block *ib)
1090 {
1091 int decl_index;
1092 tree var_decl;
1093 struct varpool_node *node;
1094 struct bitpack_d bp;
1095 bool aliases_p;
1096 int count;
1097 int ref = LCC_NOT_FOUND;
1098
1099 decl_index = lto_input_uleb128 (ib);
1100 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1101 node = varpool_node (var_decl);
1102 node->lto_file_data = file_data;
1103
1104 bp = lto_input_bitpack (ib);
1105 node->externally_visible = bp_unpack_value (&bp, 1);
1106 node->force_output = bp_unpack_value (&bp, 1);
1107 node->finalized = bp_unpack_value (&bp, 1);
1108 node->alias = bp_unpack_value (&bp, 1);
1109 node->analyzed = node->finalized;
1110 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1111 node->in_other_partition = bp_unpack_value (&bp, 1);
1112 aliases_p = bp_unpack_value (&bp, 1);
1113 if (node->finalized)
1114 varpool_mark_needed_node (node);
1115 ref = lto_input_sleb128 (ib);
1116 /* Store a reference for now, and fix up later to be a pointer. */
1117 node->same_comdat_group = (struct varpool_node *) (intptr_t) ref;
1118 if (aliases_p)
1119 {
1120 count = lto_input_uleb128 (ib);
1121 for (; count > 0; count --)
1122 {
1123 tree decl = lto_file_decl_data_get_var_decl (file_data,
1124 lto_input_uleb128 (ib));
1125 varpool_extra_name_alias (decl, var_decl);
1126 }
1127 }
1128 return node;
1129 }
1130
/* Read a reference from input_block IB, resolve it against NODES or
   VARPOOL_NODES and record it for REFERING_NODE or REFERING_VARPOOL_NODE.  */
1133
1134 static void
1135 input_ref (struct lto_input_block *ib,
1136 struct cgraph_node *refering_node,
1137 struct varpool_node *refering_varpool_node,
1138 VEC(cgraph_node_ptr, heap) *nodes,
1139 VEC(varpool_node_ptr, heap) *varpool_nodes)
1140 {
1141 struct cgraph_node *node = NULL;
1142 struct varpool_node *varpool_node = NULL;
1143 struct bitpack_d bp;
1144 enum ipa_ref_type type;
1145 enum ipa_ref_use use;
1146
1147 bp = lto_input_bitpack (ib);
1148 type = (enum ipa_ref_type) bp_unpack_value (&bp, 1);
1149 use = (enum ipa_ref_use) bp_unpack_value (&bp, 2);
1150 if (type == IPA_REF_CGRAPH)
1151 node = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1152 else
1153 varpool_node = VEC_index (varpool_node_ptr, varpool_nodes, lto_input_sleb128 (ib));
1154 ipa_record_reference (refering_node, refering_varpool_node,
1155 node, varpool_node, use, NULL);
1156 }
1157
1158 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1159 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1160 edge being read is indirect (in the sense that it has
1161 indirect_unknown_callee set). */
1162
1163 static void
1164 input_edge (struct lto_input_block *ib, VEC(cgraph_node_ptr, heap) *nodes,
1165 bool indirect)
1166 {
1167 struct cgraph_node *caller, *callee;
1168 struct cgraph_edge *edge;
1169 unsigned int stmt_id;
1170 gcov_type count;
1171 int freq;
1172 unsigned int nest;
1173 cgraph_inline_failed_t inline_failed;
1174 struct bitpack_d bp;
1175 int ecf_flags = 0;
1176
1177 caller = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1178 if (caller == NULL || caller->decl == NULL_TREE)
1179 internal_error ("bytecode stream: no caller found while reading edge");
1180
1181 if (!indirect)
1182 {
1183 callee = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1184 if (callee == NULL || callee->decl == NULL_TREE)
1185 internal_error ("bytecode stream: no callee found while reading edge");
1186 }
1187 else
1188 callee = NULL;
1189
1190 count = (gcov_type) lto_input_sleb128 (ib);
1191
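  /* Unpack the edge flags in the same order they were packed by
     lto_output_edge.  */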
1192 bp = lto_input_bitpack (ib);
1193 stmt_id = (unsigned int) bp_unpack_value (&bp, HOST_BITS_PER_INT);
1194 inline_failed = (cgraph_inline_failed_t) bp_unpack_value (&bp,
1195 HOST_BITS_PER_INT);
1196 freq = (int) bp_unpack_value (&bp, HOST_BITS_PER_INT);
1197 nest = (unsigned) bp_unpack_value (&bp, 30);
1198
1199 if (indirect)
1200 edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq, nest);
1201 else
1202 edge = cgraph_create_edge (caller, callee, NULL, count, freq, nest);
1203
1204 edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
1205 edge->lto_stmt_uid = stmt_id;
1206 edge->inline_failed = inline_failed;
1207 edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
1208 edge->can_throw_external = bp_unpack_value (&bp, 1);
1209 if (indirect)
1210 {
1211 if (bp_unpack_value (&bp, 1))
1212 ecf_flags |= ECF_CONST;
1213 if (bp_unpack_value (&bp, 1))
1214 ecf_flags |= ECF_PURE;
1215 if (bp_unpack_value (&bp, 1))
1216 ecf_flags |= ECF_NORETURN;
1217 if (bp_unpack_value (&bp, 1))
1218 ecf_flags |= ECF_MALLOC;
1219 if (bp_unpack_value (&bp, 1))
1220 ecf_flags |= ECF_NOTHROW;
1221 if (bp_unpack_value (&bp, 1))
1222 ecf_flags |= ECF_RETURNS_TWICE;
1223 edge->indirect_info->ecf_flags = ecf_flags;
1224 }
1225 }
1226
1227
1228 /* Read a cgraph from IB using the info in FILE_DATA. */
1229
1230 static VEC(cgraph_node_ptr, heap) *
1231 input_cgraph_1 (struct lto_file_decl_data *file_data,
1232 struct lto_input_block *ib)
1233 {
1234 enum LTO_cgraph_tags tag;
1235 VEC(cgraph_node_ptr, heap) *nodes = NULL;
1236 struct cgraph_node *node;
1237 unsigned i;
1238 unsigned HOST_WIDE_INT len;
1239
1240 tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
1241 while (tag)
1242 {
1243 if (tag == LTO_cgraph_edge)
1244 input_edge (ib, nodes, false);
1245 else if (tag == LTO_cgraph_indirect_edge)
1246 input_edge (ib, nodes, true);
1247 else
1248 {
1249 node = input_node (file_data, ib, tag,nodes);
1250 if (node == NULL || node->decl == NULL_TREE)
1251 internal_error ("bytecode stream: found empty cgraph node");
1252 VEC_safe_push (cgraph_node_ptr, heap, nodes, node);
1253 lto_cgraph_encoder_encode (file_data->cgraph_node_encoder, node);
1254 }
1255
1256 tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
1257 }
1258
1259 /* Input toplevel asms. */
1260 len = lto_input_uleb128 (ib);
1261 while (len)
1262 {
1263 char *str = (char *)xmalloc (len + 1);
1264 for (i = 0; i < len; ++i)
1265 str[i] = lto_input_1_unsigned (ib);
1266 cgraph_add_asm_node (build_string (len, str));
1267 free (str);
1268
1269 len = lto_input_uleb128 (ib);
1270 }
1271
1272 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1273 {
1274 int ref = (int) (intptr_t) node->global.inlined_to;
1275
1276 /* Fixup inlined_to from reference to pointer. */
1277 if (ref != LCC_NOT_FOUND)
1278 node->global.inlined_to = VEC_index (cgraph_node_ptr, nodes, ref);
1279 else
1280 node->global.inlined_to = NULL;
1281
1282 ref = (int) (intptr_t) node->same_comdat_group;
1283
1284 /* Fixup same_comdat_group from reference to pointer. */
1285 if (ref != LCC_NOT_FOUND)
1286 node->same_comdat_group = VEC_index (cgraph_node_ptr, nodes, ref);
1287 else
1288 node->same_comdat_group = NULL;
1289 }
1290 return nodes;
1291 }
1292
1293 /* Read a varpool from IB using the info in FILE_DATA. */
1294
1295 static VEC(varpool_node_ptr, heap) *
1296 input_varpool_1 (struct lto_file_decl_data *file_data,
1297 struct lto_input_block *ib)
1298 {
1299 unsigned HOST_WIDE_INT len;
1300 VEC(varpool_node_ptr, heap) *varpool = NULL;
1301 int i;
1302 struct varpool_node *node;
1303
1304 len = lto_input_uleb128 (ib);
1305 while (len)
1306 {
1307 VEC_safe_push (varpool_node_ptr, heap, varpool,
1308 input_varpool_node (file_data, ib));
1309 len--;
1310 }
1311 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1312 {
1313 int ref = (int) (intptr_t) node->same_comdat_group;
1314
1315 /* Fixup same_comdat_group from reference to pointer. */
1316 if (ref != LCC_NOT_FOUND)
1317 node->same_comdat_group = VEC_index (varpool_node_ptr, varpool, ref);
1318 else
1319 node->same_comdat_group = NULL;
1320 }
1321 return varpool;
1322 }
1323
1324 /* Input ipa_refs. */
1325
1326 static void
1327 input_refs (struct lto_input_block *ib,
1328 VEC(cgraph_node_ptr, heap) *nodes,
1329 VEC(varpool_node_ptr, heap) *varpool)
1330 {
1331 int count;
1332 int idx;
1333 while (true)
1334 {
1335 struct cgraph_node *node;
1336 count = lto_input_uleb128 (ib);
1337 if (!count)
1338 break;
1339 idx = lto_input_uleb128 (ib);
1340 node = VEC_index (cgraph_node_ptr, nodes, idx);
1341 while (count)
1342 {
1343 input_ref (ib, node, NULL, nodes, varpool);
1344 count--;
1345 }
1346 }
1347 while (true)
1348 {
1349 struct varpool_node *node;
1350 count = lto_input_uleb128 (ib);
1351 if (!count)
1352 break;
1353 node = VEC_index (varpool_node_ptr, varpool, lto_input_uleb128 (ib));
1354 while (count)
1355 {
1356 input_ref (ib, NULL, node, nodes, varpool);
1357 count--;
1358 }
1359 }
1360 }
1361
1362
1363 static struct gcov_ctr_summary lto_gcov_summary;
1364
1365 /* Input profile_info from IB. */
1366 static void
1367 input_profile_summary (struct lto_input_block *ib)
1368 {
1369 unsigned int runs = lto_input_uleb128 (ib);
1370 if (runs)
1371 {
1372 if (!profile_info)
1373 {
1374 profile_info = &lto_gcov_summary;
1375 lto_gcov_summary.runs = runs;
1376 lto_gcov_summary.sum_all = lto_input_sleb128 (ib);
1377 lto_gcov_summary.run_max = lto_input_sleb128 (ib);
1378 lto_gcov_summary.sum_max = lto_input_sleb128 (ib);
1379 }
      /* We could support this by scaling all counts to the nearest common
	 multiple of all the different runs, but it is perhaps not worth the
	 effort.  */
1382 else if (profile_info->runs != runs
1383 || profile_info->sum_all != lto_input_sleb128 (ib)
1384 || profile_info->run_max != lto_input_sleb128 (ib)
1385 || profile_info->sum_max != lto_input_sleb128 (ib))
1386 sorry ("Combining units with different profiles is not supported.");
      /* We allow some units to have a profile and others not to have one.
	 This just makes the unprofiled units be optimized for size, which
	 is sane.  */
1389 }
1390
1391 }
1392
1393 /* Input and merge the cgraph from each of the .o files passed to
1394 lto1. */
1395
1396 void
1397 input_cgraph (void)
1398 {
1399 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1400 struct lto_file_decl_data *file_data;
1401 unsigned int j = 0;
1402 struct cgraph_node *node;
1403
1404 while ((file_data = file_data_vec[j++]))
1405 {
1406 const char *data;
1407 size_t len;
1408 struct lto_input_block *ib;
1409 VEC(cgraph_node_ptr, heap) *nodes;
1410 VEC(varpool_node_ptr, heap) *varpool;
1411
1412 ib = lto_create_simple_input_block (file_data, LTO_section_cgraph,
1413 &data, &len);
1414 input_profile_summary (ib);
1415 file_data->cgraph_node_encoder = lto_cgraph_encoder_new ();
1416 nodes = input_cgraph_1 (file_data, ib);
1417 lto_destroy_simple_input_block (file_data, LTO_section_cgraph,
1418 ib, data, len);
1419
1420 ib = lto_create_simple_input_block (file_data, LTO_section_varpool,
1421 &data, &len);
1422 varpool = input_varpool_1 (file_data, ib);
1423 lto_destroy_simple_input_block (file_data, LTO_section_varpool,
1424 ib, data, len);
1425
1426 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1427 &data, &len);
1428 input_refs (ib, nodes, varpool);
1429 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1430 ib, data, len);
1431 if (flag_ltrans)
1432 input_cgraph_opt_summary (nodes);
1433 VEC_free (cgraph_node_ptr, heap, nodes);
1434 VEC_free (varpool_node_ptr, heap, varpool);
1435 }
1436
1437 /* Clear out the aux field that was used to store enough state to
1438 tell which nodes should be overwritten. */
1439 for (node = cgraph_nodes; node; node = node->next)
1440 {
1441 /* Some nodes may have been created by cgraph_node. This
1442 happens when the callgraph contains nested functions. If the
1443 node for the parent function was never emitted to the gimple
1444 file, cgraph_node will create a node for it when setting the
1445 context of the nested function. */
1446 if (node->local.lto_file_data)
1447 node->aux = NULL;
1448 }
1449 }
1450
/* Return true when we need an optimization summary for NODE.  */
1452
1453 static int
1454 output_cgraph_opt_summary_p (struct cgraph_node *node)
1455 {
1456 if (!node->clone_of)
1457 return false;
1458 return (node->clone.tree_map
1459 || node->clone.args_to_skip
1460 || node->clone.combined_args_to_skip);
1461 }
1462
1463 /* Output optimization summary for NODE to OB. */
1464
1465 static void
1466 output_node_opt_summary (struct output_block *ob,
1467 struct cgraph_node *node)
1468 {
1469 unsigned int index;
1470 bitmap_iterator bi;
1471 struct ipa_replace_map *map;
1472 struct bitpack_d bp;
1473 int i;
1474
1475 lto_output_uleb128_stream (ob->main_stream,
1476 bitmap_count_bits (node->clone.args_to_skip));
1477 EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
1478 lto_output_uleb128_stream (ob->main_stream, index);
1479 lto_output_uleb128_stream (ob->main_stream,
1480 bitmap_count_bits (node->clone.combined_args_to_skip));
1481 EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
1482 lto_output_uleb128_stream (ob->main_stream, index);
1483 lto_output_uleb128_stream (ob->main_stream,
1484 VEC_length (ipa_replace_map_p, node->clone.tree_map));
1485 FOR_EACH_VEC_ELT (ipa_replace_map_p, node->clone.tree_map, i, map)
1486 {
1487 int parm_num;
1488 tree parm;
1489
1490 for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm;
1491 parm = DECL_CHAIN (parm), parm_num++)
1492 if (map->old_tree == parm)
1493 break;
      /* At the moment we assume all old trees are PARM_DECLs, because we
	 have no mechanism to store function-local declarations into
	 summaries.  */
1496 gcc_assert (parm);
1497 lto_output_uleb128_stream (ob->main_stream, parm_num);
1498 lto_output_tree (ob, map->new_tree, true);
1499 bp = bitpack_create (ob->main_stream);
1500 bp_pack_value (&bp, map->replace_p, 1);
1501 bp_pack_value (&bp, map->ref_p, 1);
1502 lto_output_bitpack (&bp);
1503 }
1504 }
1505
1506 /* Output optimization summaries stored in callgraph.
1507 At the moment it is the clone info structure. */
1508
1509 static void
1510 output_cgraph_opt_summary (void)
1511 {
1512 struct cgraph_node *node;
1513 int i, n_nodes;
1514 lto_cgraph_encoder_t encoder;
1515 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1516 unsigned count = 0;
1517
1518 ob->cgraph_node = NULL;
1519 encoder = ob->decl_state->cgraph_node_encoder;
1520 n_nodes = lto_cgraph_encoder_size (encoder);
1521 for (i = 0; i < n_nodes; i++)
1522 if (output_cgraph_opt_summary_p (lto_cgraph_encoder_deref (encoder, i)))
1523 count++;
1524 lto_output_uleb128_stream (ob->main_stream, count);
1525 for (i = 0; i < n_nodes; i++)
1526 {
1527 node = lto_cgraph_encoder_deref (encoder, i);
1528 if (output_cgraph_opt_summary_p (node))
1529 {
1530 lto_output_uleb128_stream (ob->main_stream, i);
1531 output_node_opt_summary (ob, node);
1532 }
1533 }
1534 produce_asm (ob, NULL);
1535 destroy_output_block (ob);
1536 }
1537
/* Input optimization summary of NODE.  */
1539
1540 static void
1541 input_node_opt_summary (struct cgraph_node *node,
1542 struct lto_input_block *ib_main,
1543 struct data_in *data_in)
1544 {
1545 int i;
1546 int count;
1547 int bit;
1548 struct bitpack_d bp;
1549
1550 count = lto_input_uleb128 (ib_main);
1551 if (count)
1552 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
1553 for (i = 0; i < count; i++)
1554 {
1555 bit = lto_input_uleb128 (ib_main);
1556 bitmap_set_bit (node->clone.args_to_skip, bit);
1557 }
1558 count = lto_input_uleb128 (ib_main);
1559 if (count)
1560 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
1561 for (i = 0; i < count; i++)
1562 {
1563 bit = lto_input_uleb128 (ib_main);
1564 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
1565 }
1566 count = lto_input_uleb128 (ib_main);
1567 for (i = 0; i < count; i++)
1568 {
1569 int parm_num;
1570 tree parm;
1571 struct ipa_replace_map *map = ggc_alloc_ipa_replace_map ();
1572
1573 VEC_safe_push (ipa_replace_map_p, gc, node->clone.tree_map, map);
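      /* Only the parameter number is streamed; old_tree stays NULL because
	 there is no mechanism to stream function-local declarations (see
	 output_node_opt_summary).  */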
1574 for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm_num;
1575 parm = DECL_CHAIN (parm))
1576 parm_num --;
1577 map->parm_num = lto_input_uleb128 (ib_main);
1578 map->old_tree = NULL;
1579 map->new_tree = lto_input_tree (ib_main, data_in);
1580 bp = lto_input_bitpack (ib_main);
1581 map->replace_p = bp_unpack_value (&bp, 1);
1582 map->ref_p = bp_unpack_value (&bp, 1);
1583 }
1584 }
1585
1586 /* Read section in file FILE_DATA of length LEN with data DATA. */
1587
1588 static void
1589 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
1590 const char *data, size_t len, VEC (cgraph_node_ptr,
1591 heap) * nodes)
1592 {
1593 const struct lto_function_header *header =
1594 (const struct lto_function_header *) data;
1595 const int32_t cfg_offset = sizeof (struct lto_function_header);
1596 const int32_t main_offset = cfg_offset + header->cfg_size;
1597 const int32_t string_offset = main_offset + header->main_size;
1598 struct data_in *data_in;
1599 struct lto_input_block ib_main;
1600 unsigned int i;
1601 unsigned int count;
1602
1603 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
1604 header->main_size);
1605
1606 data_in =
1607 lto_data_in_create (file_data, (const char *) data + string_offset,
1608 header->string_size, NULL);
1609 count = lto_input_uleb128 (&ib_main);
1610
1611 for (i = 0; i < count; i++)
1612 {
1613 int ref = lto_input_uleb128 (&ib_main);
1614 input_node_opt_summary (VEC_index (cgraph_node_ptr, nodes, ref),
1615 &ib_main, data_in);
1616 }
  lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
			 len);
1619 lto_data_in_delete (data_in);
1620 }
1621
1622 /* Input optimization summary of cgraph. */
1623
1624 static void
1625 input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes)
1626 {
1627 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1628 struct lto_file_decl_data *file_data;
1629 unsigned int j = 0;
1630
1631 while ((file_data = file_data_vec[j++]))
1632 {
1633 size_t len;
1634 const char *data =
1635 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
1636 &len);
1637
1638 if (data)
1639 input_cgraph_opt_section (file_data, data, len, nodes);
1640 }
1641 }