gcc/lto-cgraph.c
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright (C) 2009-2013 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "expr.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "hashtab.h"
33 #include "langhooks.h"
34 #include "basic-block.h"
35 #include "tree-flow.h"
36 #include "cgraph.h"
37 #include "function.h"
38 #include "ggc.h"
39 #include "diagnostic-core.h"
40 #include "except.h"
41 #include "vec.h"
42 #include "timevar.h"
43 #include "pointer-set.h"
44 #include "lto-streamer.h"
45 #include "data-streamer.h"
46 #include "tree-streamer.h"
47 #include "gcov-io.h"
48 #include "tree-pass.h"
49 #include "profile.h"
50 #include "context.h"
51 #include "pass_manager.h"
52
53 static void output_cgraph_opt_summary (void);
54 static void input_cgraph_opt_summary (vec<symtab_node> nodes);
55
56 /* Number of LDPR values known to GCC. */
57 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
58
 59 /* All node orders are offset by ORDER_BASE.  */
60 static int order_base;
61
 62 /* Cgraph streaming is organized as a set of records whose type
 63 is indicated by a tag.  */
64 enum LTO_symtab_tags
65 {
66 /* Must leave 0 for the stopper. */
67
68 /* Cgraph node without body available. */
69 LTO_symtab_unavail_node = 1,
70 /* Cgraph node with function body. */
71 LTO_symtab_analyzed_node,
72 /* Cgraph edges. */
73 LTO_symtab_edge,
74 LTO_symtab_indirect_edge,
75 LTO_symtab_variable,
76 LTO_symtab_last_tag
77 };
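/* A symtab section is thus a sequence of records: each record starts with
   one of the tags above (written with streamer_write_enum) and is followed
   by the payload produced by lto_output_node, lto_output_varpool_node or
   lto_output_edge; a zero tag terminates the section (see output_symtab
   and input_cgraph_1 below).  */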
78
 79 /* Create a new symtab encoder.
 80 If FOR_INPUT, the encoder allocates only the data structures needed
 81 to read the symtab.  */
82
83 lto_symtab_encoder_t
84 lto_symtab_encoder_new (bool for_input)
85 {
86 lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
87
88 if (!for_input)
89 encoder->map = pointer_map_create ();
90 encoder->nodes.create (0);
91 return encoder;
92 }
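/* Note that input encoders only ever have nodes appended and dereferenced
   by index, which is why the pointer map used for reverse lookups is left
   unallocated when FOR_INPUT is true.  */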
93
94
95 /* Delete ENCODER and its components. */
96
97 void
98 lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
99 {
100 encoder->nodes.release ();
101 if (encoder->map)
102 pointer_map_destroy (encoder->map);
103 free (encoder);
104 }
105
106
 107 /* Return the existing reference number of NODE in the symtab ENCODER.
 108 Assign a new reference if this is the first time
 109 NODE is encoded.  */
110
111 int
112 lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
113 symtab_node node)
114 {
115 int ref;
116 void **slot;
117
118 if (!encoder->map)
119 {
120 lto_encoder_entry entry = {node, false, false, false};
121
122 ref = encoder->nodes.length ();
123 encoder->nodes.safe_push (entry);
124 return ref;
125 }
126
127 slot = pointer_map_contains (encoder->map, node);
128 if (!slot || !*slot)
129 {
130 lto_encoder_entry entry = {node, false, false, false};
131 ref = encoder->nodes.length ();
132 if (!slot)
133 slot = pointer_map_insert (encoder->map, node);
134 *slot = (void *) (intptr_t) (ref + 1);
135 encoder->nodes.safe_push (entry);
136 }
137 else
138 ref = (size_t) *slot - 1;
139
140 return ref;
141 }
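/* Note that the value stored in the pointer map above is biased by one
   (ref + 1) so that an empty slot can be told apart from reference 0;
   lookups and lto_symtab_encoder_delete_node subtract the bias again.  */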
142
143 /* Remove NODE from encoder. */
144
145 bool
146 lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
147 symtab_node node)
148 {
149 void **slot, **last_slot;
150 int index;
151 lto_encoder_entry last_node;
152
153 slot = pointer_map_contains (encoder->map, node);
154 if (slot == NULL || !*slot)
155 return false;
156
157 index = (size_t) *slot - 1;
158 gcc_checking_assert (encoder->nodes[index].node == node);
159
160 /* Remove from vector. We do this by swapping node with the last element
161 of the vector. */
162 last_node = encoder->nodes.pop ();
163 if (last_node.node != node)
164 {
165 last_slot = pointer_map_contains (encoder->map, last_node.node);
166 gcc_checking_assert (last_slot && *last_slot);
167 *last_slot = (void *)(size_t) (index + 1);
168
169 /* Move the last element to the original spot of NODE. */
170 encoder->nodes[index] = last_node;
171 }
172
173 /* Remove element from hash table. */
174 *slot = NULL;
175 return true;
176 }
177
178
 179 /* Return TRUE if we should encode the body of NODE (if any).  */
180
181 bool
182 lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
183 struct cgraph_node *node)
184 {
185 int index = lto_symtab_encoder_lookup (encoder, (symtab_node)node);
186 return encoder->nodes[index].body;
187 }
188
 189 /* Specify that we should encode the body of NODE (if any).  */
190
191 static void
192 lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
193 struct cgraph_node *node)
194 {
195 int index = lto_symtab_encoder_encode (encoder, (symtab_node)node);
196 gcc_checking_assert (encoder->nodes[index].node == (symtab_node)node);
197 encoder->nodes[index].body = true;
198 }
199
200 /* Return TRUE if we should encode initializer of NODE (if any). */
201
202 bool
203 lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
204 struct varpool_node *node)
205 {
206 int index = lto_symtab_encoder_lookup (encoder, (symtab_node)node);
207 if (index == LCC_NOT_FOUND)
208 return false;
209 return encoder->nodes[index].initializer;
210 }
211
 212 /* Specify that we should encode the initializer of NODE (if any).  */
213
214 static void
215 lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
216 struct varpool_node *node)
217 {
218 int index = lto_symtab_encoder_lookup (encoder, (symtab_node)node);
219 encoder->nodes[index].initializer = true;
220 }
221
 222 /* Return TRUE if NODE is in this partition.  */
223
224 bool
225 lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
226 symtab_node node)
227 {
228 int index = lto_symtab_encoder_lookup (encoder, (symtab_node)node);
229 if (index == LCC_NOT_FOUND)
230 return false;
231 return encoder->nodes[index].in_partition;
232 }
233
 234 /* Specify that NODE is in this partition.  */
235
236 void
237 lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
238 symtab_node node)
239 {
240 int index = lto_symtab_encoder_encode (encoder, (symtab_node)node);
241 encoder->nodes[index].in_partition = true;
242 }
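/* An illustrative sketch of how the accessors above fit together (the real
   users are compute_ltrans_boundary below and the stream-out code, e.g. in
   lto-streamer-out.c):

     lto_symtab_encoder_t enc = lto_symtab_encoder_new (false);
     lto_set_symtab_encoder_in_partition (enc, (symtab_node) node);
     ...
     if (lto_symtab_encoder_encode_body_p (enc, node))
       ... the body of NODE is streamed into this partition as well ...  */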
243
244 /* Output the cgraph EDGE to OB using ENCODER. */
245
246 static void
247 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
248 lto_symtab_encoder_t encoder)
249 {
250 unsigned int uid;
251 intptr_t ref;
252 struct bitpack_d bp;
253
254 if (edge->indirect_unknown_callee)
255 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
256 LTO_symtab_indirect_edge);
257 else
258 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
259 LTO_symtab_edge);
260
261 ref = lto_symtab_encoder_lookup (encoder, (symtab_node)edge->caller);
262 gcc_assert (ref != LCC_NOT_FOUND);
263 streamer_write_hwi_stream (ob->main_stream, ref);
264
265 if (!edge->indirect_unknown_callee)
266 {
267 ref = lto_symtab_encoder_lookup (encoder, (symtab_node)edge->callee);
268 gcc_assert (ref != LCC_NOT_FOUND);
269 streamer_write_hwi_stream (ob->main_stream, ref);
270 }
271
272 streamer_write_gcov_count_stream (ob->main_stream, edge->count);
273
274 bp = bitpack_create (ob->main_stream);
275 uid = (!gimple_has_body_p (edge->caller->symbol.decl)
276 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt));
277 bp_pack_enum (&bp, cgraph_inline_failed_enum,
278 CIF_N_REASONS, edge->inline_failed);
279 bp_pack_var_len_unsigned (&bp, uid);
280 bp_pack_var_len_unsigned (&bp, edge->frequency);
281 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
282 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
283 bp_pack_value (&bp, edge->can_throw_external, 1);
284 if (edge->indirect_unknown_callee)
285 {
286 int flags = edge->indirect_info->ecf_flags;
287 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
288 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
289 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
290 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
291 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
292 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
293 /* Flags that should not appear on indirect calls. */
294 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
295 | ECF_MAY_BE_ALLOCA
296 | ECF_SIBCALL
297 | ECF_LEAF
298 | ECF_NOVOPS)));
299 }
300 streamer_write_bitpack (&bp);
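  /* NB: the pack order above (inline_failed, statement uid, frequency and
     then the boolean flags) must stay in sync with the unpack order in
     input_edge below.  */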
301 }
302
 303 /* Return TRUE if LIST contains references from other partitions.  */
304
305 bool
306 referenced_from_other_partition_p (struct ipa_ref_list *list, lto_symtab_encoder_t encoder)
307 {
308 int i;
309 struct ipa_ref *ref;
310 for (i = 0; ipa_ref_list_referring_iterate (list, i, ref); i++)
311 {
312 if (ref->referring->symbol.in_other_partition
313 || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
314 return true;
315 }
316 return false;
317 }
318
 319 /* Return TRUE when NODE is reachable from another partition.  */
320
321 bool
322 reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
323 {
324 struct cgraph_edge *e;
325 if (!node->symbol.definition)
326 return false;
327 if (node->global.inlined_to)
328 return false;
329 for (e = node->callers; e; e = e->next_caller)
330 if (e->caller->symbol.in_other_partition
331 || !lto_symtab_encoder_in_partition_p (encoder, (symtab_node)e->caller))
332 return true;
333 return false;
334 }
335
 336 /* Return TRUE if LIST contains references from this partition.  */
337
338 bool
339 referenced_from_this_partition_p (struct ipa_ref_list *list,
340 lto_symtab_encoder_t encoder)
341 {
342 int i;
343 struct ipa_ref *ref;
344 for (i = 0; ipa_ref_list_referring_iterate (list, i, ref); i++)
345 if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
346 return true;
347 return false;
348 }
349
 350 /* Return TRUE when NODE is called from within this partition.  */
351
352 bool
353 reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
354 {
355 struct cgraph_edge *e;
356 for (e = node->callers; e; e = e->next_caller)
357 if (lto_symtab_encoder_in_partition_p (encoder, (symtab_node)e->caller))
358 return true;
359 return false;
360 }
361
 362 /* Output the cgraph NODE to OB. ENCODER is used to find the
 363 reference number of NODE->inlined_to and it describes the set of
 364 nodes we are writing to the current file (referred to as SET
 365 below). If NODE is not in SET, then NODE is a boundary node and
 366 we pretend NODE just has a decl and no callees.  */
369
370 static void
371 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
372 lto_symtab_encoder_t encoder)
373 {
374 unsigned int tag;
375 struct bitpack_d bp;
376 bool boundary_p;
377 intptr_t ref;
378 bool in_other_partition = false;
379 struct cgraph_node *clone_of;
380 struct ipa_opt_pass_d *pass;
381 int i;
382 bool alias_p;
383
384 boundary_p = !lto_symtab_encoder_in_partition_p (encoder, (symtab_node)node);
385
386 if (node->symbol.analyzed && !boundary_p)
387 tag = LTO_symtab_analyzed_node;
388 else
389 tag = LTO_symtab_unavail_node;
390
391 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
392 tag);
393 streamer_write_hwi_stream (ob->main_stream, node->symbol.order);
394
395 /* In WPA mode, we only output part of the call-graph. Also, we
 396 fake cgraph node attributes. There are two cases we care about:
397
398 Boundary nodes: There are nodes that are not part of SET but are
399 called from within SET. We artificially make them look like
400 externally visible nodes with no function body.
401
402 Cherry-picked nodes: These are nodes we pulled from other
 403 translation units into SET during IPA-inlining. We make them
 404 local static nodes to prevent clashes with other local statics. */
405 if (boundary_p && node->symbol.analyzed && !DECL_EXTERNAL (node->symbol.decl))
406 {
 407 /* Inline clones cannot be part of a boundary.
 408 gcc_assert (!node->global.inlined_to);
 409
 410 FIXME: At the moment they can be, when the partition contains an inline
 411 clone that is a clone of an inline clone from outside the partition. We
 412 can reshape the clone tree and make another node the root, but it needs a
 413 bit of extra work and will be promptly done by cgraph_remove_node after
 414 reading back. */
415 in_other_partition = 1;
416 }
417
418 clone_of = node->clone_of;
419 while (clone_of
420 && (ref = lto_symtab_encoder_lookup (encoder, (symtab_node)clone_of)) == LCC_NOT_FOUND)
421 if (clone_of->prev_sibling_clone)
422 clone_of = clone_of->prev_sibling_clone;
423 else
424 clone_of = clone_of->clone_of;
425
 426 if (tag == LTO_symtab_analyzed_node)
427 gcc_assert (clone_of || !node->clone_of);
428 if (!clone_of)
429 streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
430 else
431 streamer_write_hwi_stream (ob->main_stream, ref);
432
433
434 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->symbol.decl);
435 streamer_write_gcov_count_stream (ob->main_stream, node->count);
436 streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
437
438 streamer_write_hwi_stream (ob->main_stream,
439 node->ipa_transforms_to_apply.length ());
440 FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
441 streamer_write_hwi_stream (ob->main_stream, pass->static_pass_number);
442
443 if (tag == LTO_symtab_analyzed_node)
444 {
445 if (node->global.inlined_to)
446 {
447 ref = lto_symtab_encoder_lookup (encoder, (symtab_node)node->global.inlined_to);
448 gcc_assert (ref != LCC_NOT_FOUND);
449 }
450 else
451 ref = LCC_NOT_FOUND;
452
453 streamer_write_hwi_stream (ob->main_stream, ref);
454 }
455
456 if (node->symbol.same_comdat_group && !boundary_p)
457 {
458 ref = lto_symtab_encoder_lookup (encoder,
459 node->symbol.same_comdat_group);
460 gcc_assert (ref != LCC_NOT_FOUND);
461 }
462 else
463 ref = LCC_NOT_FOUND;
464 streamer_write_hwi_stream (ob->main_stream, ref);
465
466 bp = bitpack_create (ob->main_stream);
467 bp_pack_value (&bp, node->local.local, 1);
468 bp_pack_value (&bp, node->symbol.externally_visible, 1);
469 bp_pack_value (&bp, node->symbol.definition, 1);
470 bp_pack_value (&bp, node->local.versionable, 1);
471 bp_pack_value (&bp, node->local.can_change_signature, 1);
472 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
473 bp_pack_value (&bp, node->symbol.force_output, 1);
474 bp_pack_value (&bp, node->symbol.forced_by_abi, 1);
475 bp_pack_value (&bp, node->symbol.unique_name, 1);
476 bp_pack_value (&bp, node->symbol.address_taken, 1);
477 bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
478 && !DECL_EXTERNAL (node->symbol.decl)
479 && !DECL_COMDAT (node->symbol.decl)
480 && (reachable_from_other_partition_p (node, encoder)
481 || referenced_from_other_partition_p (&node->symbol.ref_list,
482 encoder)), 1);
483 bp_pack_value (&bp, node->lowered, 1);
484 bp_pack_value (&bp, in_other_partition, 1);
485 /* Real aliases in a boundary become non-aliases. However we still stream
486 alias info on weakrefs.
 487 TODO: We lose a bit of information here - when we know that a variable is
 488 defined in another unit, we may use the info on aliases to resolve
 489 symbol1 != symbol2 type tests that we can otherwise do only for locally
 490 defined objects. */
491 alias_p = node->symbol.alias && (!boundary_p || node->symbol.weakref);
492 bp_pack_value (&bp, alias_p, 1);
493 bp_pack_value (&bp, node->symbol.weakref, 1);
494 bp_pack_value (&bp, node->frequency, 2);
495 bp_pack_value (&bp, node->only_called_at_startup, 1);
496 bp_pack_value (&bp, node->only_called_at_exit, 1);
497 bp_pack_value (&bp, node->tm_clone, 1);
498 bp_pack_value (&bp, node->thunk.thunk_p && !boundary_p, 1);
499 bp_pack_enum (&bp, ld_plugin_symbol_resolution,
500 LDPR_NUM_KNOWN, node->symbol.resolution);
501 streamer_write_bitpack (&bp);
502
503 if (node->thunk.thunk_p && !boundary_p)
504 {
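      /* Pack the thunk kind into a small mask: bit 0 is always set, bit 1
         is this_adjusting and bit 2 is virtual_offset_p; input_node decodes
         it with (type & 2) and (type & 4).  */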
505 streamer_write_uhwi_stream
506 (ob->main_stream,
507 1 + (node->thunk.this_adjusting != 0) * 2
508 + (node->thunk.virtual_offset_p != 0) * 4);
509 streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
510 streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
511 }
512 }
513
514 /* Output the varpool NODE to OB.
 515 If NODE is not in the partition, then NODE is a boundary. */
516
517 static void
518 lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
519 lto_symtab_encoder_t encoder)
520 {
521 bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, (symtab_node)node);
522 struct bitpack_d bp;
523 int ref;
524 bool alias_p;
525
526 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
527 LTO_symtab_variable);
528 streamer_write_hwi_stream (ob->main_stream, node->symbol.order);
529 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->symbol.decl);
530 bp = bitpack_create (ob->main_stream);
531 bp_pack_value (&bp, node->symbol.externally_visible, 1);
532 bp_pack_value (&bp, node->symbol.force_output, 1);
533 bp_pack_value (&bp, node->symbol.forced_by_abi, 1);
534 bp_pack_value (&bp, node->symbol.unique_name, 1);
535 bp_pack_value (&bp, node->symbol.definition, 1);
536 alias_p = node->symbol.alias && (!boundary_p || node->symbol.weakref);
537 bp_pack_value (&bp, alias_p, 1);
538 bp_pack_value (&bp, node->symbol.weakref, 1);
539 bp_pack_value (&bp, node->symbol.analyzed && !boundary_p, 1);
540 gcc_assert (node->symbol.definition || !node->symbol.analyzed);
541 /* Constant pool initializers can be de-unified into individual ltrans units.
 542 FIXME: Alternatively at -Os we may want to avoid generating local labels
 543 for them and share them across LTRANS partitions. */
544 if (DECL_IN_CONSTANT_POOL (node->symbol.decl)
545 && !DECL_EXTERNAL (node->symbol.decl)
546 && !DECL_COMDAT (node->symbol.decl))
547 {
 548 bp_pack_value (&bp, 0, 1); /* used_from_other_partition. */
549 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
550 }
551 else
552 {
553 bp_pack_value (&bp, node->symbol.definition
554 && referenced_from_other_partition_p (&node->symbol.ref_list,
555 encoder), 1);
556 bp_pack_value (&bp, node->symbol.analyzed
557 && boundary_p && !DECL_EXTERNAL (node->symbol.decl), 1);
558 /* in_other_partition. */
559 }
560 streamer_write_bitpack (&bp);
561 if (node->symbol.same_comdat_group && !boundary_p)
562 {
563 ref = lto_symtab_encoder_lookup (encoder,
564 node->symbol.same_comdat_group);
565 gcc_assert (ref != LCC_NOT_FOUND);
566 }
567 else
568 ref = LCC_NOT_FOUND;
569 streamer_write_hwi_stream (ob->main_stream, ref);
570 streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
571 LDPR_NUM_KNOWN, node->symbol.resolution);
572 }
573
 574 /* Output the IPA reference REF to OB.
 575 ENCODER is used to look up the index of the referred symbol. */
576
577 static void
578 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
579 lto_symtab_encoder_t encoder)
580 {
581 struct bitpack_d bp;
582 int nref;
583
584 bp = bitpack_create (ob->main_stream);
585 bp_pack_value (&bp, ref->use, 2);
586 streamer_write_bitpack (&bp);
587 nref = lto_symtab_encoder_lookup (encoder, ref->referred);
588 gcc_assert (nref != LCC_NOT_FOUND);
589 streamer_write_hwi_stream (ob->main_stream, nref);
590 }
591
592 /* Stream out profile_summary to OB. */
593
594 static void
595 output_profile_summary (struct lto_simple_output_block *ob)
596 {
597 unsigned h_ix;
598 struct bitpack_d bp;
599
600 if (profile_info)
601 {
602 /* We do not output num and run_max, they are not used by
603 GCC profile feedback and they are difficult to merge from multiple
604 units. */
605 gcc_assert (profile_info->runs);
606 streamer_write_uhwi_stream (ob->main_stream, profile_info->runs);
607 streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_max);
608
609 /* sum_all is needed for computing the working set with the
610 histogram. */
611 streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_all);
612
 613 /* Create and output a bitpack of non-zero histogram entry indices. */
614 bp = bitpack_create (ob->main_stream);
615 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
616 bp_pack_value (&bp, profile_info->histogram[h_ix].num_counters > 0, 1);
617 streamer_write_bitpack (&bp);
618 /* Now stream out only those non-zero entries. */
619 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
620 {
621 if (!profile_info->histogram[h_ix].num_counters)
622 continue;
623 streamer_write_gcov_count_stream (ob->main_stream,
624 profile_info->histogram[h_ix].num_counters);
625 streamer_write_gcov_count_stream (ob->main_stream,
626 profile_info->histogram[h_ix].min_value);
627 streamer_write_gcov_count_stream (ob->main_stream,
628 profile_info->histogram[h_ix].cum_value);
629 }
630 /* IPA-profile computes hot bb threshold based on cumulated
631 whole program profile. We need to stream it down to ltrans. */
632 if (flag_wpa)
633 streamer_write_gcov_count_stream (ob->main_stream,
634 get_hot_bb_threshold ());
635 }
636 else
637 streamer_write_uhwi_stream (ob->main_stream, 0);
638 }
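/* A runs count of zero written above means that no profile data was
   available; the reader side of this format is input_profile_summary
   below.  */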
639
640 /* Output all callees or indirect outgoing edges. EDGE must be the first such
641 edge. */
642
643 static void
644 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
645 struct lto_simple_output_block *ob,
646 lto_symtab_encoder_t encoder)
647 {
648 if (!edge)
649 return;
650
 651 /* Output edges in backward direction, so the reconstructed callgraph matches
 652 and it is easy to associate call sites in the IPA pass summaries. */
653 while (edge->next_callee)
654 edge = edge->next_callee;
655 for (; edge; edge = edge->prev_callee)
656 lto_output_edge (ob, edge, encoder);
657 }
658
 659 /* Output the IPA reference lists of the symbols in ENCODER's partition. */
660
661 static void
662 output_refs (lto_symtab_encoder_t encoder)
663 {
664 lto_symtab_encoder_iterator lsei;
665 struct lto_simple_output_block *ob;
666 int count;
667 struct ipa_ref *ref;
668 int i;
669
670 ob = lto_create_simple_output_block (LTO_section_refs);
671
672 for (lsei = lsei_start_in_partition (encoder); !lsei_end_p (lsei);
673 lsei_next_in_partition (&lsei))
674 {
675 symtab_node node = lsei_node (lsei);
676
677 count = ipa_ref_list_nreferences (&node->symbol.ref_list);
678 if (count)
679 {
680 streamer_write_gcov_count_stream (ob->main_stream, count);
681 streamer_write_uhwi_stream (ob->main_stream,
682 lto_symtab_encoder_lookup (encoder, node));
683 for (i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list,
684 i, ref); i++)
685 lto_output_ref (ob, ref, encoder);
686 }
687 }
688
689 streamer_write_uhwi_stream (ob->main_stream, 0);
690
691 lto_destroy_simple_output_block (ob);
692 }
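/* The LTO_section_refs layout produced above is a sequence of
   (reference count, referring symbol index, references...) groups
   terminated by a zero count; input_refs below reads it back.  */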
693
 694 /* Add NODE into encoder as well as nodes it is cloned from.
 695 Do it so that the origin of every clone appears before the clone itself. */
696
697 static void
698 add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
699 bool include_body)
700 {
701 if (node->clone_of)
702 add_node_to (encoder, node->clone_of, include_body);
703 else if (include_body)
704 lto_set_symtab_encoder_encode_body (encoder, node);
705 lto_symtab_encoder_encode (encoder, (symtab_node)node);
706 }
707
708 /* Add all references in LIST to encoders. */
709
710 static void
711 add_references (lto_symtab_encoder_t encoder,
712 struct ipa_ref_list *list)
713 {
714 int i;
715 struct ipa_ref *ref;
716 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
717 if (is_a <cgraph_node> (ref->referred))
718 add_node_to (encoder, ipa_ref_node (ref), false);
719 else
720 lto_symtab_encoder_encode (encoder, ref->referred);
721 }
722
723 /* Find all symbols we want to stream into given partition and insert them
724 to encoders.
725
 726 The function actually replaces IN_ENCODER by a new one. The reason is that
 727 the streaming code needs a clone's origin to be streamed before the clone.
 728 This means that we need to insert the nodes in a specific order, an order
 729 that the earlier partitioning logic ignores. */
730
731 lto_symtab_encoder_t
732 compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
733 {
734 struct cgraph_node *node;
735 struct cgraph_edge *edge;
736 int i;
737 lto_symtab_encoder_t encoder;
738 lto_symtab_encoder_iterator lsei;
739
740 encoder = lto_symtab_encoder_new (false);
741
742 /* Go over all entries in the IN_ENCODER and duplicate them to
743 ENCODER. At the same time insert masters of clones so
744 every master appears before clone. */
745 for (lsei = lsei_start_function_in_partition (in_encoder);
746 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
747 {
748 node = lsei_cgraph_node (lsei);
749 add_node_to (encoder, node, true);
750 lto_set_symtab_encoder_in_partition (encoder, (symtab_node)node);
751 add_references (encoder, &node->symbol.ref_list);
752 /* For proper debug info, we need to ship the origins, too. */
753 if (DECL_ABSTRACT_ORIGIN (node->symbol.decl))
754 {
755 struct cgraph_node *origin_node
756 = cgraph_get_node (DECL_ABSTRACT_ORIGIN (node->symbol.decl));
757 add_node_to (encoder, origin_node, true);
758 }
759 }
760 for (lsei = lsei_start_variable_in_partition (in_encoder);
761 !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
762 {
763 struct varpool_node *vnode = lsei_varpool_node (lsei);
764
765 lto_set_symtab_encoder_in_partition (encoder, (symtab_node)vnode);
766 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
767 add_references (encoder, &vnode->symbol.ref_list);
768 /* For proper debug info, we need to ship the origins, too. */
769 if (DECL_ABSTRACT_ORIGIN (vnode->symbol.decl))
770 {
771 struct varpool_node *origin_node
 772 = varpool_get_node (DECL_ABSTRACT_ORIGIN (vnode->symbol.decl));
773 lto_set_symtab_encoder_in_partition (encoder, (symtab_node)origin_node);
774 }
775 }
 776 /* Also pickle in the initializers of all referenced read-only variables
 777 to help folding. Constant pool variables are not shared, so we must
 778 pickle those too. */
779 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
780 {
781 symtab_node node = lto_symtab_encoder_deref (encoder, i);
782 if (varpool_node *vnode = dyn_cast <varpool_node> (node))
783 {
784 if (!lto_symtab_encoder_encode_initializer_p (encoder,
785 vnode)
786 && ctor_for_folding (vnode->symbol.decl) != error_mark_node)
787 {
788 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
789 add_references (encoder, &vnode->symbol.ref_list);
790 }
791 }
792 }
793
 794 /* Go over all the nodes again to include callees that are not in
 795 the partition. */
796 for (lsei = lsei_start_function_in_partition (encoder);
797 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
798 {
799 node = lsei_cgraph_node (lsei);
800 for (edge = node->callees; edge; edge = edge->next_callee)
801 {
802 struct cgraph_node *callee = edge->callee;
803 if (!lto_symtab_encoder_in_partition_p (encoder, (symtab_node)callee))
804 {
805 /* We should have moved all the inlines. */
806 gcc_assert (!callee->global.inlined_to);
807 add_node_to (encoder, callee, false);
808 }
809 }
810 }
811 lto_symtab_encoder_delete (in_encoder);
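  /* IN_ENCODER has just been freed, so callers must use the returned
     encoder for all further streaming.  */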
812 return encoder;
813 }
814
 815 /* Output the symtab nodes of the current partition. */
816
817 void
818 output_symtab (void)
819 {
820 struct cgraph_node *node;
821 struct lto_simple_output_block *ob;
822 lto_symtab_encoder_iterator lsei;
823 int i, n_nodes;
824 lto_symtab_encoder_t encoder;
825 static bool asm_nodes_output = false;
826
827 if (flag_wpa)
828 output_cgraph_opt_summary ();
829
830 ob = lto_create_simple_output_block (LTO_section_symtab_nodes);
831
832 output_profile_summary (ob);
833
834 /* An encoder for cgraph nodes should have been created by
835 ipa_write_summaries_1. */
836 gcc_assert (ob->decl_state->symtab_node_encoder);
837 encoder = ob->decl_state->symtab_node_encoder;
838
 839 /* Write out the nodes. We must first output a node and then its clones,
 840 otherwise, when reading the node back, there would be nothing to clone
 841 from. */
842 n_nodes = lto_symtab_encoder_size (encoder);
843 for (i = 0; i < n_nodes; i++)
844 {
845 symtab_node node = lto_symtab_encoder_deref (encoder, i);
846 if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
847 lto_output_node (ob, cnode, encoder);
848 else
849 lto_output_varpool_node (ob, varpool (node), encoder);
850
851 }
852
853 /* Go over the nodes in SET again to write edges. */
854 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
855 lsei_next_function_in_partition (&lsei))
856 {
857 node = lsei_cgraph_node (lsei);
858 output_outgoing_cgraph_edges (node->callees, ob, encoder);
859 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
860 }
861
862 streamer_write_uhwi_stream (ob->main_stream, 0);
863
864 lto_destroy_simple_output_block (ob);
865
 866 /* Emit toplevel asms.
 867 When doing WPA we must output every asm just once. Since we do not
 868 partition asm nodes at all, output them to the first output. This is kind
 869 of a hack, but it should work well. */
870 if (!asm_nodes_output)
871 {
872 asm_nodes_output = true;
873 lto_output_toplevel_asms ();
874 }
875
876 output_refs (encoder);
877 }
878
879 /* Overwrite the information in NODE based on FILE_DATA, TAG, FLAGS,
880 STACK_SIZE, SELF_TIME and SELF_SIZE. This is called either to initialize
881 NODE or to replace the values in it, for instance because the first
882 time we saw it, the function body was not available but now it
883 is. BP is a bitpack with all the bitflags for NODE read from the
884 stream. */
885
886 static void
887 input_overwrite_node (struct lto_file_decl_data *file_data,
888 struct cgraph_node *node,
889 enum LTO_symtab_tags tag,
890 struct bitpack_d *bp)
891 {
892 node->symbol.aux = (void *) tag;
893 node->symbol.lto_file_data = file_data;
894
895 node->local.local = bp_unpack_value (bp, 1);
896 node->symbol.externally_visible = bp_unpack_value (bp, 1);
897 node->symbol.definition = bp_unpack_value (bp, 1);
898 node->local.versionable = bp_unpack_value (bp, 1);
899 node->local.can_change_signature = bp_unpack_value (bp, 1);
900 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
901 node->symbol.force_output = bp_unpack_value (bp, 1);
902 node->symbol.forced_by_abi = bp_unpack_value (bp, 1);
903 node->symbol.unique_name = bp_unpack_value (bp, 1);
904 node->symbol.address_taken = bp_unpack_value (bp, 1);
905 node->symbol.used_from_other_partition = bp_unpack_value (bp, 1);
906 node->lowered = bp_unpack_value (bp, 1);
907 node->symbol.analyzed = tag == LTO_symtab_analyzed_node;
908 node->symbol.in_other_partition = bp_unpack_value (bp, 1);
909 if (node->symbol.in_other_partition
910 /* Avoid updating decl when we are seeing just inline clone.
911 When inlining function that has functions already inlined into it,
912 we produce clones of inline clones.
913
914 WPA partitioning might put each clone into different unit and
915 we might end up streaming inline clone from other partition
916 to support clone we are interested in. */
917 && (!node->clone_of
918 || node->clone_of->symbol.decl != node->symbol.decl))
919 {
920 DECL_EXTERNAL (node->symbol.decl) = 1;
921 TREE_STATIC (node->symbol.decl) = 0;
922 }
923 node->symbol.alias = bp_unpack_value (bp, 1);
924 node->symbol.weakref = bp_unpack_value (bp, 1);
925 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
926 node->only_called_at_startup = bp_unpack_value (bp, 1);
927 node->only_called_at_exit = bp_unpack_value (bp, 1);
928 node->tm_clone = bp_unpack_value (bp, 1);
929 node->thunk.thunk_p = bp_unpack_value (bp, 1);
930 node->symbol.resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
931 LDPR_NUM_KNOWN);
932 }
933
 934 /* Return the name of the symbol DECL is an alias of, taken from its "alias" attribute. */
935
936 static tree
937 get_alias_symbol (tree decl)
938 {
939 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
940 return get_identifier (TREE_STRING_POINTER
941 (TREE_VALUE (TREE_VALUE (alias))));
942 }
943
944 /* Read a node from input_block IB. TAG is the node's tag just read.
 945 Return the node read or overwritten. */
946
947 static struct cgraph_node *
948 input_node (struct lto_file_decl_data *file_data,
949 struct lto_input_block *ib,
950 enum LTO_symtab_tags tag,
951 vec<symtab_node> nodes)
952 {
953 gcc::pass_manager *passes = g->get_passes ();
954 tree fn_decl;
955 struct cgraph_node *node;
956 struct bitpack_d bp;
957 unsigned decl_index;
958 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
959 int clone_ref;
960 int order;
961 int i, count;
962
963 order = streamer_read_hwi (ib) + order_base;
964 clone_ref = streamer_read_hwi (ib);
965
966 decl_index = streamer_read_uhwi (ib);
967 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
968
969 if (clone_ref != LCC_NOT_FOUND)
970 {
971 node = cgraph_clone_node (cgraph (nodes[clone_ref]), fn_decl,
972 0, CGRAPH_FREQ_BASE, false,
973 vNULL, false, NULL);
974 }
975 else
976 {
 977 /* Declarations of functions may already be merged with a declaration
 978 from another input file. We keep the cgraph unmerged until after streaming
 979 of ipa passes is done. Always force creation of a fresh node. */
980 node = cgraph_create_empty_node ();
981 node->symbol.decl = fn_decl;
982 symtab_register_node ((symtab_node)node);
983 }
984
985 node->symbol.order = order;
986 if (order >= symtab_order)
987 symtab_order = order + 1;
988
989 node->count = streamer_read_gcov_count (ib);
990 node->count_materialization_scale = streamer_read_hwi (ib);
991
992 count = streamer_read_hwi (ib);
993 node->ipa_transforms_to_apply = vNULL;
994 for (i = 0; i < count; i++)
995 {
996 struct opt_pass *pass;
997 int pid = streamer_read_hwi (ib);
998
999 gcc_assert (pid < passes->passes_by_id_size);
1000 pass = passes->passes_by_id[pid];
1001 node->ipa_transforms_to_apply.safe_push ((struct ipa_opt_pass_d *) pass);
1002 }
1003
1004 if (tag == LTO_symtab_analyzed_node)
1005 ref = streamer_read_hwi (ib);
1006
1007 ref2 = streamer_read_hwi (ib);
1008
1009 /* Make sure that we have not read this node before. Nodes that
1010 have already been read will have their tag stored in the 'aux'
1011 field. Since built-in functions can be referenced in multiple
1012 functions, they are expected to be read more than once. */
1013 if (node->symbol.aux && !DECL_BUILT_IN (node->symbol.decl))
1014 internal_error ("bytecode stream: found multiple instances of cgraph "
1015 "node with uid %d", node->uid);
1016
1017 bp = streamer_read_bitpack (ib);
1018 input_overwrite_node (file_data, node, tag, &bp);
1019
1020 /* Store a reference for now, and fix up later to be a pointer. */
1021 node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
1022
1023 /* Store a reference for now, and fix up later to be a pointer. */
1024 node->symbol.same_comdat_group = (symtab_node) (intptr_t) ref2;
1025
1026 if (node->thunk.thunk_p)
1027 {
1028 int type = streamer_read_uhwi (ib);
1029 HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
1030 HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);
1031
1032 node->thunk.fixed_offset = fixed_offset;
1033 node->thunk.this_adjusting = (type & 2);
1034 node->thunk.virtual_value = virtual_value;
1035 node->thunk.virtual_offset_p = (type & 4);
1036 }
1037 if (node->symbol.alias && !node->symbol.analyzed && node->symbol.weakref)
1038 node->symbol.alias_target = get_alias_symbol (node->symbol.decl);
1039 return node;
1040 }
1041
 1042 /* Read a varpool node from input_block IB.
 1043 Return the node read or overwritten. */
1044
1045 static struct varpool_node *
1046 input_varpool_node (struct lto_file_decl_data *file_data,
1047 struct lto_input_block *ib)
1048 {
1049 int decl_index;
1050 tree var_decl;
1051 struct varpool_node *node;
1052 struct bitpack_d bp;
1053 int ref = LCC_NOT_FOUND;
1054 int order;
1055
1056 order = streamer_read_hwi (ib) + order_base;
1057 decl_index = streamer_read_uhwi (ib);
1058 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1059
 1060 /* Declarations of variables may already be merged with a declaration
 1061 from another input file. We keep the symbol table unmerged until after
 1062 streaming of ipa passes is done. Always force creation of a fresh node. */
1063 node = varpool_create_empty_node ();
1064 node->symbol.decl = var_decl;
1065 symtab_register_node ((symtab_node)node);
1066
1067 node->symbol.order = order;
1068 if (order >= symtab_order)
1069 symtab_order = order + 1;
1070 node->symbol.lto_file_data = file_data;
1071
1072 bp = streamer_read_bitpack (ib);
1073 node->symbol.externally_visible = bp_unpack_value (&bp, 1);
1074 node->symbol.force_output = bp_unpack_value (&bp, 1);
1075 node->symbol.forced_by_abi = bp_unpack_value (&bp, 1);
1076 node->symbol.unique_name = bp_unpack_value (&bp, 1);
1077 node->symbol.definition = bp_unpack_value (&bp, 1);
1078 node->symbol.alias = bp_unpack_value (&bp, 1);
1079 node->symbol.weakref = bp_unpack_value (&bp, 1);
1080 node->symbol.analyzed = bp_unpack_value (&bp, 1);
1081 node->symbol.used_from_other_partition = bp_unpack_value (&bp, 1);
1082 node->symbol.in_other_partition = bp_unpack_value (&bp, 1);
1083 if (node->symbol.in_other_partition)
1084 {
1085 DECL_EXTERNAL (node->symbol.decl) = 1;
1086 TREE_STATIC (node->symbol.decl) = 0;
1087 }
1088 if (node->symbol.alias && !node->symbol.analyzed && node->symbol.weakref)
1089 node->symbol.alias_target = get_alias_symbol (node->symbol.decl);
1090 ref = streamer_read_hwi (ib);
1091 /* Store a reference for now, and fix up later to be a pointer. */
1092 node->symbol.same_comdat_group = (symtab_node) (intptr_t) ref;
1093 node->symbol.resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1094 LDPR_NUM_KNOWN);
1095
1096 return node;
1097 }
1098
 1099 /* Read an IPA reference for REFERRING_NODE from input_block IB and
 1100 register it; NODES maps stream indices to previously read symbols. */
1101
1102 static void
1103 input_ref (struct lto_input_block *ib,
1104 symtab_node referring_node,
1105 vec<symtab_node> nodes)
1106 {
1107 symtab_node node = NULL;
1108 struct bitpack_d bp;
1109 enum ipa_ref_use use;
1110
1111 bp = streamer_read_bitpack (ib);
1112 use = (enum ipa_ref_use) bp_unpack_value (&bp, 2);
1113 node = nodes[streamer_read_hwi (ib)];
1114 ipa_record_reference (referring_node, node, use, NULL);
1115 }
1116
1117 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1118 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1119 edge being read is indirect (in the sense that it has
1120 indirect_unknown_callee set). */
1121
1122 static void
1123 input_edge (struct lto_input_block *ib, vec<symtab_node> nodes,
1124 bool indirect)
1125 {
1126 struct cgraph_node *caller, *callee;
1127 struct cgraph_edge *edge;
1128 unsigned int stmt_id;
1129 gcov_type count;
1130 int freq;
1131 cgraph_inline_failed_t inline_failed;
1132 struct bitpack_d bp;
1133 int ecf_flags = 0;
1134
1135 caller = cgraph (nodes[streamer_read_hwi (ib)]);
1136 if (caller == NULL || caller->symbol.decl == NULL_TREE)
1137 internal_error ("bytecode stream: no caller found while reading edge");
1138
1139 if (!indirect)
1140 {
1141 callee = cgraph (nodes[streamer_read_hwi (ib)]);
1142 if (callee == NULL || callee->symbol.decl == NULL_TREE)
1143 internal_error ("bytecode stream: no callee found while reading edge");
1144 }
1145 else
1146 callee = NULL;
1147
1148 count = streamer_read_gcov_count (ib);
1149
1150 bp = streamer_read_bitpack (ib);
1151 inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_enum, CIF_N_REASONS);
1152 stmt_id = bp_unpack_var_len_unsigned (&bp);
1153 freq = (int) bp_unpack_var_len_unsigned (&bp);
1154
1155 if (indirect)
1156 edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq);
1157 else
1158 edge = cgraph_create_edge (caller, callee, NULL, count, freq);
1159
1160 edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
1161 edge->lto_stmt_uid = stmt_id;
1162 edge->inline_failed = inline_failed;
1163 edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
1164 edge->can_throw_external = bp_unpack_value (&bp, 1);
1165 if (indirect)
1166 {
1167 if (bp_unpack_value (&bp, 1))
1168 ecf_flags |= ECF_CONST;
1169 if (bp_unpack_value (&bp, 1))
1170 ecf_flags |= ECF_PURE;
1171 if (bp_unpack_value (&bp, 1))
1172 ecf_flags |= ECF_NORETURN;
1173 if (bp_unpack_value (&bp, 1))
1174 ecf_flags |= ECF_MALLOC;
1175 if (bp_unpack_value (&bp, 1))
1176 ecf_flags |= ECF_NOTHROW;
1177 if (bp_unpack_value (&bp, 1))
1178 ecf_flags |= ECF_RETURNS_TWICE;
1179 edge->indirect_info->ecf_flags = ecf_flags;
1180 }
1181 }
1182
1183
1184 /* Read a cgraph from IB using the info in FILE_DATA. */
1185
1186 static vec<symtab_node>
1187 input_cgraph_1 (struct lto_file_decl_data *file_data,
1188 struct lto_input_block *ib)
1189 {
1190 enum LTO_symtab_tags tag;
1191 vec<symtab_node> nodes = vNULL;
1192 symtab_node node;
1193 unsigned i;
1194
1195 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1196 order_base = symtab_order;
1197 while (tag)
1198 {
1199 if (tag == LTO_symtab_edge)
1200 input_edge (ib, nodes, false);
1201 else if (tag == LTO_symtab_indirect_edge)
1202 input_edge (ib, nodes, true);
1203 else if (tag == LTO_symtab_variable)
1204 {
1205 node = (symtab_node)input_varpool_node (file_data, ib);
1206 nodes.safe_push (node);
1207 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1208 }
1209 else
1210 {
1211 node = (symtab_node)input_node (file_data, ib, tag, nodes);
1212 if (node == NULL || node->symbol.decl == NULL_TREE)
1213 internal_error ("bytecode stream: found empty cgraph node");
1214 nodes.safe_push (node);
1215 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1216 }
1217
1218 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1219 }
1220
1221 lto_input_toplevel_asms (file_data, order_base);
1222
 1223 /* AUX pointers should all be non-zero for function nodes read from the stream. */
1224 #ifdef ENABLE_CHECKING
1225 FOR_EACH_VEC_ELT (nodes, i, node)
1226 gcc_assert (node->symbol.aux || !is_a <cgraph_node> (node));
1227 #endif
1228 FOR_EACH_VEC_ELT (nodes, i, node)
1229 {
1230 int ref;
1231 if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
1232 {
1233 ref = (int) (intptr_t) cnode->global.inlined_to;
1234
 1235 /* We share declarations of builtins, so we may read the same node twice. */
1236 if (!node->symbol.aux)
1237 continue;
1238 node->symbol.aux = NULL;
1239
1240 /* Fixup inlined_to from reference to pointer. */
1241 if (ref != LCC_NOT_FOUND)
1242 cgraph (node)->global.inlined_to = cgraph (nodes[ref]);
1243 else
1244 cnode->global.inlined_to = NULL;
1245 }
1246
1247 ref = (int) (intptr_t) node->symbol.same_comdat_group;
1248
1249 /* Fixup same_comdat_group from reference to pointer. */
1250 if (ref != LCC_NOT_FOUND)
1251 node->symbol.same_comdat_group = nodes[ref];
1252 else
1253 node->symbol.same_comdat_group = NULL;
1254 }
1255 FOR_EACH_VEC_ELT (nodes, i, node)
1256 node->symbol.aux = is_a <cgraph_node> (node) ? (void *)1 : NULL;
1257 return nodes;
1258 }
1259
1260 /* Input ipa_refs. */
1261
1262 static void
1263 input_refs (struct lto_input_block *ib,
1264 vec<symtab_node> nodes)
1265 {
1266 int count;
1267 int idx;
1268 while (true)
1269 {
1270 symtab_node node;
1271 count = streamer_read_uhwi (ib);
1272 if (!count)
1273 break;
1274 idx = streamer_read_uhwi (ib);
1275 node = nodes[idx];
1276 while (count)
1277 {
1278 input_ref (ib, node, nodes);
1279 count--;
1280 }
1281 }
1282 }
1283
1284
1285 static struct gcov_ctr_summary lto_gcov_summary;
1286
1287 /* Input profile_info from IB. */
1288 static void
1289 input_profile_summary (struct lto_input_block *ib,
1290 struct lto_file_decl_data *file_data)
1291 {
1292 unsigned h_ix;
1293 struct bitpack_d bp;
1294 unsigned int runs = streamer_read_uhwi (ib);
1295 if (runs)
1296 {
1297 file_data->profile_info.runs = runs;
1298 file_data->profile_info.sum_max = streamer_read_gcov_count (ib);
1299 file_data->profile_info.sum_all = streamer_read_gcov_count (ib);
1300
1301 memset (file_data->profile_info.histogram, 0,
1302 sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
1303 /* Input the bitpack of non-zero histogram indices. */
1304 bp = streamer_read_bitpack (ib);
1305 /* Read in and unpack the full bitpack, flagging non-zero
1306 histogram entries by setting the num_counters non-zero. */
1307 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1308 {
1309 file_data->profile_info.histogram[h_ix].num_counters
1310 = bp_unpack_value (&bp, 1);
1311 }
1312 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1313 {
1314 if (!file_data->profile_info.histogram[h_ix].num_counters)
1315 continue;
1316
1317 file_data->profile_info.histogram[h_ix].num_counters
1318 = streamer_read_gcov_count (ib);
1319 file_data->profile_info.histogram[h_ix].min_value
1320 = streamer_read_gcov_count (ib);
1321 file_data->profile_info.histogram[h_ix].cum_value
1322 = streamer_read_gcov_count (ib);
1323 }
1324 /* IPA-profile computes hot bb threshold based on cumulated
1325 whole program profile. We need to stream it down to ltrans. */
1326 if (flag_ltrans)
1327 set_hot_bb_threshold (streamer_read_gcov_count (ib));
1328 }
1329
1330 }
1331
1332 /* Rescale profile summaries to the same number of runs in the whole unit. */
1333
1334 static void
1335 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1336 {
1337 struct lto_file_decl_data *file_data;
1338 unsigned int j, h_ix;
1339 gcov_unsigned_t max_runs = 0;
1340 struct cgraph_node *node;
1341 struct cgraph_edge *edge;
1342 gcov_type saved_sum_all = 0;
1343 gcov_ctr_summary *saved_profile_info = 0;
1344 int saved_scale = 0;
1345
 1346 /* Find the unit with the maximal number of runs. If we ever get serious
 1347 about roundoff errors, we might also consider computing the least common
 1348 multiple. */
1349 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1350 if (max_runs < file_data->profile_info.runs)
1351 max_runs = file_data->profile_info.runs;
1352
1353 if (!max_runs)
1354 return;
1355
1356 /* Simple overflow check. We probably don't need to support that many train
 1357 runs. Such a large value probably implies data corruption anyway. */
1358 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1359 {
1360 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1361 INT_MAX / REG_BR_PROB_BASE);
1362 return;
1363 }
1364
1365 profile_info = &lto_gcov_summary;
1366 lto_gcov_summary.runs = max_runs;
1367 lto_gcov_summary.sum_max = 0;
1368 memset (lto_gcov_summary.histogram, 0,
1369 sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
1370
1371 /* Rescale all units to the maximal number of runs.
 1372 sum_max cannot be easily merged, as we have no idea what files come from
1373 the same run. We do not use the info anyway, so leave it 0. */
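  /* For example (illustrative numbers only): if one unit was trained with
     10 runs and another with 100, max_runs is 100 and the first unit gets
     scale = GCOV_COMPUTE_SCALE (100, 10), i.e. a factor of ten expressed in
     REG_BR_PROB_BASE fixed point; apply_scale then multiplies its summary
     counters (and later its node and edge counts) by that factor.  */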
1374 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1375 if (file_data->profile_info.runs)
1376 {
1377 int scale = GCOV_COMPUTE_SCALE (max_runs,
1378 file_data->profile_info.runs);
1379 lto_gcov_summary.sum_max
1380 = MAX (lto_gcov_summary.sum_max,
1381 apply_scale (file_data->profile_info.sum_max, scale));
1382 lto_gcov_summary.sum_all
1383 = MAX (lto_gcov_summary.sum_all,
1384 apply_scale (file_data->profile_info.sum_all, scale));
1385 /* Save a pointer to the profile_info with the largest
1386 scaled sum_all and the scale for use in merging the
1387 histogram. */
1388 if (!saved_profile_info
1389 || lto_gcov_summary.sum_all > saved_sum_all)
1390 {
1391 saved_profile_info = &file_data->profile_info;
1392 saved_sum_all = lto_gcov_summary.sum_all;
1393 saved_scale = scale;
1394 }
1395 }
1396
1397 gcc_assert (saved_profile_info);
1398
1399 /* Scale up the histogram from the profile that had the largest
1400 scaled sum_all above. */
1401 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1402 {
1403 /* Scale up the min value as we did the corresponding sum_all
1404 above. Use that to find the new histogram index. */
1405 gcov_type scaled_min
1406 = apply_scale (saved_profile_info->histogram[h_ix].min_value,
1407 saved_scale);
1408 /* The new index may be shared with another scaled histogram entry,
1409 so we need to account for a non-zero histogram entry at new_ix. */
1410 unsigned new_ix = gcov_histo_index (scaled_min);
1411 lto_gcov_summary.histogram[new_ix].min_value
1412 = (lto_gcov_summary.histogram[new_ix].num_counters
1413 ? MIN (lto_gcov_summary.histogram[new_ix].min_value, scaled_min)
1414 : scaled_min);
1415 /* Some of the scaled counter values would ostensibly need to be placed
1416 into different (larger) histogram buckets, but we keep things simple
1417 here and place the scaled cumulative counter value in the bucket
1418 corresponding to the scaled minimum counter value. */
1419 lto_gcov_summary.histogram[new_ix].cum_value
1420 += apply_scale (saved_profile_info->histogram[h_ix].cum_value,
1421 saved_scale);
1422 lto_gcov_summary.histogram[new_ix].num_counters
1423 += saved_profile_info->histogram[h_ix].num_counters;
1424 }
1425
1426 /* Watch roundoff errors. */
1427 if (lto_gcov_summary.sum_max < max_runs)
1428 lto_gcov_summary.sum_max = max_runs;
1429
 1430 /* If merging already happened at WPA time, we are done. */
1431 if (flag_ltrans)
1432 return;
1433
1434 /* Now compute count_materialization_scale of each node.
1435 During LTRANS we already have values of count_materialization_scale
1436 computed, so just update them. */
1437 FOR_EACH_FUNCTION (node)
1438 if (node->symbol.lto_file_data
1439 && node->symbol.lto_file_data->profile_info.runs)
1440 {
1441 int scale;
1442
1443 scale = RDIV (node->count_materialization_scale * max_runs,
1444 node->symbol.lto_file_data->profile_info.runs);
1445 node->count_materialization_scale = scale;
1446 if (scale < 0)
1447 fatal_error ("Profile information in %s corrupted",
1448 file_data->file_name);
1449
1450 if (scale == REG_BR_PROB_BASE)
1451 continue;
1452 for (edge = node->callees; edge; edge = edge->next_callee)
1453 edge->count = apply_scale (edge->count, scale);
1454 node->count = apply_scale (node->count, scale);
1455 }
1456 }
1457
1458 /* Input and merge the symtab from each of the .o files passed to
1459 lto1. */
1460
1461 void
1462 input_symtab (void)
1463 {
1464 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1465 struct lto_file_decl_data *file_data;
1466 unsigned int j = 0;
1467 struct cgraph_node *node;
1468
1469 while ((file_data = file_data_vec[j++]))
1470 {
1471 const char *data;
1472 size_t len;
1473 struct lto_input_block *ib;
1474 vec<symtab_node> nodes;
1475
1476 ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
1477 &data, &len);
1478 if (!ib)
1479 fatal_error ("cannot find LTO cgraph in %s", file_data->file_name);
1480 input_profile_summary (ib, file_data);
1481 file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
1482 nodes = input_cgraph_1 (file_data, ib);
1483 lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
1484 ib, data, len);
1485
1486 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1487 &data, &len);
1488 if (!ib)
1489 fatal_error("cannot find LTO section refs in %s", file_data->file_name);
1490 input_refs (ib, nodes);
1491 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1492 ib, data, len);
1493 if (flag_ltrans)
1494 input_cgraph_opt_summary (nodes);
1495 nodes.release ();
1496 }
1497
1498 merge_profile_summaries (file_data_vec);
1499 get_working_sets ();
1500
1501
1502 /* Clear out the aux field that was used to store enough state to
1503 tell which nodes should be overwritten. */
1504 FOR_EACH_FUNCTION (node)
1505 {
1506 /* Some nodes may have been created by cgraph_node. This
1507 happens when the callgraph contains nested functions. If the
1508 node for the parent function was never emitted to the gimple
1509 file, cgraph_node will create a node for it when setting the
1510 context of the nested function. */
1511 if (node->symbol.lto_file_data)
1512 node->symbol.aux = NULL;
1513 }
1514 }
1515
1516 /* True when we need optimization summary for NODE. */
1517
1518 static int
1519 output_cgraph_opt_summary_p (struct cgraph_node *node)
1520 {
1521 return (node->clone_of
1522 && (node->clone.tree_map
1523 || node->clone.args_to_skip
1524 || node->clone.combined_args_to_skip));
1525 }
1526
1527 /* Output optimization summary for EDGE to OB. */
1528 static void
1529 output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
1530 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
1531 {
1532 }
1533
1534 /* Output optimization summary for NODE to OB. */
1535
1536 static void
1537 output_node_opt_summary (struct output_block *ob,
1538 struct cgraph_node *node,
1539 lto_symtab_encoder_t encoder)
1540 {
1541 unsigned int index;
1542 bitmap_iterator bi;
1543 struct ipa_replace_map *map;
1544 struct bitpack_d bp;
1545 int i;
1546 struct cgraph_edge *e;
1547
1548 if (node->clone.args_to_skip)
1549 {
1550 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.args_to_skip));
1551 EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
1552 streamer_write_uhwi (ob, index);
1553 }
1554 else
1555 streamer_write_uhwi (ob, 0);
1556 if (node->clone.combined_args_to_skip)
1557 {
1558 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.combined_args_to_skip));
1559 EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
1560 streamer_write_uhwi (ob, index);
1561 }
1562 else
1563 streamer_write_uhwi (ob, 0);
1564 streamer_write_uhwi (ob, vec_safe_length (node->clone.tree_map));
1565 FOR_EACH_VEC_SAFE_ELT (node->clone.tree_map, i, map)
1566 {
1567 /* At the moment we assume all old trees to be PARM_DECLs, because we have no
1568 mechanism to store function local declarations into summaries. */
1569 gcc_assert (!map->old_tree);
1570 streamer_write_uhwi (ob, map->parm_num);
1571 gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
1572 stream_write_tree (ob, map->new_tree, true);
1573 bp = bitpack_create (ob->main_stream);
1574 bp_pack_value (&bp, map->replace_p, 1);
1575 bp_pack_value (&bp, map->ref_p, 1);
1576 streamer_write_bitpack (&bp);
1577 }
1578
1579 if (lto_symtab_encoder_in_partition_p (encoder, (symtab_node) node))
1580 {
1581 for (e = node->callees; e; e = e->next_callee)
1582 output_edge_opt_summary (ob, e);
1583 for (e = node->indirect_calls; e; e = e->next_callee)
1584 output_edge_opt_summary (ob, e);
1585 }
1586 }
1587
1588 /* Output optimization summaries stored in callgraph.
1589 At the moment it is the clone info structure. */
1590
1591 static void
1592 output_cgraph_opt_summary (void)
1593 {
1594 int i, n_nodes;
1595 lto_symtab_encoder_t encoder;
1596 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1597 unsigned count = 0;
1598
1599 ob->cgraph_node = NULL;
1600 encoder = ob->decl_state->symtab_node_encoder;
1601 n_nodes = lto_symtab_encoder_size (encoder);
1602 for (i = 0; i < n_nodes; i++)
1603 {
1604 symtab_node node = lto_symtab_encoder_deref (encoder, i);
1605 cgraph_node *cnode = dyn_cast <cgraph_node> (node);
1606 if (cnode && output_cgraph_opt_summary_p (cnode))
1607 count++;
1608 }
1609 streamer_write_uhwi (ob, count);
1610 for (i = 0; i < n_nodes; i++)
1611 {
1612 symtab_node node = lto_symtab_encoder_deref (encoder, i);
1613 cgraph_node *cnode = dyn_cast <cgraph_node> (node);
1614 if (cnode && output_cgraph_opt_summary_p (cnode))
1615 {
1616 streamer_write_uhwi (ob, i);
1617 output_node_opt_summary (ob, cnode, encoder);
1618 }
1619 }
1620 produce_asm (ob, NULL);
1621 destroy_output_block (ob);
1622 }
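/* The LTO_section_cgraph_opt_sum section written above is a node count
   followed by (encoder index, node summary) pairs; input_cgraph_opt_section
   below reads it back in the same order.  */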
1623
 1624 /* Input optimization summary of EDGE. */
1625
1626 static void
1627 input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
1628 struct lto_input_block *ib_main ATTRIBUTE_UNUSED)
1629 {
1630 }
1631
 1632 /* Input optimization summary of NODE. */
1633
1634 static void
1635 input_node_opt_summary (struct cgraph_node *node,
1636 struct lto_input_block *ib_main,
1637 struct data_in *data_in)
1638 {
1639 int i;
1640 int count;
1641 int bit;
1642 struct bitpack_d bp;
1643 struct cgraph_edge *e;
1644
1645 count = streamer_read_uhwi (ib_main);
1646 if (count)
1647 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
1648 for (i = 0; i < count; i++)
1649 {
1650 bit = streamer_read_uhwi (ib_main);
1651 bitmap_set_bit (node->clone.args_to_skip, bit);
1652 }
1653 count = streamer_read_uhwi (ib_main);
1654 if (count)
1655 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
1656 for (i = 0; i < count; i++)
1657 {
1658 bit = streamer_read_uhwi (ib_main);
1659 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
1660 }
1661 count = streamer_read_uhwi (ib_main);
1662 for (i = 0; i < count; i++)
1663 {
1664 struct ipa_replace_map *map = ggc_alloc_ipa_replace_map ();
1665
1666 vec_safe_push (node->clone.tree_map, map);
1667 map->parm_num = streamer_read_uhwi (ib_main);
1668 map->old_tree = NULL;
1669 map->new_tree = stream_read_tree (ib_main, data_in);
1670 bp = streamer_read_bitpack (ib_main);
1671 map->replace_p = bp_unpack_value (&bp, 1);
1672 map->ref_p = bp_unpack_value (&bp, 1);
1673 }
1674 for (e = node->callees; e; e = e->next_callee)
1675 input_edge_opt_summary (e, ib_main);
1676 for (e = node->indirect_calls; e; e = e->next_callee)
1677 input_edge_opt_summary (e, ib_main);
1678 }
1679
1680 /* Read section in file FILE_DATA of length LEN with data DATA. */
1681
1682 static void
1683 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
1684 const char *data, size_t len,
1685 vec<symtab_node> nodes)
1686 {
1687 const struct lto_function_header *header =
1688 (const struct lto_function_header *) data;
1689 const int cfg_offset = sizeof (struct lto_function_header);
1690 const int main_offset = cfg_offset + header->cfg_size;
1691 const int string_offset = main_offset + header->main_size;
1692 struct data_in *data_in;
1693 struct lto_input_block ib_main;
1694 unsigned int i;
1695 unsigned int count;
1696
1697 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
1698 header->main_size);
1699
1700 data_in =
1701 lto_data_in_create (file_data, (const char *) data + string_offset,
1702 header->string_size, vNULL);
1703 count = streamer_read_uhwi (&ib_main);
1704
1705 for (i = 0; i < count; i++)
1706 {
1707 int ref = streamer_read_uhwi (&ib_main);
1708 input_node_opt_summary (cgraph (nodes[ref]),
1709 &ib_main, data_in);
1710 }
1711 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
1712 len);
1713 lto_data_in_delete (data_in);
1714 }
1715
1716 /* Input optimization summary of cgraph. */
1717
1718 static void
1719 input_cgraph_opt_summary (vec<symtab_node> nodes)
1720 {
1721 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1722 struct lto_file_decl_data *file_data;
1723 unsigned int j = 0;
1724
1725 while ((file_data = file_data_vec[j++]))
1726 {
1727 size_t len;
1728 const char *data =
1729 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
1730 &len);
1731
1732 if (data)
1733 input_cgraph_opt_section (file_data, data, len, nodes);
1734 }
1735 }