gcc/lto-cgraph.c
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright (C) 2009-2013 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "expr.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "hashtab.h"
33 #include "langhooks.h"
34 #include "basic-block.h"
35 #include "tree-flow.h"
36 #include "cgraph.h"
37 #include "function.h"
38 #include "ggc.h"
39 #include "diagnostic-core.h"
40 #include "except.h"
41 #include "vec.h"
42 #include "timevar.h"
43 #include "pointer-set.h"
44 #include "lto-streamer.h"
45 #include "data-streamer.h"
46 #include "tree-streamer.h"
47 #include "gcov-io.h"
48 #include "tree-pass.h"
49 #include "profile.h"
50
51 static void output_cgraph_opt_summary (void);
52 static void input_cgraph_opt_summary (vec<symtab_node> nodes);
53
54 /* Number of LDPR values known to GCC. */
55 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
56
57 /* All node orders are offset by ORDER_BASE.  */
58 static int order_base;
59
60 /* Cgraph streaming is organized as a set of records whose type
61 is indicated by a tag. */
62 enum LTO_symtab_tags
63 {
64 /* Must leave 0 for the stopper. */
65
66 /* Cgraph node without body available. */
67 LTO_symtab_unavail_node = 1,
68 /* Cgraph node with function body. */
69 LTO_symtab_analyzed_node,
70 /* Cgraph edges. */
71 LTO_symtab_edge,
72 LTO_symtab_indirect_edge,
73 LTO_symtab_variable,
74 LTO_symtab_last_tag
75 };
76
77 /* Create a new symtab encoder.
78 If FOR_INPUT, the encoder allocates only the data structures needed
79 to read the symtab. */
80
81 lto_symtab_encoder_t
82 lto_symtab_encoder_new (bool for_input)
83 {
84 lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
85
86 if (!for_input)
87 encoder->map = pointer_map_create ();
88 encoder->nodes.create (0);
89 return encoder;
90 }
91
92
93 /* Delete ENCODER and its components. */
94
95 void
96 lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
97 {
98 encoder->nodes.release ();
99 if (encoder->map)
100 pointer_map_destroy (encoder->map);
101 free (encoder);
102 }
103
104
105 /* Return the existing reference number of NODE in the symtab ENCODER.
106 Assign a new reference if this is the first time
107 NODE is encoded. */
108
109 int
110 lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
111 symtab_node node)
112 {
113 int ref;
114 void **slot;
115
116 if (!encoder->map)
117 {
118 lto_encoder_entry entry = {node, false, false, false};
119
120 ref = encoder->nodes.length ();
121 encoder->nodes.safe_push (entry);
122 return ref;
123 }
124
125 slot = pointer_map_contains (encoder->map, node);
126 if (!slot || !*slot)
127 {
128 lto_encoder_entry entry = {node, false, false, false};
129 ref = encoder->nodes.length ();
130 if (!slot)
131 slot = pointer_map_insert (encoder->map, node);
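/* The stored reference is biased by one so that a zero slot value can mean "not found"; the decode below subtracts the bias again.  */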
132 *slot = (void *) (intptr_t) (ref + 1);
133 encoder->nodes.safe_push (entry);
134 }
135 else
136 ref = (size_t) *slot - 1;
137
138 return ref;
139 }
140
141 /* Remove NODE from encoder. */
142
143 bool
144 lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
145 symtab_node node)
146 {
147 void **slot, **last_slot;
148 int index;
149 lto_encoder_entry last_node;
150
151 slot = pointer_map_contains (encoder->map, node);
152 if (slot == NULL || !*slot)
153 return false;
154
155 index = (size_t) *slot - 1;
156 gcc_checking_assert (encoder->nodes[index].node == node);
157
158 /* Remove from vector. We do this by swapping node with the last element
159 of the vector. */
160 last_node = encoder->nodes.pop ();
161 if (last_node.node != node)
162 {
163 last_slot = pointer_map_contains (encoder->map, last_node.node);
164 gcc_checking_assert (last_slot && *last_slot);
165 *last_slot = (void *)(size_t) (index + 1);
166
167 /* Move the last element to the original spot of NODE. */
168 encoder->nodes[index] = last_node;
169 }
170
171 /* Remove element from hash table. */
172 *slot = NULL;
173 return true;
174 }
175
176
177 /* Return TRUE if we should encode the body of NODE (if any). */
178
179 bool
180 lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
181 struct cgraph_node *node)
182 {
183 int index = lto_symtab_encoder_lookup (encoder, (symtab_node)node);
184 return encoder->nodes[index].body;
185 }
186
187 /* Specify that we should encode the body of NODE (if any). */
188
189 static void
190 lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
191 struct cgraph_node *node)
192 {
193 int index = lto_symtab_encoder_encode (encoder, (symtab_node)node);
194 gcc_checking_assert (encoder->nodes[index].node == (symtab_node)node);
195 encoder->nodes[index].body = true;
196 }
197
198 /* Return TRUE if we should encode initializer of NODE (if any). */
199
200 bool
201 lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
202 struct varpool_node *node)
203 {
204 int index = lto_symtab_encoder_lookup (encoder, (symtab_node)node);
205 if (index == LCC_NOT_FOUND)
206 return false;
207 return encoder->nodes[index].initializer;
208 }
209
210 /* Specify that we should encode the initializer of NODE (if any). */
211
212 static void
213 lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
214 struct varpool_node *node)
215 {
216 int index = lto_symtab_encoder_lookup (encoder, (symtab_node)node);
217 encoder->nodes[index].initializer = true;
218 }
219
220 /* Return TRUE if NODE is in this partition. */
221
222 bool
223 lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
224 symtab_node node)
225 {
226 int index = lto_symtab_encoder_lookup (encoder, (symtab_node)node);
227 if (index == LCC_NOT_FOUND)
228 return false;
229 return encoder->nodes[index].in_partition;
230 }
231
232 /* Mark NODE to be included in this partition. */
233
234 void
235 lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
236 symtab_node node)
237 {
238 int index = lto_symtab_encoder_encode (encoder, (symtab_node)node);
239 encoder->nodes[index].in_partition = true;
240 }
241
242 /* Output the cgraph EDGE to OB using ENCODER. */
243
244 static void
245 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
246 lto_symtab_encoder_t encoder)
247 {
248 unsigned int uid;
249 intptr_t ref;
250 struct bitpack_d bp;
251
252 if (edge->indirect_unknown_callee)
253 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
254 LTO_symtab_indirect_edge);
255 else
256 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
257 LTO_symtab_edge);
258
259 ref = lto_symtab_encoder_lookup (encoder, (symtab_node)edge->caller);
260 gcc_assert (ref != LCC_NOT_FOUND);
261 streamer_write_hwi_stream (ob->main_stream, ref);
262
263 if (!edge->indirect_unknown_callee)
264 {
265 ref = lto_symtab_encoder_lookup (encoder, (symtab_node)edge->callee);
266 gcc_assert (ref != LCC_NOT_FOUND);
267 streamer_write_hwi_stream (ob->main_stream, ref);
268 }
269
270 streamer_write_gcov_count_stream (ob->main_stream, edge->count);
271
272 bp = bitpack_create (ob->main_stream);
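/* When the caller's body is not available, the call statement cannot be consulted, so use the uid saved in lto_stmt_uid; otherwise take the uid from the call statement itself.  */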
273 uid = (!gimple_has_body_p (edge->caller->symbol.decl)
274 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt));
275 bp_pack_enum (&bp, cgraph_inline_failed_enum,
276 CIF_N_REASONS, edge->inline_failed);
277 bp_pack_var_len_unsigned (&bp, uid);
278 bp_pack_var_len_unsigned (&bp, edge->frequency);
279 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
280 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
281 bp_pack_value (&bp, edge->can_throw_external, 1);
282 if (edge->indirect_unknown_callee)
283 {
284 int flags = edge->indirect_info->ecf_flags;
285 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
286 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
287 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
288 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
289 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
290 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
291 /* Flags that should not appear on indirect calls. */
292 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
293 | ECF_MAY_BE_ALLOCA
294 | ECF_SIBCALL
295 | ECF_LEAF
296 | ECF_NOVOPS)));
297 }
298 streamer_write_bitpack (&bp);
299 }
300
301 /* Return TRUE if LIST contains references from other partitions. */
302
303 bool
304 referenced_from_other_partition_p (struct ipa_ref_list *list, lto_symtab_encoder_t encoder)
305 {
306 int i;
307 struct ipa_ref *ref;
308 for (i = 0; ipa_ref_list_referring_iterate (list, i, ref); i++)
309 {
310 if (ref->referring->symbol.in_other_partition
311 || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
312 return true;
313 }
314 return false;
315 }
316
317 /* Return TRUE when NODE is reachable from another partition. */
318
319 bool
320 reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
321 {
322 struct cgraph_edge *e;
323 if (!node->symbol.definition)
324 return false;
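/* Bodies of inline clones are materialized only within the function they were inlined into, so they are never reached directly from another partition.  */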
325 if (node->global.inlined_to)
326 return false;
327 for (e = node->callers; e; e = e->next_caller)
328 if (e->caller->symbol.in_other_partition
329 || !lto_symtab_encoder_in_partition_p (encoder, (symtab_node)e->caller))
330 return true;
331 return false;
332 }
333
334 /* Return TRUE if LIST contains references from this partition. */
335
336 bool
337 referenced_from_this_partition_p (struct ipa_ref_list *list,
338 lto_symtab_encoder_t encoder)
339 {
340 int i;
341 struct ipa_ref *ref;
342 for (i = 0; ipa_ref_list_referring_iterate (list, i, ref); i++)
343 if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
344 return true;
345 return false;
346 }
347
348 /* Return TRUE when NODE is reachable from this partition. */
349
350 bool
351 reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
352 {
353 struct cgraph_edge *e;
354 for (e = node->callers; e; e = e->next_caller)
355 if (lto_symtab_encoder_in_partition_p (encoder, (symtab_node)e->caller))
356 return true;
357 return false;
358 }
359
360 /* Output the cgraph NODE to OB.  ENCODER is used to find the
361 reference number of NODE->inlined_to.  If NODE is not in the
362 partition described by ENCODER, then NODE is a boundary node and
363 we pretend NODE just has a decl and no callees.  ENCODER is also
364 used to find the reference of an already-encoded clone origin,
365 which is how we determine whether NODE should be streamed as a
366 clone of a previously written node. */
367
368 static void
369 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
370 lto_symtab_encoder_t encoder)
371 {
372 unsigned int tag;
373 struct bitpack_d bp;
374 bool boundary_p;
375 intptr_t ref;
376 bool in_other_partition = false;
377 struct cgraph_node *clone_of;
378 struct ipa_opt_pass_d *pass;
379 int i;
380 bool alias_p;
381
382 boundary_p = !lto_symtab_encoder_in_partition_p (encoder, (symtab_node)node);
383
384 if (node->symbol.analyzed && !boundary_p)
385 tag = LTO_symtab_analyzed_node;
386 else
387 tag = LTO_symtab_unavail_node;
388
389 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
390 tag);
391 streamer_write_hwi_stream (ob->main_stream, node->symbol.order);
392
393 /* In WPA mode, we only output part of the call-graph. Also, we
394 fake cgraph node attributes.  There are two cases that we care about.
395
396 Boundary nodes: There are nodes that are not part of SET but are
397 called from within SET. We artificially make them look like
398 externally visible nodes with no function body.
399
400 Cherry-picked nodes: These are nodes we pulled from other
401 translation units into SET during IPA-inlining.  We make them look
402 like local static nodes to prevent clashes with other local statics. */
403 if (boundary_p && node->symbol.analyzed && !DECL_EXTERNAL (node->symbol.decl))
404 {
405 /* Inline clones cannot be part of the boundary.
406 gcc_assert (!node->global.inlined_to);
407 
408 FIXME: At the moment they can be, when the partition contains an inline
409 clone that is a clone of an inline clone from outside the partition.  We can
410 reshape the clone tree and make another node the root, but it
411 needs a bit of extra work and will be promptly done by cgraph_remove_node
412 after reading back. */
413 in_other_partition = 1;
414 }
415
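/* Walk the clone tree (previous siblings first, then the origin) until we find an ancestor that is present in ENCODER; that is the node we reference below as the clone origin in the stream.  */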
416 clone_of = node->clone_of;
417 while (clone_of
418 && (ref = lto_symtab_encoder_lookup (encoder, (symtab_node)clone_of)) == LCC_NOT_FOUND)
419 if (clone_of->prev_sibling_clone)
420 clone_of = clone_of->prev_sibling_clone;
421 else
422 clone_of = clone_of->clone_of;
423
424 if (tag == LTO_symtab_analyzed_node)
425 gcc_assert (clone_of || !node->clone_of);
426 if (!clone_of)
427 streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
428 else
429 streamer_write_hwi_stream (ob->main_stream, ref);
430
431
432 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->symbol.decl);
433 streamer_write_gcov_count_stream (ob->main_stream, node->count);
434 streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
435
436 streamer_write_hwi_stream (ob->main_stream,
437 node->ipa_transforms_to_apply.length ());
438 FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
439 streamer_write_hwi_stream (ob->main_stream, pass->pass.static_pass_number);
440
441 if (tag == LTO_symtab_analyzed_node)
442 {
443 if (node->global.inlined_to)
444 {
445 ref = lto_symtab_encoder_lookup (encoder, (symtab_node)node->global.inlined_to);
446 gcc_assert (ref != LCC_NOT_FOUND);
447 }
448 else
449 ref = LCC_NOT_FOUND;
450
451 streamer_write_hwi_stream (ob->main_stream, ref);
452 }
453
454 if (node->symbol.same_comdat_group && !boundary_p)
455 {
456 ref = lto_symtab_encoder_lookup (encoder,
457 node->symbol.same_comdat_group);
458 gcc_assert (ref != LCC_NOT_FOUND);
459 }
460 else
461 ref = LCC_NOT_FOUND;
462 streamer_write_hwi_stream (ob->main_stream, ref);
463
464 bp = bitpack_create (ob->main_stream);
465 bp_pack_value (&bp, node->local.local, 1);
466 bp_pack_value (&bp, node->symbol.externally_visible, 1);
467 bp_pack_value (&bp, node->symbol.definition, 1);
468 bp_pack_value (&bp, node->local.versionable, 1);
469 bp_pack_value (&bp, node->local.can_change_signature, 1);
470 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
471 bp_pack_value (&bp, node->symbol.force_output, 1);
472 bp_pack_value (&bp, node->symbol.forced_by_abi, 1);
473 bp_pack_value (&bp, node->symbol.unique_name, 1);
474 bp_pack_value (&bp, node->symbol.address_taken, 1);
475 bp_pack_value (&bp, node->abstract_and_needed, 1);
476 bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
477 && !DECL_EXTERNAL (node->symbol.decl)
478 && !DECL_COMDAT (node->symbol.decl)
479 && (reachable_from_other_partition_p (node, encoder)
480 || referenced_from_other_partition_p (&node->symbol.ref_list,
481 encoder)), 1);
482 bp_pack_value (&bp, node->lowered, 1);
483 bp_pack_value (&bp, in_other_partition, 1);
484 /* Real aliases in a boundary become non-aliases. However we still stream
485 alias info on weakrefs.
486 TODO: We lose a bit of information here - when we know that a variable is
487 defined in another unit, we may use the info on aliases to resolve
488 symbol1 != symbol2 type tests that we can otherwise do only for
489 locally defined objects. */
490 alias_p = node->symbol.alias && (!boundary_p || node->symbol.weakref);
491 bp_pack_value (&bp, alias_p, 1);
492 bp_pack_value (&bp, node->symbol.weakref, 1);
493 bp_pack_value (&bp, node->frequency, 2);
494 bp_pack_value (&bp, node->only_called_at_startup, 1);
495 bp_pack_value (&bp, node->only_called_at_exit, 1);
496 bp_pack_value (&bp, node->tm_clone, 1);
497 bp_pack_value (&bp, node->thunk.thunk_p && !boundary_p, 1);
498 bp_pack_enum (&bp, ld_plugin_symbol_resolution,
499 LDPR_NUM_KNOWN, node->symbol.resolution);
500 streamer_write_bitpack (&bp);
501
502 if (node->thunk.thunk_p && !boundary_p)
503 {
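/* Pack the thunk flags into a single integer: bit 0 is always set as a marker, bit 1 is this_adjusting and bit 2 is virtual_offset_p; input_node decodes the same bits.  */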
504 streamer_write_uhwi_stream
505 (ob->main_stream,
506 1 + (node->thunk.this_adjusting != 0) * 2
507 + (node->thunk.virtual_offset_p != 0) * 4);
508 streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
509 streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
510 }
511 }
512
513 /* Output the varpool NODE to OB.
514 If NODE is not in the partition, then NODE is a boundary node. */
515
516 static void
517 lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
518 lto_symtab_encoder_t encoder)
519 {
520 bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, (symtab_node)node);
521 struct bitpack_d bp;
522 int ref;
523 bool alias_p;
524
525 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
526 LTO_symtab_variable);
527 streamer_write_hwi_stream (ob->main_stream, node->symbol.order);
528 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->symbol.decl);
529 bp = bitpack_create (ob->main_stream);
530 bp_pack_value (&bp, node->symbol.externally_visible, 1);
531 bp_pack_value (&bp, node->symbol.force_output, 1);
532 bp_pack_value (&bp, node->symbol.forced_by_abi, 1);
533 bp_pack_value (&bp, node->symbol.unique_name, 1);
534 bp_pack_value (&bp, node->symbol.definition, 1);
535 alias_p = node->symbol.alias && (!boundary_p || node->symbol.weakref);
536 bp_pack_value (&bp, alias_p, 1);
537 bp_pack_value (&bp, node->symbol.weakref, 1);
538 bp_pack_value (&bp, node->symbol.analyzed && !boundary_p, 1);
539 gcc_assert (node->symbol.definition || !node->symbol.analyzed);
540 /* Constant pool initializers can be de-unified into individual ltrans units.
541 FIXME: Alternatively, at -Os we may want to avoid generating the local
542 labels for them and share them across LTRANS partitions. */
543 if (DECL_IN_CONSTANT_POOL (node->symbol.decl)
544 && !DECL_EXTERNAL (node->symbol.decl)
545 && !DECL_COMDAT (node->symbol.decl))
546 {
547 bp_pack_value (&bp, 0, 1); /* used_from_other_partition. */
548 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
549 }
550 else
551 {
552 bp_pack_value (&bp, node->symbol.definition
553 && referenced_from_other_partition_p (&node->symbol.ref_list,
554 encoder), 1);
555 bp_pack_value (&bp, node->symbol.analyzed
556 && boundary_p && !DECL_EXTERNAL (node->symbol.decl), 1);
557 /* in_other_partition. */
558 }
559 streamer_write_bitpack (&bp);
560 if (node->symbol.same_comdat_group && !boundary_p)
561 {
562 ref = lto_symtab_encoder_lookup (encoder,
563 node->symbol.same_comdat_group);
564 gcc_assert (ref != LCC_NOT_FOUND);
565 }
566 else
567 ref = LCC_NOT_FOUND;
568 streamer_write_hwi_stream (ob->main_stream, ref);
569 streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
570 LDPR_NUM_KNOWN, node->symbol.resolution);
571 }
572
573 /* Output the IPA reference REF to OB.  ENCODER is used to look up
574 the reference number of the referred symbol. */
575
576 static void
577 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
578 lto_symtab_encoder_t encoder)
579 {
580 struct bitpack_d bp;
581 int nref;
582
583 bp = bitpack_create (ob->main_stream);
584 bp_pack_value (&bp, ref->use, 2);
585 streamer_write_bitpack (&bp);
586 nref = lto_symtab_encoder_lookup (encoder, ref->referred);
587 gcc_assert (nref != LCC_NOT_FOUND);
588 streamer_write_hwi_stream (ob->main_stream, nref);
589 }
590
591 /* Stream out profile_summary to OB. */
592
593 static void
594 output_profile_summary (struct lto_simple_output_block *ob)
595 {
596 unsigned h_ix;
597 struct bitpack_d bp;
598
599 if (profile_info)
600 {
601 /* We do not output num and run_max, they are not used by
602 GCC profile feedback and they are difficult to merge from multiple
603 units. */
604 gcc_assert (profile_info->runs);
605 streamer_write_uhwi_stream (ob->main_stream, profile_info->runs);
606 streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_max);
607
608 /* sum_all is needed for computing the working set with the
609 histogram. */
610 streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_all);
611
612 /* Create and output a bitpack of the indices of non-zero histogram entries. */
613 bp = bitpack_create (ob->main_stream);
614 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
615 bp_pack_value (&bp, profile_info->histogram[h_ix].num_counters > 0, 1);
616 streamer_write_bitpack (&bp);
617 /* Now stream out only those non-zero entries. */
618 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
619 {
620 if (!profile_info->histogram[h_ix].num_counters)
621 continue;
622 streamer_write_gcov_count_stream (ob->main_stream,
623 profile_info->histogram[h_ix].num_counters);
624 streamer_write_gcov_count_stream (ob->main_stream,
625 profile_info->histogram[h_ix].min_value);
626 streamer_write_gcov_count_stream (ob->main_stream,
627 profile_info->histogram[h_ix].cum_value);
628 }
629 /* IPA-profile computes hot bb threshold based on cumulated
630 whole program profile. We need to stream it down to ltrans. */
631 if (flag_wpa)
632 streamer_write_gcov_count_stream (ob->main_stream,
633 get_hot_bb_threshold ());
634 }
635 else
636 streamer_write_uhwi_stream (ob->main_stream, 0);
637 }
638
639 /* Output all callees or indirect outgoing edges. EDGE must be the first such
640 edge. */
641
642 static void
643 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
644 struct lto_simple_output_block *ob,
645 lto_symtab_encoder_t encoder)
646 {
647 if (!edge)
648 return;
649
650 /* Output edges in backward direction, so the reconstructed callgraph matches
651 the original and it is easy to associate call sites with the IPA pass summaries. */
652 while (edge->next_callee)
653 edge = edge->next_callee;
654 for (; edge; edge = edge->prev_callee)
655 lto_output_edge (ob, edge, encoder);
656 }
657
658 /* Output the IPA references of all symbols in the partition described by ENCODER. */
659
660 static void
661 output_refs (lto_symtab_encoder_t encoder)
662 {
663 lto_symtab_encoder_iterator lsei;
664 struct lto_simple_output_block *ob;
665 int count;
666 struct ipa_ref *ref;
667 int i;
668
669 ob = lto_create_simple_output_block (LTO_section_refs);
670
671 for (lsei = lsei_start_in_partition (encoder); !lsei_end_p (lsei);
672 lsei_next_in_partition (&lsei))
673 {
674 symtab_node node = lsei_node (lsei);
675
676 count = ipa_ref_list_nreferences (&node->symbol.ref_list);
677 if (count)
678 {
679 streamer_write_gcov_count_stream (ob->main_stream, count);
680 streamer_write_uhwi_stream (ob->main_stream,
681 lto_symtab_encoder_lookup (encoder, node));
682 for (i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list,
683 i, ref); i++)
684 lto_output_ref (ob, ref, encoder);
685 }
686 }
687
688 streamer_write_uhwi_stream (ob->main_stream, 0);
689
690 lto_destroy_simple_output_block (ob);
691 }
692
693 /* Add NODE into encoder as well as nodes it is cloned from.
694 Do it in a way so that each clone's origin appears before the clone. */
695
696 static void
697 add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
698 bool include_body)
699 {
700 if (node->clone_of)
701 add_node_to (encoder, node->clone_of, include_body);
702 else if (include_body)
703 lto_set_symtab_encoder_encode_body (encoder, node);
704 lto_symtab_encoder_encode (encoder, (symtab_node)node);
705 }
706
707 /* Add all references in LIST to encoders. */
708
709 static void
710 add_references (lto_symtab_encoder_t encoder,
711 struct ipa_ref_list *list)
712 {
713 int i;
714 struct ipa_ref *ref;
715 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
716 if (is_a <cgraph_node> (ref->referred))
717 add_node_to (encoder, ipa_ref_node (ref), false);
718 else
719 lto_symtab_encoder_encode (encoder, ref->referred);
720 }
721
722 /* Find all symbols we want to stream into given partition and insert them
723 to encoders.
724
725 The function actually replaces IN_ENCODER with a new one.  The reason is that
726 the streaming code needs a clone's origin to be streamed before the clone.  This
727 means that we need to insert the nodes in specific order. This order is
728 ignored by the partitioning logic earlier. */
729
730 lto_symtab_encoder_t
731 compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
732 {
733 struct cgraph_node *node;
734 struct cgraph_edge *edge;
735 int i;
736 lto_symtab_encoder_t encoder;
737 lto_symtab_encoder_iterator lsei;
738
739 encoder = lto_symtab_encoder_new (false);
740
741 /* Go over all entries in the IN_ENCODER and duplicate them to
742 ENCODER. At the same time insert masters of clones so
743 every master appears before its clones. */
744 for (lsei = lsei_start_function_in_partition (in_encoder);
745 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
746 {
747 node = lsei_cgraph_node (lsei);
748 add_node_to (encoder, node, true);
749 lto_set_symtab_encoder_in_partition (encoder, (symtab_node)node);
750 add_references (encoder, &node->symbol.ref_list);
751 }
752 for (lsei = lsei_start_variable_in_partition (in_encoder);
753 !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
754 {
755 struct varpool_node *vnode = lsei_varpool_node (lsei);
756
757 lto_set_symtab_encoder_in_partition (encoder, (symtab_node)vnode);
758 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
759 add_references (encoder, &vnode->symbol.ref_list);
760 }
761 /* Also pickle in the initializers of all referenced read-only variables
762 to help folding. Constant pool variables are not shared, so we must
763 pickle those too. */
764 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
765 {
766 symtab_node node = lto_symtab_encoder_deref (encoder, i);
767 if (varpool_node *vnode = dyn_cast <varpool_node> (node))
768 {
769 if (DECL_INITIAL (vnode->symbol.decl)
770 && !lto_symtab_encoder_encode_initializer_p (encoder,
771 vnode)
772 && const_value_known_p (vnode->symbol.decl))
773 {
774 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
775 add_references (encoder, &vnode->symbol.ref_list);
776 }
777 }
778 }
779
780 /* Go over all the nodes again to include callees that are not in
781 the partition. */
782 for (lsei = lsei_start_function_in_partition (encoder);
783 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
784 {
785 node = lsei_cgraph_node (lsei);
786 for (edge = node->callees; edge; edge = edge->next_callee)
787 {
788 struct cgraph_node *callee = edge->callee;
789 if (!lto_symtab_encoder_in_partition_p (encoder, (symtab_node)callee))
790 {
791 /* We should have moved all the inlines. */
792 gcc_assert (!callee->global.inlined_to);
793 add_node_to (encoder, callee, false);
794 }
795 }
796 }
797 lto_symtab_encoder_delete (in_encoder);
798 return encoder;
799 }
800
801 /* Output the part of the symtab described by the symtab node encoder. */
802
803 void
804 output_symtab (void)
805 {
806 struct cgraph_node *node;
807 struct lto_simple_output_block *ob;
808 lto_symtab_encoder_iterator lsei;
809 int i, n_nodes;
810 lto_symtab_encoder_t encoder;
811 static bool asm_nodes_output = false;
812
813 if (flag_wpa)
814 output_cgraph_opt_summary ();
815
816 ob = lto_create_simple_output_block (LTO_section_symtab_nodes);
817
818 output_profile_summary (ob);
819
820 /* An encoder for cgraph nodes should have been created by
821 ipa_write_summaries_1. */
822 gcc_assert (ob->decl_state->symtab_node_encoder);
823 encoder = ob->decl_state->symtab_node_encoder;
824
825 /* Write out the nodes. We must first output a node and then its clones,
826 otherwise when reading back a clone there would be nothing to clone
827 from. */
828 n_nodes = lto_symtab_encoder_size (encoder);
829 for (i = 0; i < n_nodes; i++)
830 {
831 symtab_node node = lto_symtab_encoder_deref (encoder, i);
832 if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
833 lto_output_node (ob, cnode, encoder);
834 else
835 lto_output_varpool_node (ob, varpool (node), encoder);
836
837 }
838
839 /* Go over the nodes in SET again to write edges. */
840 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
841 lsei_next_function_in_partition (&lsei))
842 {
843 node = lsei_cgraph_node (lsei);
844 output_outgoing_cgraph_edges (node->callees, ob, encoder);
845 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
846 }
847
848 streamer_write_uhwi_stream (ob->main_stream, 0);
849
850 lto_destroy_simple_output_block (ob);
851
852 /* Emit toplevel asms.
853 When doing WPA we must output every asm just once. Since we do not partition asm
854 nodes at all, output them to the first output.  This is kind of a hack, but
855 should work well. */
856 if (!asm_nodes_output)
857 {
858 asm_nodes_output = true;
859 lto_output_toplevel_asms ();
860 }
861
862 output_refs (encoder);
863 }
864
865 /* Overwrite the information in NODE based on FILE_DATA, TAG and the
866 flags in BP.  This is called either to initialize
867 NODE or to replace the values in it, for instance because the first
868 time we saw it, the function body was not available but now it
869 is. BP is a bitpack with all the bitflags for NODE read from the
870 stream. */
871
872 static void
873 input_overwrite_node (struct lto_file_decl_data *file_data,
874 struct cgraph_node *node,
875 enum LTO_symtab_tags tag,
876 struct bitpack_d *bp)
877 {
878 node->symbol.aux = (void *) tag;
879 node->symbol.lto_file_data = file_data;
880
881 node->local.local = bp_unpack_value (bp, 1);
882 node->symbol.externally_visible = bp_unpack_value (bp, 1);
883 node->symbol.definition = bp_unpack_value (bp, 1);
884 node->local.versionable = bp_unpack_value (bp, 1);
885 node->local.can_change_signature = bp_unpack_value (bp, 1);
886 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
887 node->symbol.force_output = bp_unpack_value (bp, 1);
888 node->symbol.forced_by_abi = bp_unpack_value (bp, 1);
889 node->symbol.unique_name = bp_unpack_value (bp, 1);
890 node->symbol.address_taken = bp_unpack_value (bp, 1);
891 node->abstract_and_needed = bp_unpack_value (bp, 1);
892 node->symbol.used_from_other_partition = bp_unpack_value (bp, 1);
893 node->lowered = bp_unpack_value (bp, 1);
894 node->symbol.analyzed = tag == LTO_symtab_analyzed_node;
895 node->symbol.in_other_partition = bp_unpack_value (bp, 1);
896 if (node->symbol.in_other_partition
897 /* Avoid updating the decl when we are seeing just an inline clone.
898 When inlining a function that has functions already inlined into it,
899 we produce clones of inline clones.
900 
901 WPA partitioning might put each clone into a different unit and
902 we might end up streaming an inline clone from another partition
903 to support the clone we are interested in. */
904 && (!node->clone_of
905 || node->clone_of->symbol.decl != node->symbol.decl))
906 {
907 DECL_EXTERNAL (node->symbol.decl) = 1;
908 TREE_STATIC (node->symbol.decl) = 0;
909 }
910 node->symbol.alias = bp_unpack_value (bp, 1);
911 node->symbol.weakref = bp_unpack_value (bp, 1);
912 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
913 node->only_called_at_startup = bp_unpack_value (bp, 1);
914 node->only_called_at_exit = bp_unpack_value (bp, 1);
915 node->tm_clone = bp_unpack_value (bp, 1);
916 node->thunk.thunk_p = bp_unpack_value (bp, 1);
917 node->symbol.resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
918 LDPR_NUM_KNOWN);
919 }
920
921 /* Return the identifier of the symbol DECL is an alias of, taken from its "alias" attribute. */
922
923 static tree
924 get_alias_symbol (tree decl)
925 {
926 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
927 return get_identifier (TREE_STRING_POINTER
928 (TREE_VALUE (TREE_VALUE (alias))));
929 }
930
931 /* Read a node from input_block IB. TAG is the node's tag just read.
932 Return the node read or overwritten. */
933
934 static struct cgraph_node *
935 input_node (struct lto_file_decl_data *file_data,
936 struct lto_input_block *ib,
937 enum LTO_symtab_tags tag,
938 vec<symtab_node> nodes)
939 {
940 tree fn_decl;
941 struct cgraph_node *node;
942 struct bitpack_d bp;
943 unsigned decl_index;
944 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
945 int clone_ref;
946 int order;
947 int i, count;
948
949 order = streamer_read_hwi (ib) + order_base;
950 clone_ref = streamer_read_hwi (ib);
951
952 decl_index = streamer_read_uhwi (ib);
953 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
954
955 if (clone_ref != LCC_NOT_FOUND)
956 {
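/* The stream recorded this node as a clone; materialize it by cloning the already-read origin at index CLONE_REF.  */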
957 node = cgraph_clone_node (cgraph (nodes[clone_ref]), fn_decl,
958 0, CGRAPH_FREQ_BASE, false,
959 vNULL, false);
960 }
961 else
962 {
963 /* Declarations of functions can already be merged with a declaration
964 from another input file.  We keep the cgraph unmerged until after streaming
965 of ipa passes is done.  Always forcibly create a fresh node. */
966 node = cgraph_create_empty_node ();
967 node->symbol.decl = fn_decl;
968 symtab_register_node ((symtab_node)node);
969 }
970
971 node->symbol.order = order;
972 if (order >= symtab_order)
973 symtab_order = order + 1;
974
975 node->count = streamer_read_gcov_count (ib);
976 node->count_materialization_scale = streamer_read_hwi (ib);
977
978 count = streamer_read_hwi (ib);
979 node->ipa_transforms_to_apply = vNULL;
980 for (i = 0; i < count; i++)
981 {
982 struct opt_pass *pass;
983 int pid = streamer_read_hwi (ib);
984
985 gcc_assert (pid < passes_by_id_size);
986 pass = passes_by_id[pid];
987 node->ipa_transforms_to_apply.safe_push ((struct ipa_opt_pass_d *) pass);
988 }
989
990 if (tag == LTO_symtab_analyzed_node)
991 ref = streamer_read_hwi (ib);
992
993 ref2 = streamer_read_hwi (ib);
994
995 /* Make sure that we have not read this node before. Nodes that
996 have already been read will have their tag stored in the 'aux'
997 field. Since built-in functions can be referenced in multiple
998 functions, they are expected to be read more than once. */
999 if (node->symbol.aux && !DECL_BUILT_IN (node->symbol.decl))
1000 internal_error ("bytecode stream: found multiple instances of cgraph "
1001 "node with uid %d", node->uid);
1002
1003 bp = streamer_read_bitpack (ib);
1004 input_overwrite_node (file_data, node, tag, &bp);
1005
1006 /* Store a reference for now, and fix up later to be a pointer. */
1007 node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
1008
1009 /* Store a reference for now, and fix up later to be a pointer. */
1010 node->symbol.same_comdat_group = (symtab_node) (intptr_t) ref2;
1011
1012 if (node->thunk.thunk_p)
1013 {
1014 int type = streamer_read_uhwi (ib);
1015 HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
1016 HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);
1017
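/* TYPE was written by lto_output_node as 1 + this_adjusting * 2 + virtual_offset_p * 4; decode the individual flags below.  */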
1018 node->thunk.fixed_offset = fixed_offset;
1019 node->thunk.this_adjusting = (type & 2);
1020 node->thunk.virtual_value = virtual_value;
1021 node->thunk.virtual_offset_p = (type & 4);
1022 }
1023 if (node->symbol.alias && !node->symbol.analyzed && node->symbol.weakref)
1024 node->symbol.alias_target = get_alias_symbol (node->symbol.decl);
1025 return node;
1026 }
1027
1028 /* Read a varpool node from input_block IB.
1029 Return the node read or overwritten. */
1030
1031 static struct varpool_node *
1032 input_varpool_node (struct lto_file_decl_data *file_data,
1033 struct lto_input_block *ib)
1034 {
1035 int decl_index;
1036 tree var_decl;
1037 struct varpool_node *node;
1038 struct bitpack_d bp;
1039 int ref = LCC_NOT_FOUND;
1040 int order;
1041
1042 order = streamer_read_hwi (ib) + order_base;
1043 decl_index = streamer_read_uhwi (ib);
1044 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1045
1046 /* Declarations can already be merged with a declaration
1047 from another input file.  We keep the symbol table unmerged until after streaming
1048 of ipa passes is done.  Always forcibly create a fresh node. */
1049 node = varpool_create_empty_node ();
1050 node->symbol.decl = var_decl;
1051 symtab_register_node ((symtab_node)node);
1052
1053 node->symbol.order = order;
1054 if (order >= symtab_order)
1055 symtab_order = order + 1;
1056 node->symbol.lto_file_data = file_data;
1057
1058 bp = streamer_read_bitpack (ib);
1059 node->symbol.externally_visible = bp_unpack_value (&bp, 1);
1060 node->symbol.force_output = bp_unpack_value (&bp, 1);
1061 node->symbol.forced_by_abi = bp_unpack_value (&bp, 1);
1062 node->symbol.unique_name = bp_unpack_value (&bp, 1);
1063 node->symbol.definition = bp_unpack_value (&bp, 1);
1064 node->symbol.alias = bp_unpack_value (&bp, 1);
1065 node->symbol.weakref = bp_unpack_value (&bp, 1);
1066 node->symbol.analyzed = bp_unpack_value (&bp, 1);
1067 node->symbol.used_from_other_partition = bp_unpack_value (&bp, 1);
1068 node->symbol.in_other_partition = bp_unpack_value (&bp, 1);
1069 if (node->symbol.in_other_partition)
1070 {
1071 DECL_EXTERNAL (node->symbol.decl) = 1;
1072 TREE_STATIC (node->symbol.decl) = 0;
1073 }
1074 if (node->symbol.alias && !node->symbol.analyzed && node->symbol.weakref)
1075 node->symbol.alias_target = get_alias_symbol (node->symbol.decl);
1076 ref = streamer_read_hwi (ib);
1077 /* Store a reference for now, and fix up later to be a pointer. */
1078 node->symbol.same_comdat_group = (symtab_node) (intptr_t) ref;
1079 node->symbol.resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1080 LDPR_NUM_KNOWN);
1081
1082 return node;
1083 }
1084
1085 /* Read an IPA reference made by REFERRING_NODE from input_block IB.
1086 NODES is used to look up the referred symbol by its index. */
1087
1088 static void
1089 input_ref (struct lto_input_block *ib,
1090 symtab_node referring_node,
1091 vec<symtab_node> nodes)
1092 {
1093 symtab_node node = NULL;
1094 struct bitpack_d bp;
1095 enum ipa_ref_use use;
1096
1097 bp = streamer_read_bitpack (ib);
1098 use = (enum ipa_ref_use) bp_unpack_value (&bp, 2);
1099 node = nodes[streamer_read_hwi (ib)];
1100 ipa_record_reference (referring_node, node, use, NULL);
1101 }
1102
1103 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1104 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1105 edge being read is indirect (in the sense that it has
1106 indirect_unknown_callee set). */
1107
1108 static void
1109 input_edge (struct lto_input_block *ib, vec<symtab_node> nodes,
1110 bool indirect)
1111 {
1112 struct cgraph_node *caller, *callee;
1113 struct cgraph_edge *edge;
1114 unsigned int stmt_id;
1115 gcov_type count;
1116 int freq;
1117 cgraph_inline_failed_t inline_failed;
1118 struct bitpack_d bp;
1119 int ecf_flags = 0;
1120
1121 caller = cgraph (nodes[streamer_read_hwi (ib)]);
1122 if (caller == NULL || caller->symbol.decl == NULL_TREE)
1123 internal_error ("bytecode stream: no caller found while reading edge");
1124
1125 if (!indirect)
1126 {
1127 callee = cgraph (nodes[streamer_read_hwi (ib)]);
1128 if (callee == NULL || callee->symbol.decl == NULL_TREE)
1129 internal_error ("bytecode stream: no callee found while reading edge");
1130 }
1131 else
1132 callee = NULL;
1133
1134 count = streamer_read_gcov_count (ib);
1135
1136 bp = streamer_read_bitpack (ib);
1137 inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_enum, CIF_N_REASONS);
1138 stmt_id = bp_unpack_var_len_unsigned (&bp);
1139 freq = (int) bp_unpack_var_len_unsigned (&bp);
1140
1141 if (indirect)
1142 edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq);
1143 else
1144 edge = cgraph_create_edge (caller, callee, NULL, count, freq);
1145
1146 edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
1147 edge->lto_stmt_uid = stmt_id;
1148 edge->inline_failed = inline_failed;
1149 edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
1150 edge->can_throw_external = bp_unpack_value (&bp, 1);
1151 if (indirect)
1152 {
1153 if (bp_unpack_value (&bp, 1))
1154 ecf_flags |= ECF_CONST;
1155 if (bp_unpack_value (&bp, 1))
1156 ecf_flags |= ECF_PURE;
1157 if (bp_unpack_value (&bp, 1))
1158 ecf_flags |= ECF_NORETURN;
1159 if (bp_unpack_value (&bp, 1))
1160 ecf_flags |= ECF_MALLOC;
1161 if (bp_unpack_value (&bp, 1))
1162 ecf_flags |= ECF_NOTHROW;
1163 if (bp_unpack_value (&bp, 1))
1164 ecf_flags |= ECF_RETURNS_TWICE;
1165 edge->indirect_info->ecf_flags = ecf_flags;
1166 }
1167 }
1168
1169
1170 /* Read a cgraph from IB using the info in FILE_DATA. */
1171
1172 static vec<symtab_node>
1173 input_cgraph_1 (struct lto_file_decl_data *file_data,
1174 struct lto_input_block *ib)
1175 {
1176 enum LTO_symtab_tags tag;
1177 vec<symtab_node> nodes = vNULL;
1178 symtab_node node;
1179 unsigned i;
1180
1181 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1182 order_base = symtab_order;
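/* Symbol orders read from this file are offset by the current symtab_order so that symbols from different files land in disjoint ranges.  */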
1183 while (tag)
1184 {
1185 if (tag == LTO_symtab_edge)
1186 input_edge (ib, nodes, false);
1187 else if (tag == LTO_symtab_indirect_edge)
1188 input_edge (ib, nodes, true);
1189 else if (tag == LTO_symtab_variable)
1190 {
1191 node = (symtab_node)input_varpool_node (file_data, ib);
1192 nodes.safe_push (node);
1193 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1194 }
1195 else
1196 {
1197 node = (symtab_node)input_node (file_data, ib, tag, nodes);
1198 if (node == NULL || node->symbol.decl == NULL_TREE)
1199 internal_error ("bytecode stream: found empty cgraph node");
1200 nodes.safe_push (node);
1201 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1202 }
1203
1204 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1205 }
1206
1207 lto_input_toplevel_asms (file_data, order_base);
1208
1209 /* AUX pointers should be all non-zero for function nodes read from the stream. */
1210 #ifdef ENABLE_CHECKING
1211 FOR_EACH_VEC_ELT (nodes, i, node)
1212 gcc_assert (node->symbol.aux || !is_a <cgraph_node> (node));
1213 #endif
1214 FOR_EACH_VEC_ELT (nodes, i, node)
1215 {
1216 int ref;
1217 if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
1218 {
1219 ref = (int) (intptr_t) cnode->global.inlined_to;
1220
1221 /* We share declarations of builtins, so we may read the same node twice. */
1222 if (!node->symbol.aux)
1223 continue;
1224 node->symbol.aux = NULL;
1225
1226 /* Fixup inlined_to from reference to pointer. */
1227 if (ref != LCC_NOT_FOUND)
1228 cgraph (node)->global.inlined_to = cgraph (nodes[ref]);
1229 else
1230 cnode->global.inlined_to = NULL;
1231 }
1232
1233 ref = (int) (intptr_t) node->symbol.same_comdat_group;
1234
1235 /* Fixup same_comdat_group from reference to pointer. */
1236 if (ref != LCC_NOT_FOUND)
1237 node->symbol.same_comdat_group = nodes[ref];
1238 else
1239 node->symbol.same_comdat_group = NULL;
1240 }
1241 FOR_EACH_VEC_ELT (nodes, i, node)
1242 node->symbol.aux = is_a <cgraph_node> (node) ? (void *)1 : NULL;
1243 return nodes;
1244 }
1245
1246 /* Input ipa_refs. */
1247
1248 static void
1249 input_refs (struct lto_input_block *ib,
1250 vec<symtab_node> nodes)
1251 {
1252 int count;
1253 int idx;
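/* The refs section is a sequence of records: a reference count (zero terminates the section), the index of the referring symbol, and then that many references, as written by output_refs.  */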
1254 while (true)
1255 {
1256 symtab_node node;
1257 count = streamer_read_uhwi (ib);
1258 if (!count)
1259 break;
1260 idx = streamer_read_uhwi (ib);
1261 node = nodes[idx];
1262 while (count)
1263 {
1264 input_ref (ib, node, nodes);
1265 count--;
1266 }
1267 }
1268 }
1269
1270
1271 static struct gcov_ctr_summary lto_gcov_summary;
1272
1273 /* Input profile_info from IB. */
1274 static void
1275 input_profile_summary (struct lto_input_block *ib,
1276 struct lto_file_decl_data *file_data)
1277 {
1278 unsigned h_ix;
1279 struct bitpack_d bp;
1280 unsigned int runs = streamer_read_uhwi (ib);
1281 if (runs)
1282 {
1283 file_data->profile_info.runs = runs;
1284 file_data->profile_info.sum_max = streamer_read_gcov_count (ib);
1285 file_data->profile_info.sum_all = streamer_read_gcov_count (ib);
1286
1287 memset (file_data->profile_info.histogram, 0,
1288 sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
1289 /* Input the bitpack of non-zero histogram indices. */
1290 bp = streamer_read_bitpack (ib);
1291 /* Read in and unpack the full bitpack, flagging non-zero
1292 histogram entries by setting the num_counters non-zero. */
1293 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1294 {
1295 file_data->profile_info.histogram[h_ix].num_counters
1296 = bp_unpack_value (&bp, 1);
1297 }
1298 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1299 {
1300 if (!file_data->profile_info.histogram[h_ix].num_counters)
1301 continue;
1302
1303 file_data->profile_info.histogram[h_ix].num_counters
1304 = streamer_read_gcov_count (ib);
1305 file_data->profile_info.histogram[h_ix].min_value
1306 = streamer_read_gcov_count (ib);
1307 file_data->profile_info.histogram[h_ix].cum_value
1308 = streamer_read_gcov_count (ib);
1309 }
1310 /* IPA-profile computes hot bb threshold based on cumulated
1311 whole program profile. We need to stream it down to ltrans. */
1312 if (flag_ltrans)
1313 set_hot_bb_threshold (streamer_read_gcov_count (ib));
1314 }
1315
1316 }
1317
1318 /* Rescale profile summaries to the same number of runs in the whole unit. */
1319
1320 static void
1321 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1322 {
1323 struct lto_file_decl_data *file_data;
1324 unsigned int j, h_ix;
1325 gcov_unsigned_t max_runs = 0;
1326 struct cgraph_node *node;
1327 struct cgraph_edge *edge;
1328 gcov_type saved_sum_all = 0;
1329 gcov_ctr_summary *saved_profile_info = 0;
1330 int saved_scale = 0;
1331
1332 /* Find the unit with the maximal number of runs.  If we ever get serious about
1333 roundoff errors, we might also consider computing the least common
1334 multiple. */
1335 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1336 if (max_runs < file_data->profile_info.runs)
1337 max_runs = file_data->profile_info.runs;
1338
1339 if (!max_runs)
1340 return;
1341
1342 /* Simple overflow check. We probably don't need to support that many train
1343 runs.  Such a large value probably implies data corruption anyway. */
1344 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1345 {
1346 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1347 INT_MAX / REG_BR_PROB_BASE);
1348 return;
1349 }
1350
1351 profile_info = &lto_gcov_summary;
1352 lto_gcov_summary.runs = max_runs;
1353 lto_gcov_summary.sum_max = 0;
1354 memset (lto_gcov_summary.histogram, 0,
1355 sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
1356
1357 /* Rescale all units to the maximal number of runs.
1358 sum_max cannot be easily merged, as we have no idea what files come from
1359 the same run. We do not use the info anyway, so leave it 0. */
1360 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1361 if (file_data->profile_info.runs)
1362 {
1363 int scale = GCOV_COMPUTE_SCALE (max_runs,
1364 file_data->profile_info.runs);
1365 lto_gcov_summary.sum_max
1366 = MAX (lto_gcov_summary.sum_max,
1367 apply_scale (file_data->profile_info.sum_max, scale));
1368 lto_gcov_summary.sum_all
1369 = MAX (lto_gcov_summary.sum_all,
1370 apply_scale (file_data->profile_info.sum_all, scale));
1371 /* Save a pointer to the profile_info with the largest
1372 scaled sum_all and the scale for use in merging the
1373 histogram. */
1374 if (!saved_profile_info
1375 || lto_gcov_summary.sum_all > saved_sum_all)
1376 {
1377 saved_profile_info = &file_data->profile_info;
1378 saved_sum_all = lto_gcov_summary.sum_all;
1379 saved_scale = scale;
1380 }
1381 }
1382
1383 gcc_assert (saved_profile_info);
1384
1385 /* Scale up the histogram from the profile that had the largest
1386 scaled sum_all above. */
1387 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1388 {
1389 /* Scale up the min value as we did the corresponding sum_all
1390 above. Use that to find the new histogram index. */
1391 gcov_type scaled_min
1392 = apply_scale (saved_profile_info->histogram[h_ix].min_value,
1393 saved_scale);
1394 /* The new index may be shared with another scaled histogram entry,
1395 so we need to account for a non-zero histogram entry at new_ix. */
1396 unsigned new_ix = gcov_histo_index (scaled_min);
1397 lto_gcov_summary.histogram[new_ix].min_value
1398 = (lto_gcov_summary.histogram[new_ix].num_counters
1399 ? MIN (lto_gcov_summary.histogram[new_ix].min_value, scaled_min)
1400 : scaled_min);
1401 /* Some of the scaled counter values would ostensibly need to be placed
1402 into different (larger) histogram buckets, but we keep things simple
1403 here and place the scaled cumulative counter value in the bucket
1404 corresponding to the scaled minimum counter value. */
1405 lto_gcov_summary.histogram[new_ix].cum_value
1406 += apply_scale (saved_profile_info->histogram[h_ix].cum_value,
1407 saved_scale);
1408 lto_gcov_summary.histogram[new_ix].num_counters
1409 += saved_profile_info->histogram[h_ix].num_counters;
1410 }
1411
1412 /* Watch roundoff errors. */
1413 if (lto_gcov_summary.sum_max < max_runs)
1414 lto_gcov_summary.sum_max = max_runs;
1415
1416 /* If merging already happened at WPA time, we are done. */
1417 if (flag_ltrans)
1418 return;
1419
1420 /* Now compute count_materialization_scale of each node.
1421 During LTRANS we already have values of count_materialization_scale
1422 computed, so just update them. */
1423 FOR_EACH_FUNCTION (node)
1424 if (node->symbol.lto_file_data
1425 && node->symbol.lto_file_data->profile_info.runs)
1426 {
1427 int scale;
1428
1429 scale = RDIV (node->count_materialization_scale * max_runs,
1430 node->symbol.lto_file_data->profile_info.runs);
1431 node->count_materialization_scale = scale;
1432 if (scale < 0)
1433 fatal_error ("Profile information in %s corrupted",
1434 file_data->file_name);
1435
1436 if (scale == REG_BR_PROB_BASE)
1437 continue;
1438 for (edge = node->callees; edge; edge = edge->next_callee)
1439 edge->count = apply_scale (edge->count, scale);
1440 node->count = apply_scale (node->count, scale);
1441 }
1442 }
1443
1444 /* Input and merge the symtab from each of the .o files passed to
1445 lto1. */
1446
1447 void
1448 input_symtab (void)
1449 {
1450 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1451 struct lto_file_decl_data *file_data;
1452 unsigned int j = 0;
1453 struct cgraph_node *node;
1454
1455 cgraph_state = CGRAPH_STATE_IPA_SSA;
1456
1457 while ((file_data = file_data_vec[j++]))
1458 {
1459 const char *data;
1460 size_t len;
1461 struct lto_input_block *ib;
1462 vec<symtab_node> nodes;
1463
1464 ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
1465 &data, &len);
1466 if (!ib)
1467 fatal_error ("cannot find LTO cgraph in %s", file_data->file_name);
1468 input_profile_summary (ib, file_data);
1469 file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
1470 nodes = input_cgraph_1 (file_data, ib);
1471 lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
1472 ib, data, len);
1473
1474 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1475 &data, &len);
1476 if (!ib)
1477 fatal_error("cannot find LTO section refs in %s", file_data->file_name);
1478 input_refs (ib, nodes);
1479 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1480 ib, data, len);
1481 if (flag_ltrans)
1482 input_cgraph_opt_summary (nodes);
1483 nodes.release ();
1484 }
1485
1486 merge_profile_summaries (file_data_vec);
1487 get_working_sets ();
1488
1489
1490 /* Clear out the aux field that was used to store enough state to
1491 tell which nodes should be overwritten. */
1492 FOR_EACH_FUNCTION (node)
1493 {
1494 /* Some nodes may have been created by cgraph_node. This
1495 happens when the callgraph contains nested functions. If the
1496 node for the parent function was never emitted to the gimple
1497 file, cgraph_node will create a node for it when setting the
1498 context of the nested function. */
1499 if (node->symbol.lto_file_data)
1500 node->symbol.aux = NULL;
1501 }
1502 }
1503
1504 /* True when we need optimization summary for NODE. */
1505
1506 static int
1507 output_cgraph_opt_summary_p (struct cgraph_node *node)
1508 {
1509 return (node->clone_of
1510 && (node->clone.tree_map
1511 || node->clone.args_to_skip
1512 || node->clone.combined_args_to_skip));
1513 }
1514
1515 /* Output optimization summary for EDGE to OB. */
1516 static void
1517 output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
1518 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
1519 {
1520 }
1521
1522 /* Output optimization summary for NODE to OB. */
1523
1524 static void
1525 output_node_opt_summary (struct output_block *ob,
1526 struct cgraph_node *node,
1527 lto_symtab_encoder_t encoder)
1528 {
1529 unsigned int index;
1530 bitmap_iterator bi;
1531 struct ipa_replace_map *map;
1532 struct bitpack_d bp;
1533 int i;
1534 struct cgraph_edge *e;
1535
1536 if (node->clone.args_to_skip)
1537 {
1538 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.args_to_skip));
1539 EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
1540 streamer_write_uhwi (ob, index);
1541 }
1542 else
1543 streamer_write_uhwi (ob, 0);
1544 if (node->clone.combined_args_to_skip)
1545 {
1546 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.combined_args_to_skip));
1547 EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
1548 streamer_write_uhwi (ob, index);
1549 }
1550 else
1551 streamer_write_uhwi (ob, 0);
1552 streamer_write_uhwi (ob, vec_safe_length (node->clone.tree_map));
1553 FOR_EACH_VEC_SAFE_ELT (node->clone.tree_map, i, map)
1554 {
1555 int parm_num;
1556 tree parm;
1557
1558 for (parm_num = 0, parm = DECL_ARGUMENTS (node->symbol.decl); parm;
1559 parm = DECL_CHAIN (parm), parm_num++)
1560 if (map->old_tree == parm)
1561 break;
1562 /* At the moment we assume all old trees to be PARM_DECLs, because we have no
1563 mechanism to store function local declarations into summaries. */
1564 gcc_assert (parm);
1565 streamer_write_uhwi (ob, parm_num);
1566 gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
1567 stream_write_tree (ob, map->new_tree, true);
1568 bp = bitpack_create (ob->main_stream);
1569 bp_pack_value (&bp, map->replace_p, 1);
1570 bp_pack_value (&bp, map->ref_p, 1);
1571 streamer_write_bitpack (&bp);
1572 }
1573
1574 if (lto_symtab_encoder_in_partition_p (encoder, (symtab_node) node))
1575 {
1576 for (e = node->callees; e; e = e->next_callee)
1577 output_edge_opt_summary (ob, e);
1578 for (e = node->indirect_calls; e; e = e->next_callee)
1579 output_edge_opt_summary (ob, e);
1580 }
1581 }
1582
1583 /* Output optimization summaries stored in callgraph.
1584 At the moment it is the clone info structure. */
1585
1586 static void
1587 output_cgraph_opt_summary (void)
1588 {
1589 int i, n_nodes;
1590 lto_symtab_encoder_t encoder;
1591 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1592 unsigned count = 0;
1593
1594 ob->cgraph_node = NULL;
1595 encoder = ob->decl_state->symtab_node_encoder;
1596 n_nodes = lto_symtab_encoder_size (encoder);
1597 for (i = 0; i < n_nodes; i++)
1598 {
1599 symtab_node node = lto_symtab_encoder_deref (encoder, i);
1600 cgraph_node *cnode = dyn_cast <cgraph_node> (node);
1601 if (cnode && output_cgraph_opt_summary_p (cnode))
1602 count++;
1603 }
1604 streamer_write_uhwi (ob, count);
1605 for (i = 0; i < n_nodes; i++)
1606 {
1607 symtab_node node = lto_symtab_encoder_deref (encoder, i);
1608 cgraph_node *cnode = dyn_cast <cgraph_node> (node);
1609 if (cnode && output_cgraph_opt_summary_p (cnode))
1610 {
1611 streamer_write_uhwi (ob, i);
1612 output_node_opt_summary (ob, cnode, encoder);
1613 }
1614 }
1615 produce_asm (ob, NULL);
1616 destroy_output_block (ob);
1617 }
1618
1619 /* Input optimization summary of EDGE. */
1620
1621 static void
1622 input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
1623 struct lto_input_block *ib_main ATTRIBUTE_UNUSED)
1624 {
1625 }
1626
1627 /* Input optimization summary of NODE. */
1628
1629 static void
1630 input_node_opt_summary (struct cgraph_node *node,
1631 struct lto_input_block *ib_main,
1632 struct data_in *data_in)
1633 {
1634 int i;
1635 int count;
1636 int bit;
1637 struct bitpack_d bp;
1638 struct cgraph_edge *e;
1639
1640 count = streamer_read_uhwi (ib_main);
1641 if (count)
1642 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
1643 for (i = 0; i < count; i++)
1644 {
1645 bit = streamer_read_uhwi (ib_main);
1646 bitmap_set_bit (node->clone.args_to_skip, bit);
1647 }
1648 count = streamer_read_uhwi (ib_main);
1649 if (count)
1650 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
1651 for (i = 0; i < count; i++)
1652 {
1653 bit = streamer_read_uhwi (ib_main);
1654 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
1655 }
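/* Read the parameter replacement map: for each entry the parameter number, the replacement tree and the replace_p/ref_p flags, mirroring output_node_opt_summary.  */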
1656 count = streamer_read_uhwi (ib_main);
1657 for (i = 0; i < count; i++)
1658 {
1659 struct ipa_replace_map *map = ggc_alloc_ipa_replace_map ();
1660
1661 vec_safe_push (node->clone.tree_map, map);
1662 map->parm_num = streamer_read_uhwi (ib_main);
1663 map->old_tree = NULL;
1664 map->new_tree = stream_read_tree (ib_main, data_in);
1665 bp = streamer_read_bitpack (ib_main);
1666 map->replace_p = bp_unpack_value (&bp, 1);
1667 map->ref_p = bp_unpack_value (&bp, 1);
1668 }
1669 for (e = node->callees; e; e = e->next_callee)
1670 input_edge_opt_summary (e, ib_main);
1671 for (e = node->indirect_calls; e; e = e->next_callee)
1672 input_edge_opt_summary (e, ib_main);
1673 }
1674
1675 /* Read section in file FILE_DATA of length LEN with data DATA. */
1676
1677 static void
1678 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
1679 const char *data, size_t len,
1680 vec<symtab_node> nodes)
1681 {
1682 const struct lto_function_header *header =
1683 (const struct lto_function_header *) data;
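/* The section consists of the lto_function_header followed by the CFG stream, the main stream and the string table; compute the offset of each part.  */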
1684 const int cfg_offset = sizeof (struct lto_function_header);
1685 const int main_offset = cfg_offset + header->cfg_size;
1686 const int string_offset = main_offset + header->main_size;
1687 struct data_in *data_in;
1688 struct lto_input_block ib_main;
1689 unsigned int i;
1690 unsigned int count;
1691
1692 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
1693 header->main_size);
1694
1695 data_in =
1696 lto_data_in_create (file_data, (const char *) data + string_offset,
1697 header->string_size, vNULL);
1698 count = streamer_read_uhwi (&ib_main);
1699
1700 for (i = 0; i < count; i++)
1701 {
1702 int ref = streamer_read_uhwi (&ib_main);
1703 input_node_opt_summary (cgraph (nodes[ref]),
1704 &ib_main, data_in);
1705 }
1706 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
1707 len);
1708 lto_data_in_delete (data_in);
1709 }
1710
1711 /* Input optimization summary of cgraph. */
1712
1713 static void
1714 input_cgraph_opt_summary (vec<symtab_node> nodes)
1715 {
1716 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1717 struct lto_file_decl_data *file_data;
1718 unsigned int j = 0;
1719
1720 while ((file_data = file_data_vec[j++]))
1721 {
1722 size_t len;
1723 const char *data =
1724 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
1725 &len);
1726
1727 if (data)
1728 input_cgraph_opt_section (file_data, data, len, nodes);
1729 }
1730 }