1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright (C) 2009-2013 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "expr.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "hashtab.h"
33 #include "langhooks.h"
34 #include "basic-block.h"
35 #include "tree-flow.h"
36 #include "cgraph.h"
37 #include "function.h"
38 #include "ggc.h"
39 #include "diagnostic-core.h"
40 #include "except.h"
41 #include "vec.h"
42 #include "timevar.h"
43 #include "pointer-set.h"
44 #include "lto-streamer.h"
45 #include "data-streamer.h"
46 #include "tree-streamer.h"
47 #include "gcov-io.h"
48 #include "tree-pass.h"
49 #include "profile.h"
50
51 static void output_cgraph_opt_summary (void);
52 static void input_cgraph_opt_summary (vec<symtab_node> nodes);
53
54 /* Number of LDPR values known to GCC. */
55 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
56
57 /* All node orders are offset by ORDER_BASE.  */
58 static int order_base;
59
60 /* Cgraph streaming is organized as a set of records whose type
61    is indicated by a tag.  */
62 enum LTO_symtab_tags
63 {
64 /* Must leave 0 for the stopper. */
65
66 /* Cgraph node without body available. */
67 LTO_symtab_unavail_node = 1,
68 /* Cgraph node with function body. */
69 LTO_symtab_analyzed_node,
70 /* Cgraph edges. */
71 LTO_symtab_edge,
72 LTO_symtab_indirect_edge,
73 LTO_symtab_variable,
74 LTO_symtab_last_tag
75 };
76
77 /* Create a new symtab encoder.
78    If FOR_INPUT, the encoder allocates only the data structures needed
79    to read the symtab.  */
80
81 lto_symtab_encoder_t
82 lto_symtab_encoder_new (bool for_input)
83 {
84 lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
85
86 if (!for_input)
87 encoder->map = pointer_map_create ();
88 encoder->nodes.create (0);
89 return encoder;
90 }
91
92
93 /* Delete ENCODER and its components. */
94
95 void
96 lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
97 {
98 encoder->nodes.release ();
99 if (encoder->map)
100 pointer_map_destroy (encoder->map);
101 free (encoder);
102 }
103
104
105 /* Return the existing reference number of NODE in the symtab ENCODER.
106    Assign a new reference if this is the first time
107 NODE is encoded. */
108
109 int
110 lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
111 symtab_node node)
112 {
113 int ref;
114 void **slot;
115
116 if (!encoder->map)
117 {
118 lto_encoder_entry entry = {node, false, false, false};
119
120 ref = encoder->nodes.length ();
121 encoder->nodes.safe_push (entry);
122 return ref;
123 }
124
125 slot = pointer_map_contains (encoder->map, node);
126 if (!slot || !*slot)
127 {
128 lto_encoder_entry entry = {node, false, false, false};
129 ref = encoder->nodes.length ();
130 if (!slot)
131 slot = pointer_map_insert (encoder->map, node);
132 *slot = (void *) (intptr_t) (ref + 1);
133 encoder->nodes.safe_push (entry);
134 }
135 else
136 ref = (size_t) *slot - 1;
137
138 return ref;
139 }
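
/* Note on the map encoding above: pointer_map slots store REF + 1 rather
   than REF itself, so a slot value of 0 doubles as "not present".
   Illustrative sketch (hypothetical nodes A and B, encoder created with
   FOR_INPUT == false):

     lto_symtab_encoder_encode (encoder, a);   yields 0
     lto_symtab_encoder_encode (encoder, b);   yields 1
     lto_symtab_encoder_encode (encoder, a);   yields 0 again

   lto_symtab_encoder_delete_node below relies on the same +1 convention
   when it clears and rewrites slots.  */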
140
141 /* Remove NODE from encoder. */
142
143 bool
144 lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
145 symtab_node node)
146 {
147 void **slot, **last_slot;
148 int index;
149 lto_encoder_entry last_node;
150
151 slot = pointer_map_contains (encoder->map, node);
152 if (slot == NULL || !*slot)
153 return false;
154
155 index = (size_t) *slot - 1;
156 gcc_checking_assert (encoder->nodes[index].node == node);
157
158 /* Remove from vector. We do this by swapping node with the last element
159 of the vector. */
160 last_node = encoder->nodes.pop ();
161 if (last_node.node != node)
162 {
163 last_slot = pointer_map_contains (encoder->map, last_node.node);
164 gcc_checking_assert (last_slot && *last_slot);
165 *last_slot = (void *)(size_t) (index + 1);
166
167 /* Move the last element to the original spot of NODE. */
168 encoder->nodes[index] = last_node;
169 }
170
171 /* Remove element from hash table. */
172 *slot = NULL;
173 return true;
174 }
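
/* Deletion keeps the nodes vector dense by moving the last entry into the
   freed slot.  Illustrative example (hypothetical contents): if the vector
   holds [A, B, C] and B is deleted, C is popped and written into B's slot,
   giving [A, C], and C's map slot is updated to index 1 (stored as 2).  */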
175
176
177 /* Return TRUE if we should encode the body of NODE (if any).  */
178
179 bool
180 lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
181 struct cgraph_node *node)
182 {
183 int index = lto_symtab_encoder_lookup (encoder, (symtab_node)node);
184 return encoder->nodes[index].body;
185 }
186
187 /* Specify that the body of NODE (if any) should be encoded.  */
188
189 static void
190 lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
191 struct cgraph_node *node)
192 {
193 int index = lto_symtab_encoder_encode (encoder, (symtab_node)node);
194 gcc_checking_assert (encoder->nodes[index].node == (symtab_node)node);
195 encoder->nodes[index].body = true;
196 }
197
198 /* Return TRUE if we should encode initializer of NODE (if any). */
199
200 bool
201 lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
202 struct varpool_node *node)
203 {
204 int index = lto_symtab_encoder_lookup (encoder, (symtab_node)node);
205 if (index == LCC_NOT_FOUND)
206 return false;
207 return encoder->nodes[index].initializer;
208 }
209
210 /* Specify that the initializer of NODE (if any) should be encoded.  */
211
212 static void
213 lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
214 struct varpool_node *node)
215 {
216 int index = lto_symtab_encoder_lookup (encoder, (symtab_node)node);
217 encoder->nodes[index].initializer = true;
218 }
219
220 /* Return TRUE if NODE is in the partition described by ENCODER.  */
221
222 bool
223 lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
224 symtab_node node)
225 {
226 int index = lto_symtab_encoder_lookup (encoder, (symtab_node)node);
227 if (index == LCC_NOT_FOUND)
228 return false;
229 return encoder->nodes[index].in_partition;
230 }
231
232 /* Specify that NODE is in the partition described by ENCODER.  */
233
234 void
235 lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
236 symtab_node node)
237 {
238 int index = lto_symtab_encoder_encode (encoder, (symtab_node)node);
239 encoder->nodes[index].in_partition = true;
240 }
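
/* Typical use of the flags above by the streaming code (illustrative
   sketch; compute_ltrans_boundary below is the real driver):

     lto_set_symtab_encoder_in_partition (encoder, node);
         mark NODE as belonging to the partition being written
     lto_set_symtab_encoder_encode_body (encoder, cnode);
         request that the function body be streamed, not just the decl
     lto_set_symtab_encoder_encode_initializer (encoder, vnode);
         request that the variable initializer be streamed

   The corresponding *_p predicates are then consulted by the output
   routines to decide how much of each symbol to emit.  */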
241
242 /* Output the cgraph EDGE to OB using ENCODER. */
243
244 static void
245 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
246 lto_symtab_encoder_t encoder)
247 {
248 unsigned int uid;
249 intptr_t ref;
250 struct bitpack_d bp;
251
252 if (edge->indirect_unknown_callee)
253 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
254 LTO_symtab_indirect_edge);
255 else
256 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
257 LTO_symtab_edge);
258
259 ref = lto_symtab_encoder_lookup (encoder, (symtab_node)edge->caller);
260 gcc_assert (ref != LCC_NOT_FOUND);
261 streamer_write_hwi_stream (ob->main_stream, ref);
262
263 if (!edge->indirect_unknown_callee)
264 {
265 ref = lto_symtab_encoder_lookup (encoder, (symtab_node)edge->callee);
266 gcc_assert (ref != LCC_NOT_FOUND);
267 streamer_write_hwi_stream (ob->main_stream, ref);
268 }
269
270 streamer_write_gcov_count_stream (ob->main_stream, edge->count);
271
272 bp = bitpack_create (ob->main_stream);
273 uid = (!gimple_has_body_p (edge->caller->symbol.decl)
274 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt));
275 bp_pack_enum (&bp, cgraph_inline_failed_enum,
276 CIF_N_REASONS, edge->inline_failed);
277 bp_pack_var_len_unsigned (&bp, uid);
278 bp_pack_var_len_unsigned (&bp, edge->frequency);
279 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
280 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
281 bp_pack_value (&bp, edge->can_throw_external, 1);
282 if (edge->indirect_unknown_callee)
283 {
284 int flags = edge->indirect_info->ecf_flags;
285 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
286 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
287 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
288 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
289 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
290 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
291 /* Flags that should not appear on indirect calls. */
292 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
293 | ECF_MAY_BE_ALLOCA
294 | ECF_SIBCALL
295 | ECF_LEAF
296 | ECF_NOVOPS)));
297 }
298 streamer_write_bitpack (&bp);
299 }
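
/* The edge record written above therefore has the layout (read back by
   input_edge): tag, caller reference, callee reference (direct edges
   only), execution count, followed by one bitpack holding inline_failed,
   the statement uid, the frequency, three flag bits and, for indirect
   edges, six ECF_* bits.  */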
300
301 /* Return TRUE if LIST contains references from other partitions.  */
302
303 bool
304 referenced_from_other_partition_p (struct ipa_ref_list *list, lto_symtab_encoder_t encoder)
305 {
306 int i;
307 struct ipa_ref *ref;
308 for (i = 0; ipa_ref_list_referring_iterate (list, i, ref); i++)
309 {
310 if (ref->referring->symbol.in_other_partition
311 || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
312 return true;
313 }
314 return false;
315 }
316
317 /* Return true when NODE is reachable from another partition.  */
318
319 bool
320 reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
321 {
322 struct cgraph_edge *e;
323 if (!node->symbol.definition)
324 return false;
325 if (node->global.inlined_to)
326 return false;
327 for (e = node->callers; e; e = e->next_caller)
328 if (e->caller->symbol.in_other_partition
329 || !lto_symtab_encoder_in_partition_p (encoder, (symtab_node)e->caller))
330 return true;
331 return false;
332 }
333
334 /* Return TRUE if LIST contains references from this partition.  */
335
336 bool
337 referenced_from_this_partition_p (struct ipa_ref_list *list,
338 lto_symtab_encoder_t encoder)
339 {
340 int i;
341 struct ipa_ref *ref;
342 for (i = 0; ipa_ref_list_referring_iterate (list, i, ref); i++)
343 if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
344 return true;
345 return false;
346 }
347
348 /* Return true when NODE is reachable from this partition.  */
349
350 bool
351 reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
352 {
353 struct cgraph_edge *e;
354 for (e = node->callers; e; e = e->next_caller)
355 if (lto_symtab_encoder_in_partition_p (encoder, (symtab_node)e->caller))
356 return true;
357 return false;
358 }
359
360 /* Output the cgraph NODE to OB.  ENCODER is used to find the
361    reference number of NODE->inlined_to.  The nodes recorded in ENCODER
362    form the set we are writing to the current file.  If NODE is not in
363    that set, then NODE is a boundary of a cgraph_node_set and we pretend
364    NODE just has a decl and no callees.  ENCODER is also used to look up
365    already-encoded clone origins, which determines whether NODE is
366    streamed as a clone of a previously written node.  */
367
368 static void
369 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
370 lto_symtab_encoder_t encoder)
371 {
372 unsigned int tag;
373 struct bitpack_d bp;
374 bool boundary_p;
375 intptr_t ref;
376 bool in_other_partition = false;
377 struct cgraph_node *clone_of;
378 struct ipa_opt_pass_d *pass;
379 int i;
380 bool alias_p;
381
382 boundary_p = !lto_symtab_encoder_in_partition_p (encoder, (symtab_node)node);
383
384 if (node->symbol.analyzed && !boundary_p)
385 tag = LTO_symtab_analyzed_node;
386 else
387 tag = LTO_symtab_unavail_node;
388
389 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
390 tag);
391 streamer_write_hwi_stream (ob->main_stream, node->symbol.order);
392
393 /* In WPA mode, we only output part of the call-graph. Also, we
394    fake cgraph node attributes.  There are two cases that we care about.
395
396 Boundary nodes: There are nodes that are not part of SET but are
397 called from within SET. We artificially make them look like
398 externally visible nodes with no function body.
399
400 Cherry-picked nodes: These are nodes we pulled from other
401    translation units into SET during IPA-inlining.  We make them look like
402    local static nodes to prevent clashes with other local statics.  */
403 if (boundary_p && node->symbol.analyzed && !DECL_EXTERNAL (node->symbol.decl))
404 {
405       /* Inline clones cannot be part of the boundary.
406          gcc_assert (!node->global.inlined_to);
407
408          FIXME: At the moment they can be, when the partition contains an inline
409          clone that is a clone of an inline clone from outside the partition.  We
410          can reshape the clone tree and make another node the root, but it
411          needs a bit of extra work and will be promptly done by cgraph_remove_node
412          after reading back.  */
413 in_other_partition = 1;
414 }
415
416 clone_of = node->clone_of;
417 while (clone_of
418 && (ref = lto_symtab_encoder_lookup (encoder, (symtab_node)clone_of)) == LCC_NOT_FOUND)
419 if (clone_of->prev_sibling_clone)
420 clone_of = clone_of->prev_sibling_clone;
421 else
422 clone_of = clone_of->clone_of;
423
424   if (tag == LTO_symtab_analyzed_node)
425 gcc_assert (clone_of || !node->clone_of);
426 if (!clone_of)
427 streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
428 else
429 streamer_write_hwi_stream (ob->main_stream, ref);
430
431
432 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->symbol.decl);
433 streamer_write_gcov_count_stream (ob->main_stream, node->count);
434 streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
435
436 streamer_write_hwi_stream (ob->main_stream,
437 node->ipa_transforms_to_apply.length ());
438 FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
439 streamer_write_hwi_stream (ob->main_stream, pass->pass.static_pass_number);
440
441 if (tag == LTO_symtab_analyzed_node)
442 {
443 if (node->global.inlined_to)
444 {
445 ref = lto_symtab_encoder_lookup (encoder, (symtab_node)node->global.inlined_to);
446 gcc_assert (ref != LCC_NOT_FOUND);
447 }
448 else
449 ref = LCC_NOT_FOUND;
450
451 streamer_write_hwi_stream (ob->main_stream, ref);
452 }
453
454 if (node->symbol.same_comdat_group && !boundary_p)
455 {
456 ref = lto_symtab_encoder_lookup (encoder,
457 node->symbol.same_comdat_group);
458 gcc_assert (ref != LCC_NOT_FOUND);
459 }
460 else
461 ref = LCC_NOT_FOUND;
462 streamer_write_hwi_stream (ob->main_stream, ref);
463
464 bp = bitpack_create (ob->main_stream);
465 bp_pack_value (&bp, node->local.local, 1);
466 bp_pack_value (&bp, node->symbol.externally_visible, 1);
467 bp_pack_value (&bp, node->symbol.definition, 1);
468 bp_pack_value (&bp, node->local.versionable, 1);
469 bp_pack_value (&bp, node->local.can_change_signature, 1);
470 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
471 bp_pack_value (&bp, node->symbol.force_output, 1);
472 bp_pack_value (&bp, node->symbol.forced_by_abi, 1);
473 bp_pack_value (&bp, node->symbol.unique_name, 1);
474 bp_pack_value (&bp, node->symbol.address_taken, 1);
475 bp_pack_value (&bp, node->abstract_and_needed, 1);
476 bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
477 && !DECL_EXTERNAL (node->symbol.decl)
478 && !DECL_COMDAT (node->symbol.decl)
479 && (reachable_from_other_partition_p (node, encoder)
480 || referenced_from_other_partition_p (&node->symbol.ref_list,
481 encoder)), 1);
482 bp_pack_value (&bp, node->lowered, 1);
483 bp_pack_value (&bp, in_other_partition, 1);
484 /* Real aliases in a boundary become non-aliases. However we still stream
485 alias info on weakrefs.
486      TODO: We lose a bit of information here - when we know that the variable
487      is defined in another unit, we may use the info on aliases to resolve
488      symbol1 != symbol2 type tests that we otherwise can do only for locally
489      defined objects.  */
490 alias_p = node->symbol.alias && (!boundary_p || DECL_EXTERNAL (node->symbol.decl));
491 bp_pack_value (&bp, alias_p, 1);
492 bp_pack_value (&bp, node->frequency, 2);
493 bp_pack_value (&bp, node->only_called_at_startup, 1);
494 bp_pack_value (&bp, node->only_called_at_exit, 1);
495 bp_pack_value (&bp, node->tm_clone, 1);
496 bp_pack_value (&bp, node->thunk.thunk_p && !boundary_p, 1);
497 bp_pack_enum (&bp, ld_plugin_symbol_resolution,
498 LDPR_NUM_KNOWN, node->symbol.resolution);
499 streamer_write_bitpack (&bp);
500
501 if (node->thunk.thunk_p && !boundary_p)
502 {
503 streamer_write_uhwi_stream
504 (ob->main_stream,
505 1 + (node->thunk.this_adjusting != 0) * 2
506 + (node->thunk.virtual_offset_p != 0) * 4);
507 streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
508 streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
509 }
510 }
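
/* The thunk bookkeeping above packs three facts into one unsigned value:
   bit 0 is always set (so the value is non-zero), bit 1 is
   this_adjusting and bit 2 is virtual_offset_p.  input_node decodes it
   with (type & 2) and (type & 4).  For example, a this-adjusting thunk
   without a virtual offset is streamed as 1 + 2 = 3.  */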
511
512 /* Output the varpool NODE to OB.
513 If NODE is not in SET, then NODE is a boundary. */
514
515 static void
516 lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
517 lto_symtab_encoder_t encoder)
518 {
519 bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, (symtab_node)node);
520 struct bitpack_d bp;
521 int ref;
522 bool alias_p;
523
524 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
525 LTO_symtab_variable);
526 streamer_write_hwi_stream (ob->main_stream, node->symbol.order);
527 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->symbol.decl);
528 bp = bitpack_create (ob->main_stream);
529 bp_pack_value (&bp, node->symbol.externally_visible, 1);
530 bp_pack_value (&bp, node->symbol.force_output, 1);
531 bp_pack_value (&bp, node->symbol.forced_by_abi, 1);
532 bp_pack_value (&bp, node->symbol.unique_name, 1);
533 bp_pack_value (&bp, node->symbol.definition, 1);
534 alias_p = node->symbol.alias && (!boundary_p || DECL_EXTERNAL (node->symbol.decl));
535 bp_pack_value (&bp, alias_p, 1);
536 bp_pack_value (&bp, node->symbol.analyzed && !boundary_p, 1);
537 gcc_assert (node->symbol.definition || !node->symbol.analyzed);
538 /* Constant pool initializers can be de-unified into individual ltrans units.
539      FIXME: Alternatively, at -Os we may want to avoid generating the local
540      labels for them and share them across LTRANS partitions.  */
541 if (DECL_IN_CONSTANT_POOL (node->symbol.decl)
542 && !DECL_EXTERNAL (node->symbol.decl)
543 && !DECL_COMDAT (node->symbol.decl))
544 {
545       bp_pack_value (&bp, 0, 1);  /* used_from_other_partition.  */
546 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
547 }
548 else
549 {
550 bp_pack_value (&bp, node->symbol.definition
551 && referenced_from_other_partition_p (&node->symbol.ref_list,
552 encoder), 1);
553 bp_pack_value (&bp, node->symbol.analyzed
554 && boundary_p && !DECL_EXTERNAL (node->symbol.decl), 1);
555 /* in_other_partition. */
556 }
557 streamer_write_bitpack (&bp);
558 if (node->symbol.same_comdat_group && !boundary_p)
559 {
560 ref = lto_symtab_encoder_lookup (encoder,
561 node->symbol.same_comdat_group);
562 gcc_assert (ref != LCC_NOT_FOUND);
563 }
564 else
565 ref = LCC_NOT_FOUND;
566 streamer_write_hwi_stream (ob->main_stream, ref);
567 streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
568 LDPR_NUM_KNOWN, node->symbol.resolution);
569 }
570
571 /* Output the reference REF to OB, using ENCODER to look up the
572    referred symbol.  */
573
574 static void
575 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
576 lto_symtab_encoder_t encoder)
577 {
578 struct bitpack_d bp;
579 int nref;
580
581 bp = bitpack_create (ob->main_stream);
582 bp_pack_value (&bp, ref->use, 2);
583 streamer_write_bitpack (&bp);
584 nref = lto_symtab_encoder_lookup (encoder, ref->referred);
585 gcc_assert (nref != LCC_NOT_FOUND);
586 streamer_write_hwi_stream (ob->main_stream, nref);
587 }
588
589 /* Stream out profile_summary to OB. */
590
591 static void
592 output_profile_summary (struct lto_simple_output_block *ob)
593 {
594 unsigned h_ix;
595 struct bitpack_d bp;
596
597 if (profile_info)
598 {
599 /* We do not output num and run_max, they are not used by
600 GCC profile feedback and they are difficult to merge from multiple
601 units. */
602 gcc_assert (profile_info->runs);
603 streamer_write_uhwi_stream (ob->main_stream, profile_info->runs);
604 streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_max);
605
606 /* sum_all is needed for computing the working set with the
607 histogram. */
608 streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_all);
609
610 /* Create and output a bitpack of non-zero histogram entries indices. */
611 bp = bitpack_create (ob->main_stream);
612 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
613 bp_pack_value (&bp, profile_info->histogram[h_ix].num_counters > 0, 1);
614 streamer_write_bitpack (&bp);
615 /* Now stream out only those non-zero entries. */
616 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
617 {
618 if (!profile_info->histogram[h_ix].num_counters)
619 continue;
620 streamer_write_gcov_count_stream (ob->main_stream,
621 profile_info->histogram[h_ix].num_counters);
622 streamer_write_gcov_count_stream (ob->main_stream,
623 profile_info->histogram[h_ix].min_value);
624 streamer_write_gcov_count_stream (ob->main_stream,
625 profile_info->histogram[h_ix].cum_value);
626 }
627 /* IPA-profile computes hot bb threshold based on cumulated
628 whole program profile. We need to stream it down to ltrans. */
629 if (flag_wpa)
630 streamer_write_gcov_count_stream (ob->main_stream,
631 get_hot_bb_threshold ());
632 }
633 else
634 streamer_write_uhwi_stream (ob->main_stream, 0);
635 }
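
/* The profile summary section therefore starts with the number of runs
   (0 when no profile is present), then sum_max and sum_all, a bitpack of
   GCOV_HISTOGRAM_SIZE presence bits, the non-empty histogram entries
   (num_counters, min_value, cum_value each), and, when writing from WPA,
   the hot basic-block threshold.  input_profile_summary mirrors this
   order.  */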
636
637 /* Output all callees or indirect outgoing edges. EDGE must be the first such
638 edge. */
639
640 static void
641 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
642 struct lto_simple_output_block *ob,
643 lto_symtab_encoder_t encoder)
644 {
645 if (!edge)
646 return;
647
648   /* Output edges in backward direction, so the reconstructed callgraph matches
649      and it is easy to associate call sites with the IPA pass summaries.  */
650 while (edge->next_callee)
651 edge = edge->next_callee;
652 for (; edge; edge = edge->prev_callee)
653 lto_output_edge (ob, edge, encoder);
654 }
655
656 /* Output the IPA references of the nodes in the partition described by ENCODER.  */
657
658 static void
659 output_refs (lto_symtab_encoder_t encoder)
660 {
661 lto_symtab_encoder_iterator lsei;
662 struct lto_simple_output_block *ob;
663 int count;
664 struct ipa_ref *ref;
665 int i;
666
667 ob = lto_create_simple_output_block (LTO_section_refs);
668
669 for (lsei = lsei_start_in_partition (encoder); !lsei_end_p (lsei);
670 lsei_next_in_partition (&lsei))
671 {
672 symtab_node node = lsei_node (lsei);
673
674 count = ipa_ref_list_nreferences (&node->symbol.ref_list);
675 if (count)
676 {
677 streamer_write_gcov_count_stream (ob->main_stream, count);
678 streamer_write_uhwi_stream (ob->main_stream,
679 lto_symtab_encoder_lookup (encoder, node));
680 for (i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list,
681 i, ref); i++)
682 lto_output_ref (ob, ref, encoder);
683 }
684 }
685
686 streamer_write_uhwi_stream (ob->main_stream, 0);
687
688 lto_destroy_simple_output_block (ob);
689 }
690
691 /* Add NODE into encoder as well as nodes it is cloned from.
692    Do it in a way so that clone origins appear before their clones.  */
693
694 static void
695 add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
696 bool include_body)
697 {
698 if (node->clone_of)
699 add_node_to (encoder, node->clone_of, include_body);
700 else if (include_body)
701 lto_set_symtab_encoder_encode_body (encoder, node);
702 lto_symtab_encoder_encode (encoder, (symtab_node)node);
703 }
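
/* Illustrative example (hypothetical clone chain): for C2 whose clone_of
   is C1, whose clone_of is the original O, calling add_node_to on C2
   encodes O first, then C1, then C2, so every clone origin receives a
   smaller encoder index than its clones.  Only the root O has its body
   marked for encoding when INCLUDE_BODY is true.  */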
704
705 /* Add all references in LIST to encoders. */
706
707 static void
708 add_references (lto_symtab_encoder_t encoder,
709 struct ipa_ref_list *list)
710 {
711 int i;
712 struct ipa_ref *ref;
713 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
714 if (is_a <cgraph_node> (ref->referred))
715 add_node_to (encoder, ipa_ref_node (ref), false);
716 else
717 lto_symtab_encoder_encode (encoder, ref->referred);
718 }
719
720 /* Find all symbols we want to stream into given partition and insert them
721 to encoders.
722
723    The function actually replaces IN_ENCODER by a new one.  The reason is that
724    the streaming code needs a clone's origin to be streamed before the clone.
725    This means that we need to insert the nodes in a specific order; that order is
726 ignored by the partitioning logic earlier. */
727
728 lto_symtab_encoder_t
729 compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
730 {
731 struct cgraph_node *node;
732 struct cgraph_edge *edge;
733 int i;
734 lto_symtab_encoder_t encoder;
735 lto_symtab_encoder_iterator lsei;
736
737 encoder = lto_symtab_encoder_new (false);
738
739 /* Go over all entries in the IN_ENCODER and duplicate them to
740 ENCODER. At the same time insert masters of clones so
741 every master appears before clone. */
742 for (lsei = lsei_start_function_in_partition (in_encoder);
743 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
744 {
745 node = lsei_cgraph_node (lsei);
746 add_node_to (encoder, node, true);
747 lto_set_symtab_encoder_in_partition (encoder, (symtab_node)node);
748 add_references (encoder, &node->symbol.ref_list);
749 }
750 for (lsei = lsei_start_variable_in_partition (in_encoder);
751 !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
752 {
753 struct varpool_node *vnode = lsei_varpool_node (lsei);
754
755 lto_set_symtab_encoder_in_partition (encoder, (symtab_node)vnode);
756 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
757 add_references (encoder, &vnode->symbol.ref_list);
758 }
759   /* Also pickle in the initializers of all referenced read-only variables
760      to help folding.  Constant pool variables are not shared, so we must
761 pickle those too. */
762 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
763 {
764 symtab_node node = lto_symtab_encoder_deref (encoder, i);
765 if (varpool_node *vnode = dyn_cast <varpool_node> (node))
766 {
767 if (DECL_INITIAL (vnode->symbol.decl)
768 && !lto_symtab_encoder_encode_initializer_p (encoder,
769 vnode)
770 && const_value_known_p (vnode->symbol.decl))
771 {
772 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
773 add_references (encoder, &vnode->symbol.ref_list);
774 }
775 }
776 }
777
778 /* Go over all the nodes again to include callees that are not in
779 SET. */
780 for (lsei = lsei_start_function_in_partition (encoder);
781 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
782 {
783 node = lsei_cgraph_node (lsei);
784 for (edge = node->callees; edge; edge = edge->next_callee)
785 {
786 struct cgraph_node *callee = edge->callee;
787 if (!lto_symtab_encoder_in_partition_p (encoder, (symtab_node)callee))
788 {
789 /* We should have moved all the inlines. */
790 gcc_assert (!callee->global.inlined_to);
791 add_node_to (encoder, callee, false);
792 }
793 }
794 }
795 lto_symtab_encoder_delete (in_encoder);
796 return encoder;
797 }
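
/* To summarize, the boundary computation above makes three passes: it
   copies the partition's functions and variables (plus their clone
   origins and direct references) into the new encoder, then pulls in
   initializers of referenced read-only variables whose values are known,
   and finally adds out-of-partition callees as boundary nodes without
   bodies.  */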
798
799 /* Output the part of the symtab described by the encoder.  */
800
801 void
802 output_symtab (void)
803 {
804 struct cgraph_node *node;
805 struct lto_simple_output_block *ob;
806 lto_symtab_encoder_iterator lsei;
807 int i, n_nodes;
808 lto_symtab_encoder_t encoder;
809 static bool asm_nodes_output = false;
810
811 if (flag_wpa)
812 output_cgraph_opt_summary ();
813
814 ob = lto_create_simple_output_block (LTO_section_symtab_nodes);
815
816 output_profile_summary (ob);
817
818 /* An encoder for cgraph nodes should have been created by
819 ipa_write_summaries_1. */
820 gcc_assert (ob->decl_state->symtab_node_encoder);
821 encoder = ob->decl_state->symtab_node_encoder;
822
823 /* Write out the nodes. We must first output a node and then its clones,
824      otherwise when reading a clone back there would be nothing to clone
825 from. */
826 n_nodes = lto_symtab_encoder_size (encoder);
827 for (i = 0; i < n_nodes; i++)
828 {
829 symtab_node node = lto_symtab_encoder_deref (encoder, i);
830 if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
831 lto_output_node (ob, cnode, encoder);
832 else
833 lto_output_varpool_node (ob, varpool (node), encoder);
834
835 }
836
837 /* Go over the nodes in SET again to write edges. */
838 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
839 lsei_next_function_in_partition (&lsei))
840 {
841 node = lsei_cgraph_node (lsei);
842 output_outgoing_cgraph_edges (node->callees, ob, encoder);
843 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
844 }
845
846 streamer_write_uhwi_stream (ob->main_stream, 0);
847
848 lto_destroy_simple_output_block (ob);
849
850 /* Emit toplevel asms.
851      When doing WPA we must output every asm just once.  Since we do not partition asm
852      nodes at all, output them to the first output.  This is kind of a hack, but should
853      work well.  */
854 if (!asm_nodes_output)
855 {
856 asm_nodes_output = true;
857 lto_output_toplevel_asms ();
858 }
859
860 output_refs (encoder);
861 }
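
/* The symtab section written above thus consists of the profile summary,
   one record per encoded symbol (functions and variables interleaved in
   encoder order), the outgoing edges of every function in the partition,
   and a terminating zero.  Toplevel asms and the reference lists go to
   their own sections.  */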
862
863 /* Overwrite the information in NODE based on FILE_DATA, TAG and the
864    flags in the bitpack.  This is called either to initialize
865 NODE or to replace the values in it, for instance because the first
866 time we saw it, the function body was not available but now it
867 is. BP is a bitpack with all the bitflags for NODE read from the
868 stream. */
869
870 static void
871 input_overwrite_node (struct lto_file_decl_data *file_data,
872 struct cgraph_node *node,
873 enum LTO_symtab_tags tag,
874 struct bitpack_d *bp)
875 {
876 node->symbol.aux = (void *) tag;
877 node->symbol.lto_file_data = file_data;
878
879 node->local.local = bp_unpack_value (bp, 1);
880 node->symbol.externally_visible = bp_unpack_value (bp, 1);
881 node->symbol.definition = bp_unpack_value (bp, 1);
882 node->local.versionable = bp_unpack_value (bp, 1);
883 node->local.can_change_signature = bp_unpack_value (bp, 1);
884 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
885 node->symbol.force_output = bp_unpack_value (bp, 1);
886 node->symbol.forced_by_abi = bp_unpack_value (bp, 1);
887 node->symbol.unique_name = bp_unpack_value (bp, 1);
888 node->symbol.address_taken = bp_unpack_value (bp, 1);
889 node->abstract_and_needed = bp_unpack_value (bp, 1);
890 node->symbol.used_from_other_partition = bp_unpack_value (bp, 1);
891 node->lowered = bp_unpack_value (bp, 1);
892 node->symbol.analyzed = tag == LTO_symtab_analyzed_node;
893 node->symbol.in_other_partition = bp_unpack_value (bp, 1);
894 if (node->symbol.in_other_partition
895 /* Avoid updating decl when we are seeing just inline clone.
896 When inlining function that has functions already inlined into it,
897 we produce clones of inline clones.
898
899 WPA partitioning might put each clone into different unit and
900 we might end up streaming inline clone from other partition
901 to support clone we are interested in. */
902 && (!node->clone_of
903 || node->clone_of->symbol.decl != node->symbol.decl))
904 {
905 DECL_EXTERNAL (node->symbol.decl) = 1;
906 TREE_STATIC (node->symbol.decl) = 0;
907 }
908 node->symbol.alias = bp_unpack_value (bp, 1);
909 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
910 node->only_called_at_startup = bp_unpack_value (bp, 1);
911 node->only_called_at_exit = bp_unpack_value (bp, 1);
912 node->tm_clone = bp_unpack_value (bp, 1);
913 node->thunk.thunk_p = bp_unpack_value (bp, 1);
914 node->symbol.resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
915 LDPR_NUM_KNOWN);
916 }
917
918 /* Return, as an identifier, the symbol name that DECL's "alias" attribute refers to.  */
919
920 static tree
921 get_alias_symbol (tree decl)
922 {
923 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
924 return get_identifier (TREE_STRING_POINTER
925 (TREE_VALUE (TREE_VALUE (alias))));
926 }
927
928 /* Read a node from input_block IB. TAG is the node's tag just read.
929    Return the node read or overwritten.  */
930
931 static struct cgraph_node *
932 input_node (struct lto_file_decl_data *file_data,
933 struct lto_input_block *ib,
934 enum LTO_symtab_tags tag,
935 vec<symtab_node> nodes)
936 {
937 tree fn_decl;
938 struct cgraph_node *node;
939 struct bitpack_d bp;
940 unsigned decl_index;
941 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
942 int clone_ref;
943 int order;
944 int i, count;
945
946 order = streamer_read_hwi (ib) + order_base;
947 clone_ref = streamer_read_hwi (ib);
948
949 decl_index = streamer_read_uhwi (ib);
950 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
951
952 if (clone_ref != LCC_NOT_FOUND)
953 {
954 node = cgraph_clone_node (cgraph (nodes[clone_ref]), fn_decl,
955 0, CGRAPH_FREQ_BASE, false,
956 vNULL, false);
957 }
958 else
959 node = cgraph_get_create_node (fn_decl);
960
961 node->symbol.order = order;
962 if (order >= symtab_order)
963 symtab_order = order + 1;
964
965 node->count = streamer_read_gcov_count (ib);
966 node->count_materialization_scale = streamer_read_hwi (ib);
967
968 count = streamer_read_hwi (ib);
969 node->ipa_transforms_to_apply = vNULL;
970 for (i = 0; i < count; i++)
971 {
972 struct opt_pass *pass;
973 int pid = streamer_read_hwi (ib);
974
975 gcc_assert (pid < passes_by_id_size);
976 pass = passes_by_id[pid];
977 node->ipa_transforms_to_apply.safe_push ((struct ipa_opt_pass_d *) pass);
978 }
979
980 if (tag == LTO_symtab_analyzed_node)
981 ref = streamer_read_hwi (ib);
982
983 ref2 = streamer_read_hwi (ib);
984
985 /* Make sure that we have not read this node before. Nodes that
986 have already been read will have their tag stored in the 'aux'
987 field. Since built-in functions can be referenced in multiple
988 functions, they are expected to be read more than once. */
989 if (node->symbol.aux && !DECL_BUILT_IN (node->symbol.decl))
990 internal_error ("bytecode stream: found multiple instances of cgraph "
991 "node with uid %d", node->uid);
992
993 bp = streamer_read_bitpack (ib);
994 input_overwrite_node (file_data, node, tag, &bp);
995
996 /* Store a reference for now, and fix up later to be a pointer. */
997 node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
998
999 /* Store a reference for now, and fix up later to be a pointer. */
1000 node->symbol.same_comdat_group = (symtab_node) (intptr_t) ref2;
1001
1002 if (node->thunk.thunk_p)
1003 {
1004 int type = streamer_read_uhwi (ib);
1005 HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
1006 HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);
1007
1008 node->thunk.fixed_offset = fixed_offset;
1009 node->thunk.this_adjusting = (type & 2);
1010 node->thunk.virtual_value = virtual_value;
1011 node->thunk.virtual_offset_p = (type & 4);
1012 }
1013 if (node->symbol.alias && !node->symbol.analyzed
1014 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->symbol.decl)))
1015 node->symbol.alias_target = get_alias_symbol (node->symbol.decl);
1016 return node;
1017 }
1018
1019 /* Read a varpool node from input_block IB using data in FILE_DATA.
1020    Return the node read or overwritten.  */
1021
1022 static struct varpool_node *
1023 input_varpool_node (struct lto_file_decl_data *file_data,
1024 struct lto_input_block *ib)
1025 {
1026 int decl_index;
1027 tree var_decl;
1028 struct varpool_node *node;
1029 struct bitpack_d bp;
1030 int ref = LCC_NOT_FOUND;
1031 int order;
1032
1033 order = streamer_read_hwi (ib) + order_base;
1034 decl_index = streamer_read_uhwi (ib);
1035 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1036 node = varpool_node_for_decl (var_decl);
1037 node->symbol.order = order;
1038 if (order >= symtab_order)
1039 symtab_order = order + 1;
1040 node->symbol.lto_file_data = file_data;
1041
1042 bp = streamer_read_bitpack (ib);
1043 node->symbol.externally_visible = bp_unpack_value (&bp, 1);
1044 node->symbol.force_output = bp_unpack_value (&bp, 1);
1045 node->symbol.forced_by_abi = bp_unpack_value (&bp, 1);
1046 node->symbol.unique_name = bp_unpack_value (&bp, 1);
1047 node->symbol.definition = bp_unpack_value (&bp, 1);
1048 node->symbol.alias = bp_unpack_value (&bp, 1);
1049 node->symbol.analyzed = bp_unpack_value (&bp, 1);
1050 node->symbol.used_from_other_partition = bp_unpack_value (&bp, 1);
1051 node->symbol.in_other_partition = bp_unpack_value (&bp, 1);
1052 if (node->symbol.in_other_partition)
1053 {
1054 DECL_EXTERNAL (node->symbol.decl) = 1;
1055 TREE_STATIC (node->symbol.decl) = 0;
1056 }
1057 if (node->symbol.alias && !node->symbol.analyzed
1058 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->symbol.decl)))
1059 node->symbol.alias_target = get_alias_symbol (node->symbol.decl);
1060 ref = streamer_read_hwi (ib);
1061 /* Store a reference for now, and fix up later to be a pointer. */
1062 node->symbol.same_comdat_group = (symtab_node) (intptr_t) ref;
1063 node->symbol.resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1064 LDPR_NUM_KNOWN);
1065
1066 return node;
1067 }
1068
1069 /* Read an IPA reference from input_block IB and record it as referring
1070    from REFERRING_NODE, using NODES to resolve the referred symbol.  */
1071
1072 static void
1073 input_ref (struct lto_input_block *ib,
1074 symtab_node referring_node,
1075 vec<symtab_node> nodes)
1076 {
1077 symtab_node node = NULL;
1078 struct bitpack_d bp;
1079 enum ipa_ref_use use;
1080
1081 bp = streamer_read_bitpack (ib);
1082 use = (enum ipa_ref_use) bp_unpack_value (&bp, 2);
1083 node = nodes[streamer_read_hwi (ib)];
1084 ipa_record_reference (referring_node, node, use, NULL);
1085 }
1086
1087 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1088 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1089 edge being read is indirect (in the sense that it has
1090 indirect_unknown_callee set). */
1091
1092 static void
1093 input_edge (struct lto_input_block *ib, vec<symtab_node> nodes,
1094 bool indirect)
1095 {
1096 struct cgraph_node *caller, *callee;
1097 struct cgraph_edge *edge;
1098 unsigned int stmt_id;
1099 gcov_type count;
1100 int freq;
1101 cgraph_inline_failed_t inline_failed;
1102 struct bitpack_d bp;
1103 int ecf_flags = 0;
1104
1105 caller = cgraph (nodes[streamer_read_hwi (ib)]);
1106 if (caller == NULL || caller->symbol.decl == NULL_TREE)
1107 internal_error ("bytecode stream: no caller found while reading edge");
1108
1109 if (!indirect)
1110 {
1111 callee = cgraph (nodes[streamer_read_hwi (ib)]);
1112 if (callee == NULL || callee->symbol.decl == NULL_TREE)
1113 internal_error ("bytecode stream: no callee found while reading edge");
1114 }
1115 else
1116 callee = NULL;
1117
1118 count = streamer_read_gcov_count (ib);
1119
1120 bp = streamer_read_bitpack (ib);
1121 inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_enum, CIF_N_REASONS);
1122 stmt_id = bp_unpack_var_len_unsigned (&bp);
1123 freq = (int) bp_unpack_var_len_unsigned (&bp);
1124
1125 if (indirect)
1126 edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq);
1127 else
1128 edge = cgraph_create_edge (caller, callee, NULL, count, freq);
1129
1130 edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
1131 edge->lto_stmt_uid = stmt_id;
1132 edge->inline_failed = inline_failed;
1133 edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
1134 edge->can_throw_external = bp_unpack_value (&bp, 1);
1135 if (indirect)
1136 {
1137 if (bp_unpack_value (&bp, 1))
1138 ecf_flags |= ECF_CONST;
1139 if (bp_unpack_value (&bp, 1))
1140 ecf_flags |= ECF_PURE;
1141 if (bp_unpack_value (&bp, 1))
1142 ecf_flags |= ECF_NORETURN;
1143 if (bp_unpack_value (&bp, 1))
1144 ecf_flags |= ECF_MALLOC;
1145 if (bp_unpack_value (&bp, 1))
1146 ecf_flags |= ECF_NOTHROW;
1147 if (bp_unpack_value (&bp, 1))
1148 ecf_flags |= ECF_RETURNS_TWICE;
1149 edge->indirect_info->ecf_flags = ecf_flags;
1150 }
1151 }
1152
1153
1154 /* Read a cgraph from IB using the info in FILE_DATA. */
1155
1156 static vec<symtab_node>
1157 input_cgraph_1 (struct lto_file_decl_data *file_data,
1158 struct lto_input_block *ib)
1159 {
1160 enum LTO_symtab_tags tag;
1161 vec<symtab_node> nodes = vNULL;
1162 symtab_node node;
1163 unsigned i;
1164
1165 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1166 order_base = symtab_order;
1167 while (tag)
1168 {
1169 if (tag == LTO_symtab_edge)
1170 input_edge (ib, nodes, false);
1171 else if (tag == LTO_symtab_indirect_edge)
1172 input_edge (ib, nodes, true);
1173 else if (tag == LTO_symtab_variable)
1174 {
1175 node = (symtab_node)input_varpool_node (file_data, ib);
1176 nodes.safe_push (node);
1177 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1178 }
1179 else
1180 {
1181 node = (symtab_node)input_node (file_data, ib, tag, nodes);
1182 if (node == NULL || node->symbol.decl == NULL_TREE)
1183 internal_error ("bytecode stream: found empty cgraph node");
1184 nodes.safe_push (node);
1185 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1186 }
1187
1188 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1189 }
1190
1191 lto_input_toplevel_asms (file_data, order_base);
1192
1193 /* AUX pointers should be all non-zero for function nodes read from the stream. */
1194 #ifdef ENABLE_CHECKING
1195 FOR_EACH_VEC_ELT (nodes, i, node)
1196 gcc_assert (node->symbol.aux || !is_a <cgraph_node> (node));
1197 #endif
1198 FOR_EACH_VEC_ELT (nodes, i, node)
1199 {
1200 int ref;
1201 if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
1202 {
1203 ref = (int) (intptr_t) cnode->global.inlined_to;
1204
1205 	  /* We share declarations of builtins, so we may read the same node twice.  */
1206 if (!node->symbol.aux)
1207 continue;
1208 node->symbol.aux = NULL;
1209
1210 /* Fixup inlined_to from reference to pointer. */
1211 if (ref != LCC_NOT_FOUND)
1212 cgraph (node)->global.inlined_to = cgraph (nodes[ref]);
1213 else
1214 cnode->global.inlined_to = NULL;
1215 }
1216
1217 ref = (int) (intptr_t) node->symbol.same_comdat_group;
1218
1219 /* Fixup same_comdat_group from reference to pointer. */
1220 if (ref != LCC_NOT_FOUND)
1221 node->symbol.same_comdat_group = nodes[ref];
1222 else
1223 node->symbol.same_comdat_group = NULL;
1224 }
1225 FOR_EACH_VEC_ELT (nodes, i, node)
1226 node->symbol.aux = is_a <cgraph_node> (node) ? (void *)1 : NULL;
1227 return nodes;
1228 }
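
/* Reading back thus happens in two steps: the loop above materializes
   nodes and edges in stream order, temporarily storing inlined_to and
   same_comdat_group as indices into NODES, and the fixup loops convert
   those indices into pointers once every node exists.  The aux field
   temporarily carries the tag so that duplicate reads of shared builtin
   decls can be detected.  */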
1229
1230 /* Input ipa_refs. */
1231
1232 static void
1233 input_refs (struct lto_input_block *ib,
1234 vec<symtab_node> nodes)
1235 {
1236 int count;
1237 int idx;
1238 while (true)
1239 {
1240 symtab_node node;
1241 count = streamer_read_uhwi (ib);
1242 if (!count)
1243 break;
1244 idx = streamer_read_uhwi (ib);
1245 node = nodes[idx];
1246 while (count)
1247 {
1248 input_ref (ib, node, nodes);
1249 count--;
1250 }
1251 }
1252 }
1253
1254
1255 static struct gcov_ctr_summary lto_gcov_summary;
1256
1257 /* Input profile_info from IB. */
1258 static void
1259 input_profile_summary (struct lto_input_block *ib,
1260 struct lto_file_decl_data *file_data)
1261 {
1262 unsigned h_ix;
1263 struct bitpack_d bp;
1264 unsigned int runs = streamer_read_uhwi (ib);
1265 if (runs)
1266 {
1267 file_data->profile_info.runs = runs;
1268 file_data->profile_info.sum_max = streamer_read_gcov_count (ib);
1269 file_data->profile_info.sum_all = streamer_read_gcov_count (ib);
1270
1271 memset (file_data->profile_info.histogram, 0,
1272 sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
1273 /* Input the bitpack of non-zero histogram indices. */
1274 bp = streamer_read_bitpack (ib);
1275 /* Read in and unpack the full bitpack, flagging non-zero
1276 histogram entries by setting the num_counters non-zero. */
1277 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1278 {
1279 file_data->profile_info.histogram[h_ix].num_counters
1280 = bp_unpack_value (&bp, 1);
1281 }
1282 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1283 {
1284 if (!file_data->profile_info.histogram[h_ix].num_counters)
1285 continue;
1286
1287 file_data->profile_info.histogram[h_ix].num_counters
1288 = streamer_read_gcov_count (ib);
1289 file_data->profile_info.histogram[h_ix].min_value
1290 = streamer_read_gcov_count (ib);
1291 file_data->profile_info.histogram[h_ix].cum_value
1292 = streamer_read_gcov_count (ib);
1293 }
1294 /* IPA-profile computes hot bb threshold based on cumulated
1295 whole program profile. We need to stream it down to ltrans. */
1296 if (flag_ltrans)
1297 set_hot_bb_threshold (streamer_read_gcov_count (ib));
1298 }
1299
1300 }
1301
1302 /* Rescale profile summaries to the same number of runs in the whole unit. */
1303
1304 static void
1305 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1306 {
1307 struct lto_file_decl_data *file_data;
1308 unsigned int j, h_ix;
1309 gcov_unsigned_t max_runs = 0;
1310 struct cgraph_node *node;
1311 struct cgraph_edge *edge;
1312 gcov_type saved_sum_all = 0;
1313 gcov_ctr_summary *saved_profile_info = 0;
1314 int saved_scale = 0;
1315
1316 /* Find unit with maximal number of runs. If we ever get serious about
1317      roundoff errors, we might also consider computing the least common
1318      multiple.  */
1319 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1320 if (max_runs < file_data->profile_info.runs)
1321 max_runs = file_data->profile_info.runs;
1322
1323 if (!max_runs)
1324 return;
1325
1326 /* Simple overflow check. We probably don't need to support that many train
1327      runs.  Such a large value probably implies data corruption anyway.  */
1328 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1329 {
1330 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1331 INT_MAX / REG_BR_PROB_BASE);
1332 return;
1333 }
1334
1335 profile_info = &lto_gcov_summary;
1336 lto_gcov_summary.runs = max_runs;
1337 lto_gcov_summary.sum_max = 0;
1338 memset (lto_gcov_summary.histogram, 0,
1339 sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
1340
1341 /* Rescale all units to the maximal number of runs.
1342 sum_max can not be easily merged, as we have no idea what files come from
1343 the same run. We do not use the info anyway, so leave it 0. */
1344 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1345 if (file_data->profile_info.runs)
1346 {
1347 int scale = GCOV_COMPUTE_SCALE (max_runs,
1348 file_data->profile_info.runs);
1349 lto_gcov_summary.sum_max
1350 = MAX (lto_gcov_summary.sum_max,
1351 apply_scale (file_data->profile_info.sum_max, scale));
1352 lto_gcov_summary.sum_all
1353 = MAX (lto_gcov_summary.sum_all,
1354 apply_scale (file_data->profile_info.sum_all, scale));
1355 /* Save a pointer to the profile_info with the largest
1356 scaled sum_all and the scale for use in merging the
1357 histogram. */
1358 if (!saved_profile_info
1359 || lto_gcov_summary.sum_all > saved_sum_all)
1360 {
1361 saved_profile_info = &file_data->profile_info;
1362 saved_sum_all = lto_gcov_summary.sum_all;
1363 saved_scale = scale;
1364 }
1365 }
1366
1367 gcc_assert (saved_profile_info);
1368
1369 /* Scale up the histogram from the profile that had the largest
1370 scaled sum_all above. */
1371 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1372 {
1373 /* Scale up the min value as we did the corresponding sum_all
1374 above. Use that to find the new histogram index. */
1375 gcov_type scaled_min
1376 = apply_scale (saved_profile_info->histogram[h_ix].min_value,
1377 saved_scale);
1378 /* The new index may be shared with another scaled histogram entry,
1379 so we need to account for a non-zero histogram entry at new_ix. */
1380 unsigned new_ix = gcov_histo_index (scaled_min);
1381 lto_gcov_summary.histogram[new_ix].min_value
1382 = (lto_gcov_summary.histogram[new_ix].num_counters
1383 ? MIN (lto_gcov_summary.histogram[new_ix].min_value, scaled_min)
1384 : scaled_min);
1385 /* Some of the scaled counter values would ostensibly need to be placed
1386 into different (larger) histogram buckets, but we keep things simple
1387 here and place the scaled cumulative counter value in the bucket
1388 corresponding to the scaled minimum counter value. */
1389 lto_gcov_summary.histogram[new_ix].cum_value
1390 += apply_scale (saved_profile_info->histogram[h_ix].cum_value,
1391 saved_scale);
1392 lto_gcov_summary.histogram[new_ix].num_counters
1393 += saved_profile_info->histogram[h_ix].num_counters;
1394 }
1395
1396 /* Watch roundoff errors. */
1397 if (lto_gcov_summary.sum_max < max_runs)
1398 lto_gcov_summary.sum_max = max_runs;
1399
1400   /* If merging already happened at WPA time, we are done.  */
1401 if (flag_ltrans)
1402 return;
1403
1404 /* Now compute count_materialization_scale of each node.
1405 During LTRANS we already have values of count_materialization_scale
1406 computed, so just update them. */
1407 FOR_EACH_FUNCTION (node)
1408 if (node->symbol.lto_file_data
1409 && node->symbol.lto_file_data->profile_info.runs)
1410 {
1411 int scale;
1412
1413 scale = RDIV (node->count_materialization_scale * max_runs,
1414 node->symbol.lto_file_data->profile_info.runs);
1415 node->count_materialization_scale = scale;
1416 if (scale < 0)
1417 fatal_error ("Profile information in %s corrupted",
1418 file_data->file_name);
1419
1420 if (scale == REG_BR_PROB_BASE)
1421 continue;
1422 for (edge = node->callees; edge; edge = edge->next_callee)
1423 edge->count = apply_scale (edge->count, scale);
1424 node->count = apply_scale (node->count, scale);
1425 }
1426 }
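
/* Worked example (hypothetical numbers): with two units trained 2 and 10
   times, max_runs is 10.  The first unit's summary values are scaled by
   GCOV_COMPUTE_SCALE (10, 2), and, assuming its nodes start with the
   default count_materialization_scale of REG_BR_PROB_BASE, they end up
   with RDIV (REG_BR_PROB_BASE * 10, 2) == 5 * REG_BR_PROB_BASE, so
   apply_scale multiplies their edge and node counts by 5.  The unit
   already at 10 runs keeps a scale of REG_BR_PROB_BASE and is skipped.  */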
1427
1428 /* Input and merge the symtab from each of the .o files passed to
1429 lto1. */
1430
1431 void
1432 input_symtab (void)
1433 {
1434 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1435 struct lto_file_decl_data *file_data;
1436 unsigned int j = 0;
1437 struct cgraph_node *node;
1438
1439 cgraph_state = CGRAPH_STATE_IPA_SSA;
1440
1441 while ((file_data = file_data_vec[j++]))
1442 {
1443 const char *data;
1444 size_t len;
1445 struct lto_input_block *ib;
1446 vec<symtab_node> nodes;
1447
1448 ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
1449 &data, &len);
1450 if (!ib)
1451 fatal_error ("cannot find LTO cgraph in %s", file_data->file_name);
1452 input_profile_summary (ib, file_data);
1453 file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
1454 nodes = input_cgraph_1 (file_data, ib);
1455 lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
1456 ib, data, len);
1457
1458 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1459 &data, &len);
1460 if (!ib)
1461 fatal_error("cannot find LTO section refs in %s", file_data->file_name);
1462 input_refs (ib, nodes);
1463 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1464 ib, data, len);
1465 if (flag_ltrans)
1466 input_cgraph_opt_summary (nodes);
1467 nodes.release ();
1468 }
1469
1470 merge_profile_summaries (file_data_vec);
1471 get_working_sets ();
1472
1473
1474 /* Clear out the aux field that was used to store enough state to
1475 tell which nodes should be overwritten. */
1476 FOR_EACH_FUNCTION (node)
1477 {
1478 /* Some nodes may have been created by cgraph_node. This
1479 happens when the callgraph contains nested functions. If the
1480 node for the parent function was never emitted to the gimple
1481 file, cgraph_node will create a node for it when setting the
1482 context of the nested function. */
1483 if (node->symbol.lto_file_data)
1484 node->symbol.aux = NULL;
1485 }
1486 }
1487
1488 /* True when we need optimization summary for NODE. */
1489
1490 static int
1491 output_cgraph_opt_summary_p (struct cgraph_node *node)
1492 {
1493 return (node->clone_of
1494 && (node->clone.tree_map
1495 || node->clone.args_to_skip
1496 || node->clone.combined_args_to_skip));
1497 }
1498
1499 /* Output optimization summary for EDGE to OB. */
1500 static void
1501 output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
1502 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
1503 {
1504 }
1505
1506 /* Output optimization summary for NODE to OB. */
1507
1508 static void
1509 output_node_opt_summary (struct output_block *ob,
1510 struct cgraph_node *node,
1511 lto_symtab_encoder_t encoder)
1512 {
1513 unsigned int index;
1514 bitmap_iterator bi;
1515 struct ipa_replace_map *map;
1516 struct bitpack_d bp;
1517 int i;
1518 struct cgraph_edge *e;
1519
1520 if (node->clone.args_to_skip)
1521 {
1522 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.args_to_skip));
1523 EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
1524 streamer_write_uhwi (ob, index);
1525 }
1526 else
1527 streamer_write_uhwi (ob, 0);
1528 if (node->clone.combined_args_to_skip)
1529 {
1530 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.combined_args_to_skip));
1531 EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
1532 streamer_write_uhwi (ob, index);
1533 }
1534 else
1535 streamer_write_uhwi (ob, 0);
1536 streamer_write_uhwi (ob, vec_safe_length (node->clone.tree_map));
1537 FOR_EACH_VEC_SAFE_ELT (node->clone.tree_map, i, map)
1538 {
1539 int parm_num;
1540 tree parm;
1541
1542 for (parm_num = 0, parm = DECL_ARGUMENTS (node->symbol.decl); parm;
1543 parm = DECL_CHAIN (parm), parm_num++)
1544 if (map->old_tree == parm)
1545 break;
1546 /* At the moment we assume all old trees to be PARM_DECLs, because we have no
1547 mechanism to store function local declarations into summaries. */
1548 gcc_assert (parm);
1549 streamer_write_uhwi (ob, parm_num);
1550 gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
1551 stream_write_tree (ob, map->new_tree, true);
1552 bp = bitpack_create (ob->main_stream);
1553 bp_pack_value (&bp, map->replace_p, 1);
1554 bp_pack_value (&bp, map->ref_p, 1);
1555 streamer_write_bitpack (&bp);
1556 }
1557
1558 if (lto_symtab_encoder_in_partition_p (encoder, (symtab_node) node))
1559 {
1560 for (e = node->callees; e; e = e->next_callee)
1561 output_edge_opt_summary (ob, e);
1562 for (e = node->indirect_calls; e; e = e->next_callee)
1563 output_edge_opt_summary (ob, e);
1564 }
1565 }
1566
1567 /* Output optimization summaries stored in callgraph.
1568 At the moment it is the clone info structure. */
1569
1570 static void
1571 output_cgraph_opt_summary (void)
1572 {
1573 int i, n_nodes;
1574 lto_symtab_encoder_t encoder;
1575 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1576 unsigned count = 0;
1577
1578 ob->cgraph_node = NULL;
1579 encoder = ob->decl_state->symtab_node_encoder;
1580 n_nodes = lto_symtab_encoder_size (encoder);
1581 for (i = 0; i < n_nodes; i++)
1582 {
1583 symtab_node node = lto_symtab_encoder_deref (encoder, i);
1584 cgraph_node *cnode = dyn_cast <cgraph_node> (node);
1585 if (cnode && output_cgraph_opt_summary_p (cnode))
1586 count++;
1587 }
1588 streamer_write_uhwi (ob, count);
1589 for (i = 0; i < n_nodes; i++)
1590 {
1591 symtab_node node = lto_symtab_encoder_deref (encoder, i);
1592 cgraph_node *cnode = dyn_cast <cgraph_node> (node);
1593 if (cnode && output_cgraph_opt_summary_p (cnode))
1594 {
1595 streamer_write_uhwi (ob, i);
1596 output_node_opt_summary (ob, cnode, encoder);
1597 }
1598 }
1599 produce_asm (ob, NULL);
1600 destroy_output_block (ob);
1601 }
1602
1603 /* Input optimization summary of EDGE.  */
1604
1605 static void
1606 input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
1607 struct lto_input_block *ib_main ATTRIBUTE_UNUSED)
1608 {
1609 }
1610
1611 /* Input optimization summary of NODE.  */
1612
1613 static void
1614 input_node_opt_summary (struct cgraph_node *node,
1615 struct lto_input_block *ib_main,
1616 struct data_in *data_in)
1617 {
1618 int i;
1619 int count;
1620 int bit;
1621 struct bitpack_d bp;
1622 struct cgraph_edge *e;
1623
1624 count = streamer_read_uhwi (ib_main);
1625 if (count)
1626 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
1627 for (i = 0; i < count; i++)
1628 {
1629 bit = streamer_read_uhwi (ib_main);
1630 bitmap_set_bit (node->clone.args_to_skip, bit);
1631 }
1632 count = streamer_read_uhwi (ib_main);
1633 if (count)
1634 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
1635 for (i = 0; i < count; i++)
1636 {
1637 bit = streamer_read_uhwi (ib_main);
1638 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
1639 }
1640 count = streamer_read_uhwi (ib_main);
1641 for (i = 0; i < count; i++)
1642 {
1643 struct ipa_replace_map *map = ggc_alloc_ipa_replace_map ();
1644
1645 vec_safe_push (node->clone.tree_map, map);
1646 map->parm_num = streamer_read_uhwi (ib_main);
1647 map->old_tree = NULL;
1648 map->new_tree = stream_read_tree (ib_main, data_in);
1649 bp = streamer_read_bitpack (ib_main);
1650 map->replace_p = bp_unpack_value (&bp, 1);
1651 map->ref_p = bp_unpack_value (&bp, 1);
1652 }
1653 for (e = node->callees; e; e = e->next_callee)
1654 input_edge_opt_summary (e, ib_main);
1655 for (e = node->indirect_calls; e; e = e->next_callee)
1656 input_edge_opt_summary (e, ib_main);
1657 }
1658
1659 /* Read section in file FILE_DATA of length LEN with data DATA. */
1660
1661 static void
1662 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
1663 const char *data, size_t len,
1664 vec<symtab_node> nodes)
1665 {
1666 const struct lto_function_header *header =
1667 (const struct lto_function_header *) data;
1668 const int cfg_offset = sizeof (struct lto_function_header);
1669 const int main_offset = cfg_offset + header->cfg_size;
1670 const int string_offset = main_offset + header->main_size;
1671 struct data_in *data_in;
1672 struct lto_input_block ib_main;
1673 unsigned int i;
1674 unsigned int count;
1675
1676 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
1677 header->main_size);
1678
1679 data_in =
1680 lto_data_in_create (file_data, (const char *) data + string_offset,
1681 header->string_size, vNULL);
1682 count = streamer_read_uhwi (&ib_main);
1683
1684 for (i = 0; i < count; i++)
1685 {
1686 int ref = streamer_read_uhwi (&ib_main);
1687 input_node_opt_summary (cgraph (nodes[ref]),
1688 &ib_main, data_in);
1689 }
1690 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
1691 len);
1692 lto_data_in_delete (data_in);
1693 }
1694
1695 /* Input optimization summary of cgraph. */
1696
1697 static void
1698 input_cgraph_opt_summary (vec<symtab_node> nodes)
1699 {
1700 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1701 struct lto_file_decl_data *file_data;
1702 unsigned int j = 0;
1703
1704 while ((file_data = file_data_vec[j++]))
1705 {
1706 size_t len;
1707 const char *data =
1708 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
1709 &len);
1710
1711 if (data)
1712 input_cgraph_opt_section (file_data, data, len, nodes);
1713 }
1714 }