lto-cgraph.c (get_alias_symbol): Remove weakref sanity check.
[gcc.git] / gcc / lto-cgraph.c
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright (C) 2009-2013 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "expr.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "hashtab.h"
33 #include "langhooks.h"
34 #include "basic-block.h"
35 #include "tree-flow.h"
36 #include "cgraph.h"
37 #include "function.h"
38 #include "ggc.h"
39 #include "diagnostic-core.h"
40 #include "except.h"
41 #include "vec.h"
42 #include "timevar.h"
43 #include "pointer-set.h"
44 #include "lto-streamer.h"
45 #include "data-streamer.h"
46 #include "tree-streamer.h"
47 #include "gcov-io.h"
48 #include "tree-pass.h"
49 #include "profile.h"
50
/* Forward declarations for the optimization-summary streamers defined
   later in this file.  */
static void output_cgraph_opt_summary (void);
static void input_cgraph_opt_summary (vec<symtab_node> nodes);

/* Number of LDPR values known to GCC.  */
#define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)

/* All node orders are offset by ORDER_BASE.  */
static int order_base;
59
/* Cgraph streaming is organized as a set of records whose type
   is indicated by a tag.  */
enum LTO_symtab_tags
{
  /* Must leave 0 for the stopper.  */

  /* Cgraph node without body available.  */
  LTO_symtab_unavail_node = 1,
  /* Cgraph node with function body.  */
  LTO_symtab_analyzed_node,
  /* Cgraph edges.  */
  LTO_symtab_edge,
  /* Indirect call edge (callee unknown at compile time).  */
  LTO_symtab_indirect_edge,
  /* Varpool (variable) node.  */
  LTO_symtab_variable,
  /* Sentinel used for range checks of streamed tags; keep last.  */
  LTO_symtab_last_tag
};
76
77 /* Create a new symtab encoder.
78 if FOR_INPUT, the encoder allocate only datastructures needed
79 to read the symtab. */
80
81 lto_symtab_encoder_t
82 lto_symtab_encoder_new (bool for_input)
83 {
84 lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
85
86 if (!for_input)
87 encoder->map = pointer_map_create ();
88 encoder->nodes.create (0);
89 return encoder;
90 }
91
92
93 /* Delete ENCODER and its components. */
94
95 void
96 lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
97 {
98 encoder->nodes.release ();
99 if (encoder->map)
100 pointer_map_destroy (encoder->map);
101 free (encoder);
102 }
103
104
/* Return the existing reference number of NODE in the symtab encoder in
   output block OB.  Assign a new reference if this is the first time
   NODE is encoded.  */

int
lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
			   symtab_node node)
{
  int ref;
  void **slot;

  /* Input encoders carry no pointer map (see lto_symtab_encoder_new);
     duplicates cannot arise on input, so just append.  */
  if (!encoder->map)
    {
      lto_encoder_entry entry = {node, false, false, false};

      ref = encoder->nodes.length ();
      encoder->nodes.safe_push (entry);
      return ref;
    }

  slot = pointer_map_contains (encoder->map, node);
  if (!slot || !*slot)
    {
      lto_encoder_entry entry = {node, false, false, false};
      ref = encoder->nodes.length ();
      if (!slot)
	slot = pointer_map_insert (encoder->map, node);
      /* Map values are biased by 1 so that 0 can mean
	 "absent/deleted" (see lto_symtab_encoder_delete_node).  */
      *slot = (void *) (intptr_t) (ref + 1);
      encoder->nodes.safe_push (entry);
    }
  else
    ref = (size_t) *slot - 1;

  return ref;
}
140
/* Remove NODE from ENCODER.  Return true if it was present.
   Only valid on output encoders (those that have a pointer map).  */

bool
lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
			        symtab_node node)
{
  void **slot, **last_slot;
  int index;
  lto_encoder_entry last_node;

  slot = pointer_map_contains (encoder->map, node);
  /* A cleared slot (0) means the node was previously deleted;
     map values are biased by 1.  */
  if (slot == NULL || !*slot)
    return false;

  index = (size_t) *slot - 1;
  gcc_checking_assert (encoder->nodes[index].node == node);

  /* Remove from vector.  We do this by swapping node with the last element
     of the vector.  */
  last_node = encoder->nodes.pop ();
  if (last_node.node != node)
    {
      last_slot = pointer_map_contains (encoder->map, last_node.node);
      gcc_checking_assert (last_slot && *last_slot);
      /* Re-bias the moved node's index by 1, matching the encode path.  */
      *last_slot = (void *)(size_t) (index + 1);

      /* Move the last element to the original spot of NODE.  */
      encoder->nodes[index] = last_node;
    }

  /* Remove element from hash table.  */
  *slot = NULL;
  return true;
}
175
176
177 /* Return TRUE if we should encode initializer of NODE (if any). */
178
179 bool
180 lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
181 struct cgraph_node *node)
182 {
183 int index = lto_symtab_encoder_lookup (encoder, (symtab_node)node);
184 return encoder->nodes[index].body;
185 }
186
/* Specify that we should encode the body of NODE (if any), encoding
   NODE first if it is not yet in ENCODER.  */

static void
lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
				    struct cgraph_node *node)
{
  int index = lto_symtab_encoder_encode (encoder, (symtab_node)node);
  gcc_checking_assert (encoder->nodes[index].node == (symtab_node)node);
  encoder->nodes[index].body = true;
}
197
198 /* Return TRUE if we should encode initializer of NODE (if any). */
199
200 bool
201 lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
202 struct varpool_node *node)
203 {
204 int index = lto_symtab_encoder_lookup (encoder, (symtab_node)node);
205 if (index == LCC_NOT_FOUND)
206 return false;
207 return encoder->nodes[index].initializer;
208 }
209
/* Specify that we should encode the initializer of NODE (if any).
   NODE must already be in ENCODER.  */

static void
lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
					   struct varpool_node *node)
{
  int index = lto_symtab_encoder_lookup (encoder, (symtab_node)node);
  encoder->nodes[index].initializer = true;
}
219
/* Return TRUE if NODE is in the partition described by ENCODER.
   Nodes never encoded are trivially outside the partition.  */

bool
lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
				   symtab_node node)
{
  int index = lto_symtab_encoder_lookup (encoder, (symtab_node)node);
  if (index == LCC_NOT_FOUND)
    return false;
  return encoder->nodes[index].in_partition;
}
231
/* Mark NODE as being part of the partition described by ENCODER,
   encoding NODE first if it is not yet in ENCODER.  */

void
lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
				     symtab_node node)
{
  int index = lto_symtab_encoder_encode (encoder, (symtab_node)node);
  encoder->nodes[index].in_partition = true;
}
241
/* Output the cgraph EDGE to OB using ENCODER.
   The record layout (tag, caller ref, [callee ref], count, bitpack)
   must stay in sync with the corresponding reader; do not reorder.  */

static void
lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
		 lto_symtab_encoder_t encoder)
{
  unsigned int uid;
  intptr_t ref;
  struct bitpack_d bp;

  /* Indirect edges have no known callee; the tag tells the reader
     whether a callee reference follows.  */
  if (edge->indirect_unknown_callee)
    streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
			 LTO_symtab_indirect_edge);
  else
    streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
			 LTO_symtab_edge);

  ref = lto_symtab_encoder_lookup (encoder, (symtab_node)edge->caller);
  gcc_assert (ref != LCC_NOT_FOUND);
  streamer_write_hwi_stream (ob->main_stream, ref);

  if (!edge->indirect_unknown_callee)
    {
      ref = lto_symtab_encoder_lookup (encoder, (symtab_node)edge->callee);
      gcc_assert (ref != LCC_NOT_FOUND);
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  streamer_write_gcov_count_stream (ob->main_stream, edge->count);

  bp = bitpack_create (ob->main_stream);
  /* If the caller's body is gone (e.g. boundary node), fall back to the
     statement uid recorded for LTO; otherwise take it from the stmt.  */
  uid = (!gimple_has_body_p (edge->caller->symbol.decl)
	 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt));
  bp_pack_enum (&bp, cgraph_inline_failed_enum,
		CIF_N_REASONS, edge->inline_failed);
  bp_pack_var_len_unsigned (&bp, uid);
  bp_pack_var_len_unsigned (&bp, edge->frequency);
  bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
  bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
  bp_pack_value (&bp, edge->can_throw_external, 1);
  if (edge->indirect_unknown_callee)
    {
      int flags = edge->indirect_info->ecf_flags;
      bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
      /* Flags that should not appear on indirect calls.  */
      gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
			     | ECF_MAY_BE_ALLOCA
			     | ECF_SIBCALL
			     | ECF_LEAF
			     | ECF_NOVOPS)));
    }
  streamer_write_bitpack (&bp);
}
300
301 /* Return if LIST contain references from other partitions. */
302
303 bool
304 referenced_from_other_partition_p (struct ipa_ref_list *list, lto_symtab_encoder_t encoder)
305 {
306 int i;
307 struct ipa_ref *ref;
308 for (i = 0; ipa_ref_list_referring_iterate (list, i, ref); i++)
309 {
310 if (ref->referring->symbol.in_other_partition
311 || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
312 return true;
313 }
314 return false;
315 }
316
317 /* Return true when node is reachable from other partition. */
318
319 bool
320 reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
321 {
322 struct cgraph_edge *e;
323 if (!node->symbol.definition)
324 return false;
325 if (node->global.inlined_to)
326 return false;
327 for (e = node->callers; e; e = e->next_caller)
328 if (e->caller->symbol.in_other_partition
329 || !lto_symtab_encoder_in_partition_p (encoder, (symtab_node)e->caller))
330 return true;
331 return false;
332 }
333
/* Return true if LIST contains references from this partition
   (i.e. from some node in ENCODER's partition).  */

bool
referenced_from_this_partition_p (struct ipa_ref_list *list,
				  lto_symtab_encoder_t encoder)
{
  int i;
  struct ipa_ref *ref;
  for (i = 0; ipa_ref_list_referring_iterate (list, i, ref); i++)
    if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
      return true;
  return false;
}
347
348 /* Return true when node is reachable from other partition. */
349
350 bool
351 reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
352 {
353 struct cgraph_edge *e;
354 for (e = node->callers; e; e = e->next_caller)
355 if (lto_symtab_encoder_in_partition_p (encoder, (symtab_node)e->caller))
356 return true;
357 return false;
358 }
359
360 /* Output the cgraph NODE to OB. ENCODER is used to find the
361 reference number of NODE->inlined_to. SET is the set of nodes we
362 are writing to the current file. If NODE is not in SET, then NODE
363 is a boundary of a cgraph_node_set and we pretend NODE just has a
364 decl and no callees. WRITTEN_DECLS is the set of FUNCTION_DECLs
365 that have had their callgraph node written so far. This is used to
366 determine if NODE is a clone of a previously written node. */
367
368 static void
369 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
370 lto_symtab_encoder_t encoder)
371 {
372 unsigned int tag;
373 struct bitpack_d bp;
374 bool boundary_p;
375 intptr_t ref;
376 bool in_other_partition = false;
377 struct cgraph_node *clone_of;
378 struct ipa_opt_pass_d *pass;
379 int i;
380 bool alias_p;
381
382 boundary_p = !lto_symtab_encoder_in_partition_p (encoder, (symtab_node)node);
383
384 if (node->symbol.analyzed && !boundary_p)
385 tag = LTO_symtab_analyzed_node;
386 else
387 tag = LTO_symtab_unavail_node;
388
389 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
390 tag);
391 streamer_write_hwi_stream (ob->main_stream, node->symbol.order);
392
393 /* In WPA mode, we only output part of the call-graph. Also, we
394 fake cgraph node attributes. There are two cases that we care.
395
396 Boundary nodes: There are nodes that are not part of SET but are
397 called from within SET. We artificially make them look like
398 externally visible nodes with no function body.
399
400 Cherry-picked nodes: These are nodes we pulled from other
401 translation units into SET during IPA-inlining. We make them as
402 local static nodes to prevent clashes with other local statics. */
403 if (boundary_p && node->symbol.analyzed && !DECL_EXTERNAL (node->symbol.decl))
404 {
405 /* Inline clones can not be part of boundary.
406 gcc_assert (!node->global.inlined_to);
407
408 FIXME: At the moment they can be, when partition contains an inline
409 clone that is clone of inline clone from outside partition. We can
410 reshape the clone tree and make other tree to be the root, but it
411 needs a bit extra work and will be promplty done by cgraph_remove_node
412 after reading back. */
413 in_other_partition = 1;
414 }
415
416 clone_of = node->clone_of;
417 while (clone_of
418 && (ref = lto_symtab_encoder_lookup (encoder, (symtab_node)clone_of)) == LCC_NOT_FOUND)
419 if (clone_of->prev_sibling_clone)
420 clone_of = clone_of->prev_sibling_clone;
421 else
422 clone_of = clone_of->clone_of;
423
424 if (LTO_symtab_analyzed_node)
425 gcc_assert (clone_of || !node->clone_of);
426 if (!clone_of)
427 streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
428 else
429 streamer_write_hwi_stream (ob->main_stream, ref);
430
431
432 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->symbol.decl);
433 streamer_write_gcov_count_stream (ob->main_stream, node->count);
434 streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
435
436 streamer_write_hwi_stream (ob->main_stream,
437 node->ipa_transforms_to_apply.length ());
438 FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
439 streamer_write_hwi_stream (ob->main_stream, pass->pass.static_pass_number);
440
441 if (tag == LTO_symtab_analyzed_node)
442 {
443 if (node->global.inlined_to)
444 {
445 ref = lto_symtab_encoder_lookup (encoder, (symtab_node)node->global.inlined_to);
446 gcc_assert (ref != LCC_NOT_FOUND);
447 }
448 else
449 ref = LCC_NOT_FOUND;
450
451 streamer_write_hwi_stream (ob->main_stream, ref);
452 }
453
454 if (node->symbol.same_comdat_group && !boundary_p)
455 {
456 ref = lto_symtab_encoder_lookup (encoder,
457 node->symbol.same_comdat_group);
458 gcc_assert (ref != LCC_NOT_FOUND);
459 }
460 else
461 ref = LCC_NOT_FOUND;
462 streamer_write_hwi_stream (ob->main_stream, ref);
463
464 bp = bitpack_create (ob->main_stream);
465 bp_pack_value (&bp, node->local.local, 1);
466 bp_pack_value (&bp, node->symbol.externally_visible, 1);
467 bp_pack_value (&bp, node->symbol.definition, 1);
468 bp_pack_value (&bp, node->local.versionable, 1);
469 bp_pack_value (&bp, node->local.can_change_signature, 1);
470 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
471 bp_pack_value (&bp, node->symbol.force_output, 1);
472 bp_pack_value (&bp, node->symbol.unique_name, 1);
473 bp_pack_value (&bp, node->symbol.address_taken, 1);
474 bp_pack_value (&bp, node->abstract_and_needed, 1);
475 bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
476 && !DECL_EXTERNAL (node->symbol.decl)
477 && !DECL_COMDAT (node->symbol.decl)
478 && (reachable_from_other_partition_p (node, encoder)
479 || referenced_from_other_partition_p (&node->symbol.ref_list,
480 encoder)), 1);
481 bp_pack_value (&bp, node->lowered, 1);
482 bp_pack_value (&bp, in_other_partition, 1);
483 /* Real aliases in a boundary become non-aliases. However we still stream
484 alias info on weakrefs.
485 TODO: We lose a bit of information here - when we know that variable is
486 defined in other unit, we may use the info on aliases to resolve
487 symbol1 != symbol2 type tests that we can do only for locally defined objects
488 otherwise. */
489 alias_p = node->symbol.alias && (!boundary_p || DECL_EXTERNAL (node->symbol.decl));
490 bp_pack_value (&bp, alias_p, 1);
491 bp_pack_value (&bp, node->frequency, 2);
492 bp_pack_value (&bp, node->only_called_at_startup, 1);
493 bp_pack_value (&bp, node->only_called_at_exit, 1);
494 bp_pack_value (&bp, node->tm_clone, 1);
495 bp_pack_value (&bp, node->thunk.thunk_p && !boundary_p, 1);
496 bp_pack_enum (&bp, ld_plugin_symbol_resolution,
497 LDPR_NUM_KNOWN, node->symbol.resolution);
498 streamer_write_bitpack (&bp);
499
500 if (node->thunk.thunk_p && !boundary_p)
501 {
502 streamer_write_uhwi_stream
503 (ob->main_stream,
504 1 + (node->thunk.this_adjusting != 0) * 2
505 + (node->thunk.virtual_offset_p != 0) * 4);
506 streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
507 streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
508 }
509 }
510
/* Output the varpool NODE to OB.
   If NODE is not in SET, then NODE is a boundary.  */

static void
lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
			 lto_symtab_encoder_t encoder)
{
  bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, (symtab_node)node);
  struct bitpack_d bp;
  int ref;
  bool alias_p;

  streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
		       LTO_symtab_variable);
  streamer_write_hwi_stream (ob->main_stream, node->symbol.order);
  lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->symbol.decl);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, node->symbol.externally_visible, 1);
  bp_pack_value (&bp, node->symbol.force_output, 1);
  bp_pack_value (&bp, node->symbol.unique_name, 1);
  bp_pack_value (&bp, node->symbol.definition, 1);
  /* Real aliases in a boundary become non-aliases; see lto_output_node.  */
  alias_p = node->symbol.alias && (!boundary_p || DECL_EXTERNAL (node->symbol.decl));
  bp_pack_value (&bp, alias_p, 1);
  bp_pack_value (&bp, node->symbol.analyzed && !boundary_p, 1);
  gcc_assert (node->symbol.definition || !node->symbol.analyzed);
  /* Constant pool initializers can be de-unified into individual ltrans units.
     FIXME: Alternatively at -Os we may want to avoid generating for them the local
     labels and share them across LTRANS partitions.  */
  if (DECL_IN_CONSTANT_POOL (node->symbol.decl)
      && !DECL_EXTERNAL (node->symbol.decl)
      && !DECL_COMDAT (node->symbol.decl))
    {
      bp_pack_value (&bp, 0, 1);  /* used_from_other_partition.  */
      bp_pack_value (&bp, 0, 1);  /* in_other_partition.  */
    }
  else
    {
      bp_pack_value (&bp, node->symbol.definition
		     && referenced_from_other_partition_p (&node->symbol.ref_list,
							   encoder), 1);
      bp_pack_value (&bp, node->symbol.analyzed
		     && boundary_p && !DECL_EXTERNAL (node->symbol.decl), 1);
	  /* in_other_partition.  */
    }
  streamer_write_bitpack (&bp);
  if (node->symbol.same_comdat_group && !boundary_p)
    {
      ref = lto_symtab_encoder_lookup (encoder,
				       node->symbol.same_comdat_group);
      gcc_assert (ref != LCC_NOT_FOUND);
    }
  else
    ref = LCC_NOT_FOUND;
  streamer_write_hwi_stream (ob->main_stream, ref);
  streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
		       LDPR_NUM_KNOWN, node->symbol.resolution);
}
568
/* Output the IPA reference REF to OB using ENCODER.
   Streams the use kind as a 2-bit bitpack followed by the encoder
   reference of the referred symbol.  */

static void
lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
		lto_symtab_encoder_t encoder)
{
  struct bitpack_d bp;
  int nref;

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ref->use, 2);
  streamer_write_bitpack (&bp);
  nref = lto_symtab_encoder_lookup (encoder, ref->referred);
  gcc_assert (nref != LCC_NOT_FOUND);
  streamer_write_hwi_stream (ob->main_stream, nref);
}
586
/* Stream out profile_summary to OB.  Writes 0 runs when no profile
   data is available, which the reader treats as "no profile".  */

static void
output_profile_summary (struct lto_simple_output_block *ob)
{
  unsigned h_ix;
  struct bitpack_d bp;

  if (profile_info)
    {
      /* We do not output num and run_max, they are not used by
	 GCC profile feedback and they are difficult to merge from multiple
	 units.  */
      gcc_assert (profile_info->runs);
      streamer_write_uhwi_stream (ob->main_stream, profile_info->runs);
      streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_max);

      /* sum_all is needed for computing the working set with the
	 histogram.  */
      streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_all);

      /* Create and output a bitpack of non-zero histogram entries indices.  */
      bp = bitpack_create (ob->main_stream);
      for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
	bp_pack_value (&bp, profile_info->histogram[h_ix].num_counters > 0, 1);
      streamer_write_bitpack (&bp);
      /* Now stream out only those non-zero entries.  */
      for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
	{
	  if (!profile_info->histogram[h_ix].num_counters)
	    continue;
	  streamer_write_gcov_count_stream (ob->main_stream,
				      profile_info->histogram[h_ix].num_counters);
	  streamer_write_gcov_count_stream (ob->main_stream,
				      profile_info->histogram[h_ix].min_value);
	  streamer_write_gcov_count_stream (ob->main_stream,
				      profile_info->histogram[h_ix].cum_value);
	}
      /* IPA-profile computes hot bb threshold based on cumulated
	 whole program profile.  We need to stream it down to ltrans.  */
      if (flag_wpa)
	streamer_write_gcov_count_stream (ob->main_stream,
					  get_hot_bb_threshold ());
    }
  else
    streamer_write_uhwi_stream (ob->main_stream, 0);
}
634
635 /* Output all callees or indirect outgoing edges. EDGE must be the first such
636 edge. */
637
638 static void
639 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
640 struct lto_simple_output_block *ob,
641 lto_symtab_encoder_t encoder)
642 {
643 if (!edge)
644 return;
645
646 /* Output edges in backward direction, so the reconstructed callgraph match
647 and it is easy to associate call sites in the IPA pass summaries. */
648 while (edge->next_callee)
649 edge = edge->next_callee;
650 for (; edge; edge = edge->prev_callee)
651 lto_output_edge (ob, edge, encoder);
652 }
653
/* Output the IPA references of all nodes in ENCODER's partition to the
   LTO_section_refs section.  Each record is: reference count, owning
   node's encoder index, then the references; a trailing 0 terminates.  */

static void
output_refs (lto_symtab_encoder_t encoder)
{
  lto_symtab_encoder_iterator lsei;
  struct lto_simple_output_block *ob;
  int count;
  struct ipa_ref *ref;
  int i;

  ob = lto_create_simple_output_block (LTO_section_refs);

  for (lsei = lsei_start_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_in_partition (&lsei))
    {
      symtab_node node = lsei_node (lsei);

      count = ipa_ref_list_nreferences (&node->symbol.ref_list);
      if (count)
	{
	  streamer_write_uhwi_stream (ob->main_stream, count);
	  streamer_write_uhwi_stream (ob->main_stream,
				     lto_symtab_encoder_lookup (encoder, node));
	  for (i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list,
						      i, ref); i++)
	    lto_output_ref (ob, ref, encoder);
	}
    }

  /* Zero count terminates the section for the reader.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  lto_destroy_simple_output_block (ob);
}
688
/* Add NODE into encoder as well as nodes it is cloned from.
   Do it in a way so clones appear first.
   NOTE(review): the recursion encodes the clone origin before NODE, so
   origins receive lower indices than their clones — the wording above
   looks inverted relative to the code; confirm intent.  */

static void
add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
	     bool include_body)
{
  if (node->clone_of)
    add_node_to (encoder, node->clone_of, include_body);
  else if (include_body)
    /* Only the root of the clone chain gets the body flag here;
       presumably the root holds the actual body — confirm.  */
    lto_set_symtab_encoder_encode_body (encoder, node);
  lto_symtab_encoder_encode (encoder, (symtab_node)node);
}
702
/* Add all symbols referenced from LIST to ENCODER.  Function referents
   are added together with their clone origins via add_node_to (without
   bodies); other symbols are encoded directly.  */

static void
add_references (lto_symtab_encoder_t encoder,
		struct ipa_ref_list *list)
{
  int i;
  struct ipa_ref *ref;
  for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
    if (is_a <cgraph_node> (ref->referred))
      add_node_to (encoder, ipa_ref_node (ref), false);
    else
      lto_symtab_encoder_encode (encoder, ref->referred);
}
717
/* Find all symbols we want to stream into given partition and insert them
   to encoders.

   The function actually replaces IN_ENCODER by new one.  The reason is that
   streaming code needs clone's origin to be streamed before clone.  This
   means that we need to insert the nodes in specific order.  This order is
   ignored by the partitioning logic earlier.  */

lto_symtab_encoder_t
compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
{
  struct cgraph_node *node;
  struct cgraph_edge *edge;
  int i;
  lto_symtab_encoder_t encoder;
  lto_symtab_encoder_iterator lsei;

  encoder = lto_symtab_encoder_new (false);

  /* Go over all entries in the IN_ENCODER and duplicate them to
     ENCODER.  At the same time insert masters of clones so
     every master appears before clone.  */
  for (lsei = lsei_start_function_in_partition (in_encoder);
       !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      add_node_to (encoder, node, true);
      lto_set_symtab_encoder_in_partition (encoder, (symtab_node)node);
      add_references (encoder, &node->symbol.ref_list);
    }
  for (lsei = lsei_start_variable_in_partition (in_encoder);
       !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
    {
      struct varpool_node *vnode = lsei_varpool_node (lsei);

      lto_set_symtab_encoder_in_partition (encoder, (symtab_node)vnode);
      lto_set_symtab_encoder_encode_initializer (encoder, vnode);
      add_references (encoder, &vnode->symbol.ref_list);
    }
  /* Pickle in also the initializer of all referenced readonly variables
     to help folding.  Constant pool variables are not shared, so we must
     pickle those too.
     Note: the loop bound is re-evaluated each iteration on purpose —
     add_references below may grow the encoder.  */
  for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node node = lto_symtab_encoder_deref (encoder, i);
      if (varpool_node *vnode = dyn_cast <varpool_node> (node))
	{
	  if (DECL_INITIAL (vnode->symbol.decl)
	      && !lto_symtab_encoder_encode_initializer_p (encoder,
							   vnode)
	      && const_value_known_p (vnode->symbol.decl))
	    {
	      lto_set_symtab_encoder_encode_initializer (encoder, vnode);
	      add_references (encoder, &vnode->symbol.ref_list);
	    }
	}
    }

  /* Go over all the nodes again to include callees that are not in
     SET.  */
  for (lsei = lsei_start_function_in_partition (encoder);
       !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      for (edge = node->callees; edge; edge = edge->next_callee)
	{
	  struct cgraph_node *callee = edge->callee;
	  if (!lto_symtab_encoder_in_partition_p (encoder, (symtab_node)callee))
	    {
	      /* We should have moved all the inlines.  */
	      gcc_assert (!callee->global.inlined_to);
	      add_node_to (encoder, callee, false);
	    }
	}
    }
  lto_symtab_encoder_delete (in_encoder);
  return encoder;
}
796
/* Output the symbol table for the current partition: profile summary,
   all encoded nodes (in encoder order so origins precede clones), the
   outgoing edges, toplevel asms (once), and finally the references.  */

void
output_symtab (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob;
  lto_symtab_encoder_iterator lsei;
  int i, n_nodes;
  lto_symtab_encoder_t encoder;
  /* Static: toplevel asms must be emitted exactly once across all
     partitions (see comment below).  */
  static bool asm_nodes_output = false;

  if (flag_wpa)
    output_cgraph_opt_summary ();

  ob = lto_create_simple_output_block (LTO_section_symtab_nodes);

  output_profile_summary (ob);

  /* An encoder for cgraph nodes should have been created by
     ipa_write_summaries_1.  */
  gcc_assert (ob->decl_state->symtab_node_encoder);
  encoder = ob->decl_state->symtab_node_encoder;

  /* Write out the nodes.  We must first output a node and then its clones,
     otherwise at a time reading back the node there would be nothing to clone
     from.  */
  n_nodes = lto_symtab_encoder_size (encoder);
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node node = lto_symtab_encoder_deref (encoder, i);
      if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
        lto_output_node (ob, cnode, encoder);
      else
        lto_output_varpool_node (ob, varpool (node), encoder);

    }

  /* Go over the nodes in SET again to write edges.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      output_outgoing_cgraph_edges (node->callees, ob, encoder);
      output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
    }

  /* Zero tag terminates the node/edge stream for the reader.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  lto_destroy_simple_output_block (ob);

  /* Emit toplevel asms.
     When doing WPA we must output every asm just once.  Since we do not partition asm
     nodes at all, output them to first output.  This is kind of hack, but should work
     well.  */
  if (!asm_nodes_output)
    {
      asm_nodes_output = true;
      lto_output_toplevel_asms ();
    }

  output_refs (encoder);
}
860
/* Overwrite the information in NODE based on FILE_DATA, TAG, FLAGS,
   STACK_SIZE, SELF_TIME and SELF_SIZE.  This is called either to initialize
   NODE or to replace the values in it, for instance because the first
   time we saw it, the function body was not available but now it
   is.  BP is a bitpack with all the bitflags for NODE read from the
   stream.  The unpack order below must mirror the pack order in
   lto_output_node exactly.  */

static void
input_overwrite_node (struct lto_file_decl_data *file_data,
		      struct cgraph_node *node,
		      enum LTO_symtab_tags tag,
		      struct bitpack_d *bp)
{
  /* Stash the tag in aux so input_node can detect duplicate reads.  */
  node->symbol.aux = (void *) tag;
  node->symbol.lto_file_data = file_data;

  node->local.local = bp_unpack_value (bp, 1);
  node->symbol.externally_visible = bp_unpack_value (bp, 1);
  node->symbol.definition = bp_unpack_value (bp, 1);
  node->local.versionable = bp_unpack_value (bp, 1);
  node->local.can_change_signature = bp_unpack_value (bp, 1);
  node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
  node->symbol.force_output = bp_unpack_value (bp, 1);
  node->symbol.unique_name = bp_unpack_value (bp, 1);
  node->symbol.address_taken = bp_unpack_value (bp, 1);
  node->abstract_and_needed = bp_unpack_value (bp, 1);
  node->symbol.used_from_other_partition = bp_unpack_value (bp, 1);
  node->lowered = bp_unpack_value (bp, 1);
  node->symbol.analyzed = tag == LTO_symtab_analyzed_node;
  node->symbol.in_other_partition = bp_unpack_value (bp, 1);
  if (node->symbol.in_other_partition
      /* Avoid updating decl when we are seeing just inline clone.
	 When inlining function that has functions already inlined into it,
	 we produce clones of inline clones.

	 WPA partitioning might put each clone into different unit and
	 we might end up streaming inline clone from other partition
	 to support clone we are interested in.  */
      && (!node->clone_of
	  || node->clone_of->symbol.decl != node->symbol.decl))
    {
      DECL_EXTERNAL (node->symbol.decl) = 1;
      TREE_STATIC (node->symbol.decl) = 0;
    }
  node->symbol.alias = bp_unpack_value (bp, 1);
  node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
  node->only_called_at_startup = bp_unpack_value (bp, 1);
  node->only_called_at_exit = bp_unpack_value (bp, 1);
  node->tm_clone = bp_unpack_value (bp, 1);
  node->thunk.thunk_p = bp_unpack_value (bp, 1);
  node->symbol.resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
				            LDPR_NUM_KNOWN);
}
914
915 /* Return string alias is alias of. */
916
917 static tree
918 get_alias_symbol (tree decl)
919 {
920 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
921 return get_identifier (TREE_STRING_POINTER
922 (TREE_VALUE (TREE_VALUE (alias))));
923 }
924
/* Read a node from input_block IB.  TAG is the node's tag just read.
   Return the node read or overwriten.

   FILE_DATA identifies the LTO file the node comes from and NODES is
   the vector of nodes already read from this stream, used to resolve
   the clone back-reference.  The fields below must be read in exactly
   the order the writer emitted them.  */

static struct cgraph_node *
input_node (struct lto_file_decl_data *file_data,
	    struct lto_input_block *ib,
	    enum LTO_symtab_tags tag,
	    vec<symtab_node> nodes)
{
  tree fn_decl;
  struct cgraph_node *node;
  struct bitpack_d bp;
  unsigned decl_index;
  int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
  int clone_ref;
  int order;
  int i, count;

  /* Orders in the stream are relative; rebase by ORDER_BASE.  */
  order = streamer_read_hwi (ib) + order_base;
  clone_ref = streamer_read_hwi (ib);

  decl_index = streamer_read_uhwi (ib);
  fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);

  /* A valid clone reference means this node is a clone of an earlier
     node in NODES; otherwise get or create a fresh node for FN_DECL.  */
  if (clone_ref != LCC_NOT_FOUND)
    {
      node = cgraph_clone_node (cgraph (nodes[clone_ref]), fn_decl,
				0, CGRAPH_FREQ_BASE, false,
				vNULL, false);
    }
  else
    node = cgraph_get_create_node (fn_decl);

  node->symbol.order = order;
  /* Keep the global order counter ahead of every order seen.  */
  if (order >= symtab_order)
    symtab_order = order + 1;

  node->count = streamer_read_gcov_count (ib);
  node->count_materialization_scale = streamer_read_hwi (ib);

  /* Read the list of IPA transform passes still to be applied,
     streamed as pass ids resolved through passes_by_id.  */
  count = streamer_read_hwi (ib);
  node->ipa_transforms_to_apply = vNULL;
  for (i = 0; i < count; i++)
    {
      struct opt_pass *pass;
      int pid = streamer_read_hwi (ib);

      gcc_assert (pid < passes_by_id_size);
      pass = passes_by_id[pid];
      node->ipa_transforms_to_apply.safe_push ((struct ipa_opt_pass_d *) pass);
    }

  /* Only analyzed nodes carry an inlined_to reference.  */
  if (tag == LTO_symtab_analyzed_node)
    ref = streamer_read_hwi (ib);

  ref2 = streamer_read_hwi (ib);

  /* Make sure that we have not read this node before.  Nodes that
     have already been read will have their tag stored in the 'aux'
     field.  Since built-in functions can be referenced in multiple
     functions, they are expected to be read more than once.  */
  if (node->symbol.aux && !DECL_BUILT_IN (node->symbol.decl))
    internal_error ("bytecode stream: found multiple instances of cgraph "
		    "node with uid %d", node->uid);

  bp = streamer_read_bitpack (ib);
  input_overwrite_node (file_data, node, tag, &bp);

  /* Store a reference for now, and fix up later to be a pointer.  */
  node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;

  /* Store a reference for now, and fix up later to be a pointer.  */
  node->symbol.same_comdat_group = (symtab_node) (intptr_t) ref2;

  /* Thunk nodes carry extra data; TYPE packs the this_adjusting (bit 1)
     and virtual_offset_p (bit 2) flags.  */
  if (node->thunk.thunk_p)
    {
      int type = streamer_read_uhwi (ib);
      HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
      HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);

      node->thunk.fixed_offset = fixed_offset;
      node->thunk.this_adjusting = (type & 2);
      node->thunk.virtual_value = virtual_value;
      node->thunk.virtual_offset_p = (type & 4);
    }
  /* For unanalyzed weakref aliases, recover the alias target from the
     decl's attributes rather than from the stream.  */
  if (node->symbol.alias && !node->symbol.analyzed
      && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->symbol.decl)))
    node->symbol.alias_target = get_alias_symbol (node->symbol.decl);
  return node;
}
1015
/* Read a varpool node belonging to FILE_DATA from input_block IB.
   Return the node read or overwriten.  Field reads must stay in the
   same order the writer emitted them.  */

static struct varpool_node *
input_varpool_node (struct lto_file_decl_data *file_data,
		    struct lto_input_block *ib)
{
  int decl_index;
  tree var_decl;
  struct varpool_node *node;
  struct bitpack_d bp;
  int ref = LCC_NOT_FOUND;
  int order;

  /* Orders in the stream are relative; rebase by ORDER_BASE.  */
  order = streamer_read_hwi (ib) + order_base;
  decl_index = streamer_read_uhwi (ib);
  var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
  node = varpool_node_for_decl (var_decl);
  node->symbol.order = order;
  if (order >= symtab_order)
    symtab_order = order + 1;
  node->symbol.lto_file_data = file_data;

  /* Unpack the flag bits in the exact order they were packed.  */
  bp = streamer_read_bitpack (ib);
  node->symbol.externally_visible = bp_unpack_value (&bp, 1);
  node->symbol.force_output = bp_unpack_value (&bp, 1);
  node->symbol.unique_name = bp_unpack_value (&bp, 1);
  node->symbol.definition = bp_unpack_value (&bp, 1);
  node->symbol.alias = bp_unpack_value (&bp, 1);
  node->symbol.analyzed = bp_unpack_value (&bp, 1);
  node->symbol.used_from_other_partition = bp_unpack_value (&bp, 1);
  node->symbol.in_other_partition = bp_unpack_value (&bp, 1);
  /* A variable defined in another partition is only an external
     reference in this one.  */
  if (node->symbol.in_other_partition)
    {
      DECL_EXTERNAL (node->symbol.decl) = 1;
      TREE_STATIC (node->symbol.decl) = 0;
    }
  /* For unanalyzed weakref aliases, recover the alias target from the
     decl's attributes rather than from the stream.  */
  if (node->symbol.alias && !node->symbol.analyzed
      && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->symbol.decl)))
    node->symbol.alias_target = get_alias_symbol (node->symbol.decl);
  ref = streamer_read_hwi (ib);
  /* Store a reference for now, and fix up later to be a pointer.  */
  node->symbol.same_comdat_group = (symtab_node) (intptr_t) ref;
  node->symbol.resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
						LDPR_NUM_KNOWN);

  return node;
}
1064
1065 /* Read a node from input_block IB. TAG is the node's tag just read.
1066 Return the node read or overwriten. */
1067
1068 static void
1069 input_ref (struct lto_input_block *ib,
1070 symtab_node referring_node,
1071 vec<symtab_node> nodes)
1072 {
1073 symtab_node node = NULL;
1074 struct bitpack_d bp;
1075 enum ipa_ref_use use;
1076
1077 bp = streamer_read_bitpack (ib);
1078 use = (enum ipa_ref_use) bp_unpack_value (&bp, 2);
1079 node = nodes[streamer_read_hwi (ib)];
1080 ipa_record_reference (referring_node, node, use, NULL);
1081 }
1082
/* Read an edge from IB.  NODES points to a vector of previously read nodes for
   decoding caller and callee of the edge to be read.  If INDIRECT is true, the
   edge being read is indirect (in the sense that it has
   indirect_unknown_callee set).  */

static void
input_edge (struct lto_input_block *ib, vec<symtab_node> nodes,
	    bool indirect)
{
  struct cgraph_node *caller, *callee;
  struct cgraph_edge *edge;
  unsigned int stmt_id;
  gcov_type count;
  int freq;
  cgraph_inline_failed_t inline_failed;
  struct bitpack_d bp;
  int ecf_flags = 0;

  /* Caller (and for direct edges the callee) are streamed as indices
     into NODES.  */
  caller = cgraph (nodes[streamer_read_hwi (ib)]);
  if (caller == NULL || caller->symbol.decl == NULL_TREE)
    internal_error ("bytecode stream: no caller found while reading edge");

  if (!indirect)
    {
      callee = cgraph (nodes[streamer_read_hwi (ib)]);
      if (callee == NULL || callee->symbol.decl == NULL_TREE)
	internal_error ("bytecode stream: no callee found while reading edge");
    }
  else
    callee = NULL;

  count = streamer_read_gcov_count (ib);

  /* The remaining edge attributes are packed into one bitpack; unpack
     in the exact order the writer packed them.  */
  bp = streamer_read_bitpack (ib);
  inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_enum, CIF_N_REASONS);
  stmt_id = bp_unpack_var_len_unsigned (&bp);
  freq = (int) bp_unpack_var_len_unsigned (&bp);

  if (indirect)
    edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq);
  else
    edge = cgraph_create_edge (caller, callee, NULL, count, freq);

  edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
  /* Only the statement uid is available here; the actual call stmt is
     presumably resolved when function bodies are materialized.  */
  edge->lto_stmt_uid = stmt_id;
  edge->inline_failed = inline_failed;
  edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
  edge->can_throw_external = bp_unpack_value (&bp, 1);
  /* Indirect edges additionally carry ECF flags of the unknown
     callee, one bit per flag.  */
  if (indirect)
    {
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_CONST;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_PURE;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NORETURN;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_MALLOC;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NOTHROW;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_RETURNS_TWICE;
      edge->indirect_info->ecf_flags = ecf_flags;
    }
}
1148
1149
/* Read a cgraph from IB using the info in FILE_DATA.  Returns the
   vector of symtab nodes read, in stream order, so that edge and
   reference records can be decoded against it.  Also registers each
   node with FILE_DATA's symtab encoder.  */

static vec<symtab_node>
input_cgraph_1 (struct lto_file_decl_data *file_data,
		struct lto_input_block *ib)
{
  enum LTO_symtab_tags tag;
  vec<symtab_node> nodes = vNULL;
  symtab_node node;
  unsigned i;

  tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
  /* Orders streamed by this file are rebased on top of the current
     global order counter.  */
  order_base = symtab_order;
  /* A zero tag terminates the record stream.  */
  while (tag)
    {
      if (tag == LTO_symtab_edge)
	input_edge (ib, nodes, false);
      else if (tag == LTO_symtab_indirect_edge)
	input_edge (ib, nodes, true);
      else if (tag == LTO_symtab_variable)
	{
	  node = (symtab_node)input_varpool_node (file_data, ib);
	  nodes.safe_push (node);
	  lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
	}
      else
	{
	  node = (symtab_node)input_node (file_data, ib, tag, nodes);
	  if (node == NULL || node->symbol.decl == NULL_TREE)
	    internal_error ("bytecode stream: found empty cgraph node");
	  nodes.safe_push (node);
	  lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
	}

      tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
    }

  lto_input_toplevel_asms (file_data, order_base);

  /* AUX pointers should be all non-zero for function nodes read from the stream.  */
#ifdef ENABLE_CHECKING
  FOR_EACH_VEC_ELT (nodes, i, node)
    gcc_assert (node->symbol.aux || !is_a <cgraph_node> (node));
#endif
  /* Second pass: convert the integer references stored by input_node /
     input_varpool_node into real pointers, now that all nodes exist.  */
  FOR_EACH_VEC_ELT (nodes, i, node)
    {
      int ref;
      if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
	{
	  ref = (int) (intptr_t) cnode->global.inlined_to;

	  /* We share declaration of builtins, so we may read same node twice.  */
	  if (!node->symbol.aux)
	    continue;
	  node->symbol.aux = NULL;

	  /* Fixup inlined_to from reference to pointer.  */
	  if (ref != LCC_NOT_FOUND)
	    cgraph (node)->global.inlined_to = cgraph (nodes[ref]);
	  else
	    cnode->global.inlined_to = NULL;
	}

      ref = (int) (intptr_t) node->symbol.same_comdat_group;

      /* Fixup same_comdat_group from reference to pointer.  */
      if (ref != LCC_NOT_FOUND)
	node->symbol.same_comdat_group = nodes[ref];
      else
	node->symbol.same_comdat_group = NULL;
    }
  /* Re-mark function nodes' aux so later passes (see input_symtab) can
     tell which nodes came from this stream.  */
  FOR_EACH_VEC_ELT (nodes, i, node)
    node->symbol.aux = is_a <cgraph_node> (node) ? (void *)1 : NULL;
  return nodes;
}
1225
1226 /* Input ipa_refs. */
1227
1228 static void
1229 input_refs (struct lto_input_block *ib,
1230 vec<symtab_node> nodes)
1231 {
1232 int count;
1233 int idx;
1234 while (true)
1235 {
1236 symtab_node node;
1237 count = streamer_read_uhwi (ib);
1238 if (!count)
1239 break;
1240 idx = streamer_read_uhwi (ib);
1241 node = nodes[idx];
1242 while (count)
1243 {
1244 input_ref (ib, node, nodes);
1245 count--;
1246 }
1247 }
1248 }
1249
1250
/* Whole-program profile summary built by merge_profile_summaries from
   the per-file summaries; the global PROFILE_INFO is pointed at it.  */
static struct gcov_ctr_summary lto_gcov_summary;
1252
/* Input profile_info from IB into FILE_DATA's profile summary.  A zero
   run count means the file carries no profile data and nothing more is
   read for it.  */
static void
input_profile_summary (struct lto_input_block *ib,
		       struct lto_file_decl_data *file_data)
{
  unsigned h_ix;
  struct bitpack_d bp;
  unsigned int runs = streamer_read_uhwi (ib);
  if (runs)
    {
      file_data->profile_info.runs = runs;
      file_data->profile_info.sum_max = streamer_read_gcov_count (ib);
      file_data->profile_info.sum_all = streamer_read_gcov_count (ib);

      /* Start from an all-zero histogram; only non-empty buckets are
	 streamed.  */
      memset (file_data->profile_info.histogram, 0,
	      sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
      /* Input the bitpack of non-zero histogram indices.  */
      bp = streamer_read_bitpack (ib);
      /* Read in and unpack the full bitpack, flagging non-zero
	 histogram entries by setting the num_counters non-zero.  */
      for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
	{
	  file_data->profile_info.histogram[h_ix].num_counters
	    = bp_unpack_value (&bp, 1);
	}
      /* Now read the actual counters for each flagged bucket.  */
      for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
	{
	  if (!file_data->profile_info.histogram[h_ix].num_counters)
	    continue;

	  file_data->profile_info.histogram[h_ix].num_counters
	    = streamer_read_gcov_count (ib);
	  file_data->profile_info.histogram[h_ix].min_value
	    = streamer_read_gcov_count (ib);
	  file_data->profile_info.histogram[h_ix].cum_value
	    = streamer_read_gcov_count (ib);
	}
      /* IPA-profile computes hot bb threshold based on cumulated
	 whole program profile.  We need to stream it down to ltrans.  */
      if (flag_ltrans)
	set_hot_bb_threshold (streamer_read_gcov_count (ib));
    }

}
1297
1298 /* Rescale profile summaries to the same number of runs in the whole unit. */
1299
1300 static void
1301 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1302 {
1303 struct lto_file_decl_data *file_data;
1304 unsigned int j, h_ix;
1305 gcov_unsigned_t max_runs = 0;
1306 struct cgraph_node *node;
1307 struct cgraph_edge *edge;
1308 gcov_type saved_sum_all = 0;
1309 gcov_ctr_summary *saved_profile_info = 0;
1310 int saved_scale = 0;
1311
1312 /* Find unit with maximal number of runs. If we ever get serious about
1313 roundoff errors, we might also consider computing smallest common
1314 multiply. */
1315 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1316 if (max_runs < file_data->profile_info.runs)
1317 max_runs = file_data->profile_info.runs;
1318
1319 if (!max_runs)
1320 return;
1321
1322 /* Simple overflow check. We probably don't need to support that many train
1323 runs. Such a large value probably imply data corruption anyway. */
1324 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1325 {
1326 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1327 INT_MAX / REG_BR_PROB_BASE);
1328 return;
1329 }
1330
1331 profile_info = &lto_gcov_summary;
1332 lto_gcov_summary.runs = max_runs;
1333 lto_gcov_summary.sum_max = 0;
1334 memset (lto_gcov_summary.histogram, 0,
1335 sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
1336
1337 /* Rescale all units to the maximal number of runs.
1338 sum_max can not be easily merged, as we have no idea what files come from
1339 the same run. We do not use the info anyway, so leave it 0. */
1340 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1341 if (file_data->profile_info.runs)
1342 {
1343 int scale = GCOV_COMPUTE_SCALE (max_runs,
1344 file_data->profile_info.runs);
1345 lto_gcov_summary.sum_max
1346 = MAX (lto_gcov_summary.sum_max,
1347 apply_scale (file_data->profile_info.sum_max, scale));
1348 lto_gcov_summary.sum_all
1349 = MAX (lto_gcov_summary.sum_all,
1350 apply_scale (file_data->profile_info.sum_all, scale));
1351 /* Save a pointer to the profile_info with the largest
1352 scaled sum_all and the scale for use in merging the
1353 histogram. */
1354 if (!saved_profile_info
1355 || lto_gcov_summary.sum_all > saved_sum_all)
1356 {
1357 saved_profile_info = &file_data->profile_info;
1358 saved_sum_all = lto_gcov_summary.sum_all;
1359 saved_scale = scale;
1360 }
1361 }
1362
1363 gcc_assert (saved_profile_info);
1364
1365 /* Scale up the histogram from the profile that had the largest
1366 scaled sum_all above. */
1367 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1368 {
1369 /* Scale up the min value as we did the corresponding sum_all
1370 above. Use that to find the new histogram index. */
1371 gcov_type scaled_min
1372 = apply_scale (saved_profile_info->histogram[h_ix].min_value,
1373 saved_scale);
1374 /* The new index may be shared with another scaled histogram entry,
1375 so we need to account for a non-zero histogram entry at new_ix. */
1376 unsigned new_ix = gcov_histo_index (scaled_min);
1377 lto_gcov_summary.histogram[new_ix].min_value
1378 = (lto_gcov_summary.histogram[new_ix].num_counters
1379 ? MIN (lto_gcov_summary.histogram[new_ix].min_value, scaled_min)
1380 : scaled_min);
1381 /* Some of the scaled counter values would ostensibly need to be placed
1382 into different (larger) histogram buckets, but we keep things simple
1383 here and place the scaled cumulative counter value in the bucket
1384 corresponding to the scaled minimum counter value. */
1385 lto_gcov_summary.histogram[new_ix].cum_value
1386 += apply_scale (saved_profile_info->histogram[h_ix].cum_value,
1387 saved_scale);
1388 lto_gcov_summary.histogram[new_ix].num_counters
1389 += saved_profile_info->histogram[h_ix].num_counters;
1390 }
1391
1392 /* Watch roundoff errors. */
1393 if (lto_gcov_summary.sum_max < max_runs)
1394 lto_gcov_summary.sum_max = max_runs;
1395
1396 /* If merging already happent at WPA time, we are done. */
1397 if (flag_ltrans)
1398 return;
1399
1400 /* Now compute count_materialization_scale of each node.
1401 During LTRANS we already have values of count_materialization_scale
1402 computed, so just update them. */
1403 FOR_EACH_FUNCTION (node)
1404 if (node->symbol.lto_file_data
1405 && node->symbol.lto_file_data->profile_info.runs)
1406 {
1407 int scale;
1408
1409 scale = RDIV (node->count_materialization_scale * max_runs,
1410 node->symbol.lto_file_data->profile_info.runs);
1411 node->count_materialization_scale = scale;
1412 if (scale < 0)
1413 fatal_error ("Profile information in %s corrupted",
1414 file_data->file_name);
1415
1416 if (scale == REG_BR_PROB_BASE)
1417 continue;
1418 for (edge = node->callees; edge; edge = edge->next_callee)
1419 edge->count = apply_scale (edge->count, scale);
1420 node->count = apply_scale (node->count, scale);
1421 }
1422 }
1423
/* Input and merge the symtab from each of the .o files passed to
   lto1.

   For every input file: reads the profile summary and the symtab-nodes
   section, then the refs section, and at LTRANS time the optimization
   summary.  Afterwards merges all per-file profile summaries and
   clears the aux markers left by input_cgraph_1.  */

void
input_symtab (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;
  struct cgraph_node *node;

  cgraph_state = CGRAPH_STATE_IPA_SSA;

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib;
      vec<symtab_node> nodes;

      /* The symtab-nodes section is mandatory; its absence means a
	 broken object file.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
					  &data, &len);
      if (!ib)
	fatal_error ("cannot find LTO cgraph in %s", file_data->file_name);
      input_profile_summary (ib, file_data);
      file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
      nodes = input_cgraph_1 (file_data, ib);
      lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
				      ib, data, len);

      /* IPA references live in their own section, decoded against the
	 node vector just read.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_refs,
					  &data, &len);
      if (!ib)
	fatal_error("cannot find LTO section refs in %s", file_data->file_name);
      input_refs (ib, nodes);
      lto_destroy_simple_input_block (file_data, LTO_section_refs,
				      ib, data, len);
      if (flag_ltrans)
	input_cgraph_opt_summary (nodes);
      nodes.release ();
    }

  merge_profile_summaries (file_data_vec);
  get_working_sets ();


  /* Clear out the aux field that was used to store enough state to
     tell which nodes should be overwritten.  */
  FOR_EACH_FUNCTION (node)
    {
      /* Some nodes may have been created by cgraph_node.  This
	 happens when the callgraph contains nested functions.  If the
	 node for the parent function was never emitted to the gimple
	 file, cgraph_node will create a node for it when setting the
	 context of the nested function.  */
      if (node->symbol.lto_file_data)
	node->symbol.aux = NULL;
    }
}
1483
1484 /* True when we need optimization summary for NODE. */
1485
1486 static int
1487 output_cgraph_opt_summary_p (struct cgraph_node *node)
1488 {
1489 return (node->clone_of
1490 && (node->clone.tree_map
1491 || node->clone.args_to_skip
1492 || node->clone.combined_args_to_skip));
1493 }
1494
/* Output optimization summary for EDGE to OB.
   Intentionally empty: no per-edge optimization data is currently
   streamed.  Kept so output_node_opt_summary's structure mirrors the
   reader (input_edge_opt_summary).  */
static void
output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
			 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
{
}
1501
/* Output optimization summary for NODE to OB.

   Streams the clone data: args_to_skip and combined_args_to_skip as
   (count, indices...) lists, then the tree_map entries, and finally —
   if NODE is in ENCODER's partition — per-edge summaries for callees
   and indirect calls.  Must mirror input_node_opt_summary exactly.  */

static void
output_node_opt_summary (struct output_block *ob,
			 struct cgraph_node *node,
			 lto_symtab_encoder_t encoder)
{
  unsigned int index;
  bitmap_iterator bi;
  struct ipa_replace_map *map;
  struct bitpack_d bp;
  int i;
  struct cgraph_edge *e;

  /* Bitmaps are streamed as a bit count followed by the set bits; an
     absent bitmap is streamed as count 0.  */
  if (node->clone.args_to_skip)
    {
      streamer_write_uhwi (ob, bitmap_count_bits (node->clone.args_to_skip));
      EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
	streamer_write_uhwi (ob, index);
    }
  else
    streamer_write_uhwi (ob, 0);
  if (node->clone.combined_args_to_skip)
    {
      streamer_write_uhwi (ob, bitmap_count_bits (node->clone.combined_args_to_skip));
      EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
	streamer_write_uhwi (ob, index);
    }
  else
    streamer_write_uhwi (ob, 0);
  streamer_write_uhwi (ob, vec_safe_length (node->clone.tree_map));
  FOR_EACH_VEC_SAFE_ELT (node->clone.tree_map, i, map)
    {
      int parm_num;
      tree parm;

      /* Encode the replaced PARM_DECL by its position in the
	 function's argument list.  */
      for (parm_num = 0, parm = DECL_ARGUMENTS (node->symbol.decl); parm;
	   parm = DECL_CHAIN (parm), parm_num++)
	if (map->old_tree == parm)
	  break;
      /* At the moment we assume all old trees to be PARM_DECLs, because we have no
	 mechanism to store function local declarations into summaries.  */
      gcc_assert (parm);
      streamer_write_uhwi (ob, parm_num);
      gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
      stream_write_tree (ob, map->new_tree, true);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, map->replace_p, 1);
      bp_pack_value (&bp, map->ref_p, 1);
      streamer_write_bitpack (&bp);
    }

  /* Edge summaries are only written for nodes in this partition.  */
  if (lto_symtab_encoder_in_partition_p (encoder, (symtab_node) node))
    {
      for (e = node->callees; e; e = e->next_callee)
	output_edge_opt_summary (ob, e);
      for (e = node->indirect_calls; e; e = e->next_callee)
	output_edge_opt_summary (ob, e);
    }
}
1562
1563 /* Output optimization summaries stored in callgraph.
1564 At the moment it is the clone info structure. */
1565
1566 static void
1567 output_cgraph_opt_summary (void)
1568 {
1569 int i, n_nodes;
1570 lto_symtab_encoder_t encoder;
1571 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1572 unsigned count = 0;
1573
1574 ob->cgraph_node = NULL;
1575 encoder = ob->decl_state->symtab_node_encoder;
1576 n_nodes = lto_symtab_encoder_size (encoder);
1577 for (i = 0; i < n_nodes; i++)
1578 {
1579 symtab_node node = lto_symtab_encoder_deref (encoder, i);
1580 cgraph_node *cnode = dyn_cast <cgraph_node> (node);
1581 if (cnode && output_cgraph_opt_summary_p (cnode))
1582 count++;
1583 }
1584 streamer_write_uhwi (ob, count);
1585 for (i = 0; i < n_nodes; i++)
1586 {
1587 symtab_node node = lto_symtab_encoder_deref (encoder, i);
1588 cgraph_node *cnode = dyn_cast <cgraph_node> (node);
1589 if (cnode && output_cgraph_opt_summary_p (cnode))
1590 {
1591 streamer_write_uhwi (ob, i);
1592 output_node_opt_summary (ob, cnode, encoder);
1593 }
1594 }
1595 produce_asm (ob, NULL);
1596 destroy_output_block (ob);
1597 }
1598
/* Input optimisation summary of EDGE.
   Intentionally empty: output_edge_opt_summary streams no per-edge
   data, so there is nothing to read.  Kept so input_node_opt_summary's
   structure mirrors the writer.  */

static void
input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
			struct lto_input_block *ib_main ATTRIBUTE_UNUSED)
{
}
1606
/* Input optimisation summary of NODE from IB_MAIN using DATA_IN for
   tree streaming.  Must mirror output_node_opt_summary exactly:
   args_to_skip bitmap, combined_args_to_skip bitmap, tree_map entries,
   then per-edge summaries.  */

static void
input_node_opt_summary (struct cgraph_node *node,
			struct lto_input_block *ib_main,
			struct data_in *data_in)
{
  int i;
  int count;
  int bit;
  struct bitpack_d bp;
  struct cgraph_edge *e;

  /* Bitmaps were streamed as a bit count followed by the set bit
     indices; a zero count means no bitmap.  */
  count = streamer_read_uhwi (ib_main);
  if (count)
    node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
  for (i = 0; i < count; i++)
    {
      bit = streamer_read_uhwi (ib_main);
      bitmap_set_bit (node->clone.args_to_skip, bit);
    }
  count = streamer_read_uhwi (ib_main);
  if (count)
    node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
  for (i = 0; i < count; i++)
    {
      bit = streamer_read_uhwi (ib_main);
      bitmap_set_bit (node->clone.combined_args_to_skip, bit);
    }
  count = streamer_read_uhwi (ib_main);
  for (i = 0; i < count; i++)
    {
      struct ipa_replace_map *map = ggc_alloc_ipa_replace_map ();

      vec_safe_push (node->clone.tree_map, map);
      /* The writer encoded the replaced parameter by position
	 (parm_num); old_tree stays NULL here — presumably resolved
	 from parm_num by later consumers.  */
      map->parm_num = streamer_read_uhwi (ib_main);
      map->old_tree = NULL;
      map->new_tree = stream_read_tree (ib_main, data_in);
      bp = streamer_read_bitpack (ib_main);
      map->replace_p = bp_unpack_value (&bp, 1);
      map->ref_p = bp_unpack_value (&bp, 1);
    }
  for (e = node->callees; e; e = e->next_callee)
    input_edge_opt_summary (e, ib_main);
  for (e = node->indirect_calls; e; e = e->next_callee)
    input_edge_opt_summary (e, ib_main);
}
1654
/* Read section in file FILE_DATA of length LEN with data DATA.
   NODES maps encoder indices back to the symtab nodes read earlier.
   The section layout is: lto_function_header, cfg area, main stream
   (a count followed by (index, node summary) records), string area.  */

static void
input_cgraph_opt_section (struct lto_file_decl_data *file_data,
			  const char *data, size_t len,
			  vec<symtab_node> nodes)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
			header->main_size);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      /* Each record starts with the node's encoder index.  */
      int ref = streamer_read_uhwi (&ib_main);
      input_node_opt_summary (cgraph (nodes[ref]),
			      &ib_main, data_in);
    }
  lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
1690
1691 /* Input optimization summary of cgraph. */
1692
1693 static void
1694 input_cgraph_opt_summary (vec<symtab_node> nodes)
1695 {
1696 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1697 struct lto_file_decl_data *file_data;
1698 unsigned int j = 0;
1699
1700 while ((file_data = file_data_vec[j++]))
1701 {
1702 size_t len;
1703 const char *data =
1704 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
1705 &len);
1706
1707 if (data)
1708 input_cgraph_opt_section (file_data, data, len, nodes);
1709 }
1710 }