tree-ssa.h: Remove all #include's
[gcc.git] / gcc / lto-cgraph.c
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright (C) 2009-2013 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "expr.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "hashtab.h"
33 #include "langhooks.h"
34 #include "basic-block.h"
35 #include "bitmap.h"
36 #include "function.h"
37 #include "ggc.h"
38 #include "diagnostic-core.h"
39 #include "except.h"
40 #include "vec.h"
41 #include "timevar.h"
42 #include "pointer-set.h"
43 #include "lto-streamer.h"
44 #include "data-streamer.h"
45 #include "tree-streamer.h"
46 #include "gcov-io.h"
47 #include "tree-pass.h"
48 #include "profile.h"
49 #include "context.h"
50 #include "pass_manager.h"
51 #include "ipa-utils.h"
52
53 static void output_cgraph_opt_summary (void);
54 static void input_cgraph_opt_summary (vec<symtab_node> nodes);
55
/* Number of linker-plugin LDPR resolution values known to GCC.
   LDPR_PREVAILING_DEF_IRONLY_EXP is the highest enumerator we stream.  */
#define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)

/* All node orders are offset by ORDER_BASE.  */
static int order_base;
61
/* Cgraph streaming is organized as a set of records whose type
   is indicated by a tag.  */
enum LTO_symtab_tags
{
  /* Must leave 0 for the stopper.  */

  /* Cgraph node without body available.  */
  LTO_symtab_unavail_node = 1,
  /* Cgraph node with function body.  */
  LTO_symtab_analyzed_node,
  /* Direct cgraph edge (callee known).  */
  LTO_symtab_edge,
  /* Indirect cgraph edge (callee unknown at compile time).  */
  LTO_symtab_indirect_edge,
  /* Varpool (variable) node.  */
  LTO_symtab_variable,
  /* Sentinel; keep last.  */
  LTO_symtab_last_tag
};
78
79 /* Create a new symtab encoder.
80 if FOR_INPUT, the encoder allocate only datastructures needed
81 to read the symtab. */
82
83 lto_symtab_encoder_t
84 lto_symtab_encoder_new (bool for_input)
85 {
86 lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
87
88 if (!for_input)
89 encoder->map = pointer_map_create ();
90 encoder->nodes.create (0);
91 return encoder;
92 }
93
94
95 /* Delete ENCODER and its components. */
96
97 void
98 lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
99 {
100 encoder->nodes.release ();
101 if (encoder->map)
102 pointer_map_destroy (encoder->map);
103 free (encoder);
104 }
105
106
/* Return the existing reference number of NODE in the symtab encoder in
   output block OB.  Assign a new reference if this is the first time
   NODE is encoded.  */

int
lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
			   symtab_node node)
{
  int ref;
  void **slot;

  /* Input-only encoders have no map: just append.  Duplicates are not
     detected in this mode; callers must not encode the same node twice.  */
  if (!encoder->map)
    {
      lto_encoder_entry entry = {node, false, false, false};

      ref = encoder->nodes.length ();
      encoder->nodes.safe_push (entry);
      return ref;
    }

  slot = pointer_map_contains (encoder->map, node);
  if (!slot || !*slot)
    {
      lto_encoder_entry entry = {node, false, false, false};
      ref = encoder->nodes.length ();
      if (!slot)
	slot = pointer_map_insert (encoder->map, node);
      /* Indices are stored biased by one so that a zero slot value
	 means "not present".  */
      *slot = (void *) (intptr_t) (ref + 1);
      encoder->nodes.safe_push (entry);
    }
  else
    /* Already present; undo the +1 bias.  */
    ref = (size_t) *slot - 1;

  return ref;
}
142
/* Remove NODE from ENCODER.  Return true if NODE was present and has
   been removed, false if it was not encoded at all.  Removal is O(1):
   the last vector element is swapped into NODE's slot.  Note this
   invalidates previously returned reference numbers for that element.  */

bool
lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
				symtab_node node)
{
  void **slot, **last_slot;
  int index;
  lto_encoder_entry last_node;

  slot = pointer_map_contains (encoder->map, node);
  if (slot == NULL || !*slot)
    return false;

  /* Map values are biased by one; see lto_symtab_encoder_encode.  */
  index = (size_t) *slot - 1;
  gcc_checking_assert (encoder->nodes[index].node == node);

  /* Remove from vector.  We do this by swapping node with the last element
     of the vector.  */
  last_node = encoder->nodes.pop ();
  if (last_node.node != node)
    {
      last_slot = pointer_map_contains (encoder->map, last_node.node);
      gcc_checking_assert (last_slot && *last_slot);
      /* Re-bias the moved element's index.  */
      *last_slot = (void *)(size_t) (index + 1);

      /* Move the last element to the original spot of NODE.  */
      encoder->nodes[index] = last_node;
    }

  /* Remove element from hash table.  */
  *slot = NULL;
  return true;
}
177
178
/* Return TRUE if we should encode the body of NODE (if any).
   NOTE(review): unlike lto_symtab_encoder_encode_initializer_p, the
   lookup result is not checked against LCC_NOT_FOUND — callers are
   presumably expected to pass only encoded nodes; confirm.  */

bool
lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
				  struct cgraph_node *node)
{
  int index = lto_symtab_encoder_lookup (encoder, (symtab_node)node);
  return encoder->nodes[index].body;
}
188
/* Mark NODE so that its body will be streamed, encoding NODE into
   ENCODER first if it is not already there.  */

static void
lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
				    struct cgraph_node *node)
{
  int index = lto_symtab_encoder_encode (encoder, (symtab_node)node);
  gcc_checking_assert (encoder->nodes[index].node == (symtab_node)node);
  encoder->nodes[index].body = true;
}
199
/* Return TRUE if we should encode the initializer of NODE (if any).
   Returns false when NODE is not in ENCODER at all.  */

bool
lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
					 struct varpool_node *node)
{
  int index = lto_symtab_encoder_lookup (encoder, (symtab_node)node);
  if (index == LCC_NOT_FOUND)
    return false;
  return encoder->nodes[index].initializer;
}
211
/* Mark NODE so that its initializer will be streamed.
   NOTE(review): this uses lookup (not encode) and does not check for
   LCC_NOT_FOUND — NODE is presumably already encoded; confirm callers.  */

static void
lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
					   struct varpool_node *node)
{
  int index = lto_symtab_encoder_lookup (encoder, (symtab_node)node);
  encoder->nodes[index].initializer = true;
}
221
/* Return TRUE if NODE is part of the partition described by ENCODER.
   Nodes not encoded at all are not in the partition.  */

bool
lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
				   symtab_node node)
{
  int index = lto_symtab_encoder_lookup (encoder, (symtab_node)node);
  if (index == LCC_NOT_FOUND)
    return false;
  return encoder->nodes[index].in_partition;
}
233
/* Mark NODE as part of the partition, encoding it into ENCODER first
   if it is not already there.  */

void
lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
				     symtab_node node)
{
  int index = lto_symtab_encoder_encode (encoder, (symtab_node)node);
  encoder->nodes[index].in_partition = true;
}
243
/* Output the cgraph EDGE to OB using ENCODER.  The record layout is:
   tag, caller ref, [callee ref if direct], count, bitpack, and for
   indirect edges the common target id/probability.  The reader must
   consume fields in exactly this order.  */

static void
lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
		 lto_symtab_encoder_t encoder)
{
  unsigned int uid;
  intptr_t ref;
  struct bitpack_d bp;

  if (edge->indirect_unknown_callee)
    streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
			 LTO_symtab_indirect_edge);
  else
    streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
			 LTO_symtab_edge);

  ref = lto_symtab_encoder_lookup (encoder, (symtab_node)edge->caller);
  gcc_assert (ref != LCC_NOT_FOUND);
  streamer_write_hwi_stream (ob->main_stream, ref);

  /* Only direct edges have a known callee to reference.  */
  if (!edge->indirect_unknown_callee)
    {
      ref = lto_symtab_encoder_lookup (encoder, (symtab_node)edge->callee);
      gcc_assert (ref != LCC_NOT_FOUND);
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  streamer_write_gcov_count_stream (ob->main_stream, edge->count);

  bp = bitpack_create (ob->main_stream);
  /* When the caller still has a gimple body, stream gimple_uid + 1 so
     that 0 stays reserved; otherwise reuse the uid recorded earlier.  */
  uid = (!gimple_has_body_p (edge->caller->symbol.decl)
	 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt) + 1);
  bp_pack_enum (&bp, cgraph_inline_failed_enum,
		CIF_N_REASONS, edge->inline_failed);
  bp_pack_var_len_unsigned (&bp, uid);
  bp_pack_var_len_unsigned (&bp, edge->frequency);
  bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
  bp_pack_value (&bp, edge->speculative, 1);
  bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
  bp_pack_value (&bp, edge->can_throw_external, 1);
  if (edge->indirect_unknown_callee)
    {
      /* ECF flags are packed one bit each, in this fixed order.  */
      int flags = edge->indirect_info->ecf_flags;
      bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
      /* Flags that should not appear on indirect calls.  */
      gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
			     | ECF_MAY_BE_ALLOCA
			     | ECF_SIBCALL
			     | ECF_LEAF
			     | ECF_NOVOPS)));
    }
  streamer_write_bitpack (&bp);
  if (edge->indirect_unknown_callee)
    {
      streamer_write_hwi_stream (ob->main_stream,
				 edge->indirect_info->common_target_id);
      /* Probability is only meaningful when a common target exists.  */
      if (edge->indirect_info->common_target_id)
	streamer_write_hwi_stream
	  (ob->main_stream, edge->indirect_info->common_target_probability);
    }
}
311
312 /* Return if LIST contain references from other partitions. */
313
314 bool
315 referenced_from_other_partition_p (struct ipa_ref_list *list, lto_symtab_encoder_t encoder)
316 {
317 int i;
318 struct ipa_ref *ref;
319 for (i = 0; ipa_ref_list_referring_iterate (list, i, ref); i++)
320 {
321 if (ref->referring->symbol.in_other_partition
322 || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
323 return true;
324 }
325 return false;
326 }
327
328 /* Return true when node is reachable from other partition. */
329
330 bool
331 reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
332 {
333 struct cgraph_edge *e;
334 if (!node->symbol.definition)
335 return false;
336 if (node->global.inlined_to)
337 return false;
338 for (e = node->callers; e; e = e->next_caller)
339 if (e->caller->symbol.in_other_partition
340 || !lto_symtab_encoder_in_partition_p (encoder, (symtab_node)e->caller))
341 return true;
342 return false;
343 }
344
345 /* Return if LIST contain references from other partitions. */
346
347 bool
348 referenced_from_this_partition_p (struct ipa_ref_list *list,
349 lto_symtab_encoder_t encoder)
350 {
351 int i;
352 struct ipa_ref *ref;
353 for (i = 0; ipa_ref_list_referring_iterate (list, i, ref); i++)
354 if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
355 return true;
356 return false;
357 }
358
359 /* Return true when node is reachable from other partition. */
360
361 bool
362 reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
363 {
364 struct cgraph_edge *e;
365 for (e = node->callers; e; e = e->next_caller)
366 if (lto_symtab_encoder_in_partition_p (encoder, (symtab_node)e->caller))
367 return true;
368 return false;
369 }
370
/* Output the cgraph NODE to OB.  ENCODER is used to find the
   reference number of NODE->inlined_to.  SET is the set of nodes we
   are writing to the current file.  If NODE is not in SET, then NODE
   is a boundary of a cgraph_node_set and we pretend NODE just has a
   decl and no callees.  WRITTEN_DECLS is the set of FUNCTION_DECLs
   that have had their callgraph node written so far.  This is used to
   determine if NODE is a clone of a previously written node.  */

static void
lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
		 lto_symtab_encoder_t encoder)
{
  unsigned int tag;
  struct bitpack_d bp;
  bool boundary_p;
  intptr_t ref;
  bool in_other_partition = false;
  struct cgraph_node *clone_of, *ultimate_clone_of;
  struct ipa_opt_pass_d *pass;
  int i;
  bool alias_p;

  boundary_p = !lto_symtab_encoder_in_partition_p (encoder, (symtab_node)node);

  /* Boundary nodes are streamed as unavailable even when analyzed.  */
  if (node->symbol.analyzed && !boundary_p)
    tag = LTO_symtab_analyzed_node;
  else
    tag = LTO_symtab_unavail_node;

  streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
		       tag);
  streamer_write_hwi_stream (ob->main_stream, node->symbol.order);

  /* In WPA mode, we only output part of the call-graph.  Also, we
     fake cgraph node attributes.  There are two cases that we care.

     Boundary nodes: There are nodes that are not part of SET but are
     called from within SET.  We artificially make them look like
     externally visible nodes with no function body.

     Cherry-picked nodes: These are nodes we pulled from other
     translation units into SET during IPA-inlining.  We make them as
     local static nodes to prevent clashes with other local statics.  */
  if (boundary_p && node->symbol.analyzed && !DECL_EXTERNAL (node->symbol.decl))
    {
      /* Inline clones can not be part of boundary.
	 gcc_assert (!node->global.inlined_to);

	 FIXME: At the moment they can be, when partition contains an inline
	 clone that is clone of inline clone from outside partition.  We can
	 reshape the clone tree and make other tree to be the root, but it
	 needs a bit extra work and will be promplty done by cgraph_remove_node
	 after reading back.  */
      in_other_partition = 1;
    }

  /* Walk up the clone tree until we find an ancestor that is present
     in the encoder (its reference goes into REF).  */
  clone_of = node->clone_of;
  while (clone_of
	 && (ref = lto_symtab_encoder_lookup (encoder, (symtab_node)clone_of)) == LCC_NOT_FOUND)
    if (clone_of->prev_sibling_clone)
      clone_of = clone_of->prev_sibling_clone;
    else
      clone_of = clone_of->clone_of;

  /* See if body of the master function is output.  If not, we are seeing only
     an declaration and we do not need to pass down clone tree. */
  ultimate_clone_of = clone_of;
  while (ultimate_clone_of && ultimate_clone_of->clone_of)
    ultimate_clone_of = ultimate_clone_of->clone_of;

  if (clone_of && !lto_symtab_encoder_encode_body_p (encoder, ultimate_clone_of))
    clone_of = NULL;

  if (tag == LTO_symtab_analyzed_node)
    gcc_assert (clone_of || !node->clone_of);
  if (!clone_of)
    streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
  else
    streamer_write_hwi_stream (ob->main_stream, ref);


  lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->symbol.decl);
  streamer_write_gcov_count_stream (ob->main_stream, node->count);
  streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);

  /* Stream the list of IPA transform passes still to be applied.  */
  streamer_write_hwi_stream (ob->main_stream,
			     node->ipa_transforms_to_apply.length ());
  FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
    streamer_write_hwi_stream (ob->main_stream, pass->static_pass_number);

  if (tag == LTO_symtab_analyzed_node)
    {
      if (node->global.inlined_to)
	{
	  ref = lto_symtab_encoder_lookup (encoder, (symtab_node)node->global.inlined_to);
	  gcc_assert (ref != LCC_NOT_FOUND);
	}
      else
	ref = LCC_NOT_FOUND;

      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  /* Comdat group links are only meaningful inside the partition.  */
  if (node->symbol.same_comdat_group && !boundary_p)
    {
      ref = lto_symtab_encoder_lookup (encoder,
				       node->symbol.same_comdat_group);
      gcc_assert (ref != LCC_NOT_FOUND);
    }
  else
    ref = LCC_NOT_FOUND;
  streamer_write_hwi_stream (ob->main_stream, ref);

  /* Pack all boolean node flags into one bitpack; the reader unpacks
     them in the same order (see input_overwrite_node).  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, node->local.local, 1);
  bp_pack_value (&bp, node->symbol.externally_visible, 1);
  bp_pack_value (&bp, node->symbol.definition, 1);
  bp_pack_value (&bp, node->local.versionable, 1);
  bp_pack_value (&bp, node->local.can_change_signature, 1);
  bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
  bp_pack_value (&bp, node->symbol.force_output, 1);
  bp_pack_value (&bp, node->symbol.forced_by_abi, 1);
  bp_pack_value (&bp, node->symbol.unique_name, 1);
  bp_pack_value (&bp, node->symbol.address_taken, 1);
  /* used_from_other_partition: computed, not copied from the node.  */
  bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
		 && !DECL_EXTERNAL (node->symbol.decl)
		 && !DECL_COMDAT (node->symbol.decl)
		 && (reachable_from_other_partition_p (node, encoder)
		     || referenced_from_other_partition_p (&node->symbol.ref_list,
							   encoder)), 1);
  bp_pack_value (&bp, node->lowered, 1);
  bp_pack_value (&bp, in_other_partition, 1);
  /* Real aliases in a boundary become non-aliases.  However we still stream
     alias info on weakrefs.
     TODO: We lose a bit of information here - when we know that variable is
     defined in other unit, we may use the info on aliases to resolve
     symbol1 != symbol2 type tests that we can do only for locally defined objects
     otherwise.  */
  alias_p = node->symbol.alias && (!boundary_p || node->symbol.weakref);
  bp_pack_value (&bp, alias_p, 1);
  bp_pack_value (&bp, node->symbol.weakref, 1);
  bp_pack_value (&bp, node->frequency, 2);
  bp_pack_value (&bp, node->only_called_at_startup, 1);
  bp_pack_value (&bp, node->only_called_at_exit, 1);
  bp_pack_value (&bp, node->tm_clone, 1);
  bp_pack_value (&bp, node->thunk.thunk_p && !boundary_p, 1);
  bp_pack_enum (&bp, ld_plugin_symbol_resolution,
		LDPR_NUM_KNOWN, node->symbol.resolution);
  streamer_write_bitpack (&bp);

  if (node->thunk.thunk_p && !boundary_p)
    {
      /* Thunk flags are encoded as 1 + this_adjusting*2 + virtual_offset_p*4
	 so the value is always non-zero.  */
      streamer_write_uhwi_stream
	(ob->main_stream,
	 1 + (node->thunk.this_adjusting != 0) * 2
	 + (node->thunk.virtual_offset_p != 0) * 4);
      streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
      streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
    }
  streamer_write_hwi_stream (ob->main_stream, node->profile_id);
}
532
/* Output the varpool NODE to OB.
   If NODE is not in SET, then NODE is a boundary.  */

static void
lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
			 lto_symtab_encoder_t encoder)
{
  bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, (symtab_node)node);
  struct bitpack_d bp;
  int ref;
  bool alias_p;

  streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
		       LTO_symtab_variable);
  streamer_write_hwi_stream (ob->main_stream, node->symbol.order);
  lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->symbol.decl);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, node->symbol.externally_visible, 1);
  bp_pack_value (&bp, node->symbol.force_output, 1);
  bp_pack_value (&bp, node->symbol.forced_by_abi, 1);
  bp_pack_value (&bp, node->symbol.unique_name, 1);
  bp_pack_value (&bp, node->symbol.definition, 1);
  /* As for functions: real aliases in a boundary become non-aliases,
     but weakref alias info is always streamed.  */
  alias_p = node->symbol.alias && (!boundary_p || node->symbol.weakref);
  bp_pack_value (&bp, alias_p, 1);
  bp_pack_value (&bp, node->symbol.weakref, 1);
  bp_pack_value (&bp, node->symbol.analyzed && !boundary_p, 1);
  gcc_assert (node->symbol.definition || !node->symbol.analyzed);
  /* Constant pool initializers can be de-unified into individual ltrans units.
     FIXME: Alternatively at -Os we may want to avoid generating for them the local
     labels and share them across LTRANS partitions.  */
  if (DECL_IN_CONSTANT_POOL (node->symbol.decl)
      && !DECL_EXTERNAL (node->symbol.decl)
      && !DECL_COMDAT (node->symbol.decl))
    {
      bp_pack_value (&bp, 0, 1);  /* used_from_other_partition.  */
      bp_pack_value (&bp, 0, 1);  /* in_other_partition.  */
    }
  else
    {
      /* used_from_other_partition.  */
      bp_pack_value (&bp, node->symbol.definition
		     && referenced_from_other_partition_p (&node->symbol.ref_list,
							   encoder), 1);
      bp_pack_value (&bp, node->symbol.analyzed
		     && boundary_p && !DECL_EXTERNAL (node->symbol.decl), 1);
	  /* in_other_partition.  */
    }
  streamer_write_bitpack (&bp);
  /* Comdat group links are only meaningful inside the partition.  */
  if (node->symbol.same_comdat_group && !boundary_p)
    {
      ref = lto_symtab_encoder_lookup (encoder,
				       node->symbol.same_comdat_group);
      gcc_assert (ref != LCC_NOT_FOUND);
    }
  else
    ref = LCC_NOT_FOUND;
  streamer_write_hwi_stream (ob->main_stream, ref);
  streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
		       LDPR_NUM_KNOWN, node->symbol.resolution);
}
592
/* Output the IPA reference REF to OB using ENCODER.  (The previous
   header comment was a copy-paste of the varpool one.)  The statement
   uid is only streamed when the referring symbol is a cgraph node.  */

static void
lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
		lto_symtab_encoder_t encoder)
{
  struct bitpack_d bp;
  int nref;
  int uid = ref->lto_stmt_uid;
  struct cgraph_node *node;

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ref->use, 2);
  bp_pack_value (&bp, ref->speculative, 1);
  streamer_write_bitpack (&bp);
  nref = lto_symtab_encoder_lookup (encoder, ref->referred);
  gcc_assert (nref != LCC_NOT_FOUND);
  streamer_write_hwi_stream (ob->main_stream, nref);

  node = dyn_cast <cgraph_node> (ref->referring);
  if (node)
    {
      /* Bias by one so a zero uid means "no statement".  */
      if (ref->stmt)
	uid = gimple_uid (ref->stmt) + 1;
      streamer_write_hwi_stream (ob->main_stream, uid);
    }
}
621
/* Stream out profile_summary to OB.  Writes 0 when no profile info is
   available; otherwise runs, sum_max, sum_all, a presence bitmap of
   histogram entries followed by the non-zero entries, and (in WPA
   mode) the hot-bb threshold.  */

static void
output_profile_summary (struct lto_simple_output_block *ob)
{
  unsigned h_ix;
  struct bitpack_d bp;

  if (profile_info)
    {
      /* We do not output num and run_max, they are not used by
	 GCC profile feedback and they are difficult to merge from multiple
	 units.  */
      gcc_assert (profile_info->runs);
      streamer_write_uhwi_stream (ob->main_stream, profile_info->runs);
      streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_max);

      /* sum_all is needed for computing the working set with the
	 histogram.  */
      streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_all);

      /* Create and output a bitpack of non-zero histogram entries indices.  */
      bp = bitpack_create (ob->main_stream);
      for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
	bp_pack_value (&bp, profile_info->histogram[h_ix].num_counters > 0, 1);
      streamer_write_bitpack (&bp);
      /* Now stream out only those non-zero entries.  */
      for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
	{
	  if (!profile_info->histogram[h_ix].num_counters)
	    continue;
	  streamer_write_gcov_count_stream (ob->main_stream,
				      profile_info->histogram[h_ix].num_counters);
	  streamer_write_gcov_count_stream (ob->main_stream,
				      profile_info->histogram[h_ix].min_value);
	  streamer_write_gcov_count_stream (ob->main_stream,
				      profile_info->histogram[h_ix].cum_value);
	}
      /* IPA-profile computes hot bb threshold based on cumulated
	 whole program profile.  We need to stream it down to ltrans.  */
      if (flag_wpa)
	streamer_write_gcov_count_stream (ob->main_stream,
					  get_hot_bb_threshold ());
    }
  else
    streamer_write_uhwi_stream (ob->main_stream, 0);
}
669
670 /* Output all callees or indirect outgoing edges. EDGE must be the first such
671 edge. */
672
673 static void
674 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
675 struct lto_simple_output_block *ob,
676 lto_symtab_encoder_t encoder)
677 {
678 if (!edge)
679 return;
680
681 /* Output edges in backward direction, so the reconstructed callgraph match
682 and it is easy to associate call sites in the IPA pass summaries. */
683 while (edge->next_callee)
684 edge = edge->next_callee;
685 for (; edge; edge = edge->prev_callee)
686 lto_output_edge (ob, edge, encoder);
687 }
688
/* Output all IPA references of nodes in the partition described by
   ENCODER into the LTO_section_refs section.  (The previous header
   comment was copy-pasted from the cgraph writer.)  The section is a
   sequence of (count, node-ref, refs...) groups ended by a 0.  */

static void
output_refs (lto_symtab_encoder_t encoder)
{
  lto_symtab_encoder_iterator lsei;
  struct lto_simple_output_block *ob;
  int count;
  struct ipa_ref *ref;
  int i;

  ob = lto_create_simple_output_block (LTO_section_refs);

  for (lsei = lsei_start_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_in_partition (&lsei))
    {
      symtab_node node = lsei_node (lsei);

      count = ipa_ref_list_nreferences (&node->symbol.ref_list);
      if (count)
	{
	  /* NOTE(review): COUNT is written with the gcov-count writer
	     here; presumably the reader matches — confirm.  */
	  streamer_write_gcov_count_stream (ob->main_stream, count);
	  streamer_write_uhwi_stream (ob->main_stream,
				      lto_symtab_encoder_lookup (encoder, node));
	  for (i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list,
						      i, ref); i++)
	    lto_output_ref (ob, ref, encoder);
	}
    }

  /* Zero terminates the list of groups.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  lto_destroy_simple_output_block (ob);
}
723
/* Add NODE into ENCODER as well as the nodes it is cloned from.
   Recursing on clone_of before encoding NODE guarantees that every
   clone origin appears in the encoder before its clones.  When
   INCLUDE_BODY is true, the root of the clone chain is marked so its
   body will be streamed.  */

static void
add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
	     bool include_body)
{
  if (node->clone_of)
    add_node_to (encoder, node->clone_of, include_body);
  else if (include_body)
    /* Only the clone-chain root carries the streamed body.  */
    lto_set_symtab_encoder_encode_body (encoder, node);
  lto_symtab_encoder_encode (encoder, (symtab_node)node);
}
737
738 /* Add all references in LIST to encoders. */
739
740 static void
741 add_references (lto_symtab_encoder_t encoder,
742 struct ipa_ref_list *list)
743 {
744 int i;
745 struct ipa_ref *ref;
746 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
747 if (is_a <cgraph_node> (ref->referred))
748 add_node_to (encoder, ipa_ref_node (ref), false);
749 else
750 lto_symtab_encoder_encode (encoder, ref->referred);
751 }
752
753 /* Find all symbols we want to stream into given partition and insert them
754 to encoders.
755
756 The function actually replaces IN_ENCODER by new one. The reason is that
757 streaming code needs clone's origin to be streamed before clone. This
758 means that we need to insert the nodes in specific order. This order is
759 ignored by the partitioning logic earlier. */
760
761 lto_symtab_encoder_t
762 compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
763 {
764 struct cgraph_node *node;
765 struct cgraph_edge *edge;
766 int i;
767 lto_symtab_encoder_t encoder;
768 lto_symtab_encoder_iterator lsei;
769 struct pointer_set_t *reachable_call_targets = pointer_set_create ();
770
771 encoder = lto_symtab_encoder_new (false);
772
773 /* Go over all entries in the IN_ENCODER and duplicate them to
774 ENCODER. At the same time insert masters of clones so
775 every master appears before clone. */
776 for (lsei = lsei_start_function_in_partition (in_encoder);
777 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
778 {
779 node = lsei_cgraph_node (lsei);
780 add_node_to (encoder, node, true);
781 lto_set_symtab_encoder_in_partition (encoder, (symtab_node)node);
782 add_references (encoder, &node->symbol.ref_list);
783 /* For proper debug info, we need to ship the origins, too. */
784 if (DECL_ABSTRACT_ORIGIN (node->symbol.decl))
785 {
786 struct cgraph_node *origin_node
787 = cgraph_get_node (DECL_ABSTRACT_ORIGIN (node->symbol.decl));
788 add_node_to (encoder, origin_node, true);
789 }
790 }
791 for (lsei = lsei_start_variable_in_partition (in_encoder);
792 !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
793 {
794 struct varpool_node *vnode = lsei_varpool_node (lsei);
795
796 lto_set_symtab_encoder_in_partition (encoder, (symtab_node)vnode);
797 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
798 add_references (encoder, &vnode->symbol.ref_list);
799 /* For proper debug info, we need to ship the origins, too. */
800 if (DECL_ABSTRACT_ORIGIN (vnode->symbol.decl))
801 {
802 struct varpool_node *origin_node
803 = varpool_get_node (DECL_ABSTRACT_ORIGIN (node->symbol.decl));
804 lto_set_symtab_encoder_in_partition (encoder, (symtab_node)origin_node);
805 }
806 }
807 /* Pickle in also the initializer of all referenced readonly variables
808 to help folding. Constant pool variables are not shared, so we must
809 pickle those too. */
810 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
811 {
812 symtab_node node = lto_symtab_encoder_deref (encoder, i);
813 if (varpool_node *vnode = dyn_cast <varpool_node> (node))
814 {
815 if (!lto_symtab_encoder_encode_initializer_p (encoder,
816 vnode)
817 && ctor_for_folding (vnode->symbol.decl) != error_mark_node)
818 {
819 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
820 add_references (encoder, &vnode->symbol.ref_list);
821 }
822 }
823 }
824
825 /* Go over all the nodes again to include callees that are not in
826 SET. */
827 for (lsei = lsei_start_function_in_partition (encoder);
828 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
829 {
830 node = lsei_cgraph_node (lsei);
831 for (edge = node->callees; edge; edge = edge->next_callee)
832 {
833 struct cgraph_node *callee = edge->callee;
834 if (!lto_symtab_encoder_in_partition_p (encoder, (symtab_node)callee))
835 {
836 /* We should have moved all the inlines. */
837 gcc_assert (!callee->global.inlined_to);
838 add_node_to (encoder, callee, false);
839 }
840 }
841 /* Add all possible targets for late devirtualization. */
842 if (flag_devirtualize)
843 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
844 if (edge->indirect_info->polymorphic)
845 {
846 unsigned int i;
847 void *cache_token;
848 bool final;
849 vec <cgraph_node *>targets
850 = possible_polymorphic_call_targets
851 (edge, &final, &cache_token);
852 if (!pointer_set_insert (reachable_call_targets,
853 cache_token))
854 {
855 for (i = 0; i < targets.length (); i++)
856 {
857 struct cgraph_node *callee = targets[i];
858
859 /* Adding an external declarations into the unit serves
860 no purpose and just increases its boundary. */
861 if (callee->symbol.definition
862 && !lto_symtab_encoder_in_partition_p
863 (encoder, (symtab_node)callee))
864 {
865 gcc_assert (!callee->global.inlined_to);
866 add_node_to (encoder, callee, false);
867 }
868 }
869 }
870 }
871 }
872 lto_symtab_encoder_delete (in_encoder);
873 pointer_set_destroy (reachable_call_targets);
874 return encoder;
875 }
876
/* Output the part of the symtab in SET and VSET: profile summary, all
   encoded nodes (origins before clones), all outgoing edges for
   partition functions, toplevel asms (once per WPA run), and finally
   the reference section.  */

void
output_symtab (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob;
  lto_symtab_encoder_iterator lsei;
  int i, n_nodes;
  lto_symtab_encoder_t encoder;
  /* Static so toplevel asms are only emitted into the first partition
     written by this process.  */
  static bool asm_nodes_output = false;

  if (flag_wpa)
    output_cgraph_opt_summary ();

  ob = lto_create_simple_output_block (LTO_section_symtab_nodes);

  output_profile_summary (ob);

  /* An encoder for cgraph nodes should have been created by
     ipa_write_summaries_1.  */
  gcc_assert (ob->decl_state->symtab_node_encoder);
  encoder = ob->decl_state->symtab_node_encoder;

  /* Write out the nodes.  We must first output a node and then its clones,
     otherwise at a time reading back the node there would be nothing to clone
     from.  */
  n_nodes = lto_symtab_encoder_size (encoder);
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node node = lto_symtab_encoder_deref (encoder, i);
      if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
        lto_output_node (ob, cnode, encoder);
      else
        lto_output_varpool_node (ob, varpool (node), encoder);

    }

  /* Go over the nodes in SET again to write edges.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      output_outgoing_cgraph_edges (node->callees, ob, encoder);
      output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
    }

  /* Zero tag terminates the node/edge records.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  lto_destroy_simple_output_block (ob);

  /* Emit toplevel asms.
     When doing WPA we must output every asm just once.  Since we do not partition asm
     nodes at all, output them to first output.  This is kind of hack, but should work
     well.  */
  if (!asm_nodes_output)
    {
      asm_nodes_output = true;
      lto_output_toplevel_asms ();
    }

  output_refs (encoder);
}
940
/* Overwrite the information in NODE based on FILE_DATA, TAG, FLAGS,
   STACK_SIZE, SELF_TIME and SELF_SIZE.  This is called either to initialize
   NODE or to replace the values in it, for instance because the first
   time we saw it, the function body was not available but now it
   is.  BP is a bitpack with all the bitflags for NODE read from the
   stream.

   The unpack order below is the wire format: it must stay in sync with
   the corresponding bp_pack_value calls on the writer side.  */

static void
input_overwrite_node (struct lto_file_decl_data *file_data,
		      struct cgraph_node *node,
		      enum LTO_symtab_tags tag,
		      struct bitpack_d *bp)
{
  /* Stash the tag in AUX; input_cgraph_1 uses it both to detect duplicate
     reads and for its post-read fixup passes.  */
  node->symbol.aux = (void *) tag;
  node->symbol.lto_file_data = file_data;

  node->local.local = bp_unpack_value (bp, 1);
  node->symbol.externally_visible = bp_unpack_value (bp, 1);
  node->symbol.definition = bp_unpack_value (bp, 1);
  node->local.versionable = bp_unpack_value (bp, 1);
  node->local.can_change_signature = bp_unpack_value (bp, 1);
  node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
  node->symbol.force_output = bp_unpack_value (bp, 1);
  node->symbol.forced_by_abi = bp_unpack_value (bp, 1);
  node->symbol.unique_name = bp_unpack_value (bp, 1);
  node->symbol.address_taken = bp_unpack_value (bp, 1);
  node->symbol.used_from_other_partition = bp_unpack_value (bp, 1);
  node->lowered = bp_unpack_value (bp, 1);
  /* Analyzed-ness is implied by the tag, not streamed as a bit.  */
  node->symbol.analyzed = tag == LTO_symtab_analyzed_node;
  node->symbol.in_other_partition = bp_unpack_value (bp, 1);
  if (node->symbol.in_other_partition
      /* Avoid updating decl when we are seeing just inline clone.
	 When inlining function that has functions already inlined into it,
	 we produce clones of inline clones.

	 WPA partitioning might put each clone into different unit and
	 we might end up streaming inline clone from other partition
	 to support clone we are interested in. */
      && (!node->clone_of
	  || node->clone_of->symbol.decl != node->symbol.decl))
    {
      /* The body lives in another partition; mark the decl external so
	 no code is emitted for it here.  */
      DECL_EXTERNAL (node->symbol.decl) = 1;
      TREE_STATIC (node->symbol.decl) = 0;
    }
  node->symbol.alias = bp_unpack_value (bp, 1);
  node->symbol.weakref = bp_unpack_value (bp, 1);
  node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
  node->only_called_at_startup = bp_unpack_value (bp, 1);
  node->only_called_at_exit = bp_unpack_value (bp, 1);
  node->tm_clone = bp_unpack_value (bp, 1);
  node->thunk.thunk_p = bp_unpack_value (bp, 1);
  node->symbol.resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
				            LDPR_NUM_KNOWN);
}
995
996 /* Return string alias is alias of. */
997
998 static tree
999 get_alias_symbol (tree decl)
1000 {
1001 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1002 return get_identifier (TREE_STRING_POINTER
1003 (TREE_VALUE (TREE_VALUE (alias))));
1004 }
1005
/* Read a node from input_block IB.  TAG is the node's tag just read.
   Return the node read or overwritten.

   The read order below is the wire format and must mirror the writer
   (lto_output_node).  */

static struct cgraph_node *
input_node (struct lto_file_decl_data *file_data,
	    struct lto_input_block *ib,
	    enum LTO_symtab_tags tag,
	    vec<symtab_node> nodes)
{
  gcc::pass_manager *passes = g->get_passes ();
  tree fn_decl;
  struct cgraph_node *node;
  struct bitpack_d bp;
  unsigned decl_index;
  int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
  int clone_ref;
  int order;
  int i, count;

  /* Orders were streamed relative to the writer's base; rebase into this
     unit's order space.  */
  order = streamer_read_hwi (ib) + order_base;
  clone_ref = streamer_read_hwi (ib);

  decl_index = streamer_read_uhwi (ib);
  fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);

  if (clone_ref != LCC_NOT_FOUND)
    {
      /* The node is a clone; its origin was streamed earlier, so it is
	 already present in NODES at index CLONE_REF.  */
      node = cgraph_clone_node (cgraph (nodes[clone_ref]), fn_decl,
				0, CGRAPH_FREQ_BASE, false,
				vNULL, false, NULL);
    }
  else
    {
      /* Declaration of functions can be already merged with a declaration
	 from other input file.  We keep cgraph unmerged until after streaming
	 of ipa passes is done.  Always forcibly create a fresh node.  */
      node = cgraph_create_empty_node ();
      node->symbol.decl = fn_decl;
      symtab_register_node ((symtab_node)node);
    }

  node->symbol.order = order;
  if (order >= symtab_order)
    symtab_order = order + 1;

  node->count = streamer_read_gcov_count (ib);
  node->count_materialization_scale = streamer_read_hwi (ib);

  /* Read the list of IPA transform passes still to be applied to this
     node's body at materialization time, identified by pass id.  */
  count = streamer_read_hwi (ib);
  node->ipa_transforms_to_apply = vNULL;
  for (i = 0; i < count; i++)
    {
      struct opt_pass *pass;
      int pid = streamer_read_hwi (ib);

      gcc_assert (pid < passes->passes_by_id_size);
      pass = passes->passes_by_id[pid];
      node->ipa_transforms_to_apply.safe_push ((struct ipa_opt_pass_d *) pass);
    }

  if (tag == LTO_symtab_analyzed_node)
    ref = streamer_read_hwi (ib);

  ref2 = streamer_read_hwi (ib);

  /* Make sure that we have not read this node before.  Nodes that
     have already been read will have their tag stored in the 'aux'
     field.  Since built-in functions can be referenced in multiple
     functions, they are expected to be read more than once.  */
  if (node->symbol.aux && !DECL_BUILT_IN (node->symbol.decl))
    internal_error ("bytecode stream: found multiple instances of cgraph "
		    "node with uid %d", node->uid);

  bp = streamer_read_bitpack (ib);
  input_overwrite_node (file_data, node, tag, &bp);

  /* Store a reference for now, and fix up later to be a pointer.  */
  node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;

  /* Store a reference for now, and fix up later to be a pointer.  */
  node->symbol.same_comdat_group = (symtab_node) (intptr_t) ref2;

  if (node->thunk.thunk_p)
    {
      int type = streamer_read_uhwi (ib);
      HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
      HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);

      node->thunk.fixed_offset = fixed_offset;
      /* TYPE is a bitmask: bit 1 = this_adjusting, bit 2 = virtual_offset_p.  */
      node->thunk.this_adjusting = (type & 2);
      node->thunk.virtual_value = virtual_value;
      node->thunk.virtual_offset_p = (type & 4);
    }
  /* An unanalyzed weakref records its target only via the "alias"
     attribute on the decl; recover it from there.  */
  if (node->symbol.alias && !node->symbol.analyzed && node->symbol.weakref)
    node->symbol.alias_target = get_alias_symbol (node->symbol.decl);
  node->profile_id = streamer_read_hwi (ib);
  return node;
}
1104
/* Read a varpool node from input_block IB and return it.  The read and
   unpack order below is the wire format and must mirror
   lto_output_varpool_node.  */

static struct varpool_node *
input_varpool_node (struct lto_file_decl_data *file_data,
		    struct lto_input_block *ib)
{
  int decl_index;
  tree var_decl;
  struct varpool_node *node;
  struct bitpack_d bp;
  int ref = LCC_NOT_FOUND;
  int order;

  /* Rebase the streamed order into this unit's order space.  */
  order = streamer_read_hwi (ib) + order_base;
  decl_index = streamer_read_uhwi (ib);
  var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);

  /* Declaration of functions can be already merged with a declaration
     from other input file.  We keep cgraph unmerged until after streaming
     of ipa passes is done.  Always forcibly create a fresh node.  */
  node = varpool_create_empty_node ();
  node->symbol.decl = var_decl;
  symtab_register_node ((symtab_node)node);

  node->symbol.order = order;
  if (order >= symtab_order)
    symtab_order = order + 1;
  node->symbol.lto_file_data = file_data;

  bp = streamer_read_bitpack (ib);
  node->symbol.externally_visible = bp_unpack_value (&bp, 1);
  node->symbol.force_output = bp_unpack_value (&bp, 1);
  node->symbol.forced_by_abi = bp_unpack_value (&bp, 1);
  node->symbol.unique_name = bp_unpack_value (&bp, 1);
  node->symbol.definition = bp_unpack_value (&bp, 1);
  node->symbol.alias = bp_unpack_value (&bp, 1);
  node->symbol.weakref = bp_unpack_value (&bp, 1);
  node->symbol.analyzed = bp_unpack_value (&bp, 1);
  node->symbol.used_from_other_partition = bp_unpack_value (&bp, 1);
  node->symbol.in_other_partition = bp_unpack_value (&bp, 1);
  if (node->symbol.in_other_partition)
    {
      /* The definition lives in another partition; make the decl
	 external here so no storage is emitted for it.  */
      DECL_EXTERNAL (node->symbol.decl) = 1;
      TREE_STATIC (node->symbol.decl) = 0;
    }
  /* An unanalyzed weakref records its target only via the "alias"
     attribute on the decl; recover it from there.  */
  if (node->symbol.alias && !node->symbol.analyzed && node->symbol.weakref)
    node->symbol.alias_target = get_alias_symbol (node->symbol.decl);
  ref = streamer_read_hwi (ib);
  /* Store a reference for now, and fix up later to be a pointer.  */
  node->symbol.same_comdat_group = (symtab_node) (intptr_t) ref;
  node->symbol.resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
					        LDPR_NUM_KNOWN);

  return node;
}
1161
1162 /* Read a node from input_block IB. TAG is the node's tag just read.
1163 Return the node read or overwriten. */
1164
1165 static void
1166 input_ref (struct lto_input_block *ib,
1167 symtab_node referring_node,
1168 vec<symtab_node> nodes)
1169 {
1170 symtab_node node = NULL;
1171 struct bitpack_d bp;
1172 enum ipa_ref_use use;
1173 bool speculative;
1174 struct ipa_ref *ref;
1175
1176 bp = streamer_read_bitpack (ib);
1177 use = (enum ipa_ref_use) bp_unpack_value (&bp, 2);
1178 speculative = (enum ipa_ref_use) bp_unpack_value (&bp, 1);
1179 node = nodes[streamer_read_hwi (ib)];
1180 ref = ipa_record_reference (referring_node, node, use, NULL);
1181 ref->speculative = speculative;
1182 if (is_a <cgraph_node> (referring_node))
1183 ref->lto_stmt_uid = streamer_read_hwi (ib);
1184 }
1185
/* Read an edge from IB.  NODES points to a vector of previously read nodes for
   decoding caller and callee of the edge to be read.  If INDIRECT is true, the
   edge being read is indirect (in the sense that it has
   indirect_unknown_callee set).

   The read order below is the wire format and must mirror the writer
   (lto_output_edge).  */

static void
input_edge (struct lto_input_block *ib, vec<symtab_node> nodes,
	    bool indirect)
{
  struct cgraph_node *caller, *callee;
  struct cgraph_edge *edge;
  unsigned int stmt_id;
  gcov_type count;
  int freq;
  cgraph_inline_failed_t inline_failed;
  struct bitpack_d bp;
  int ecf_flags = 0;

  caller = cgraph (nodes[streamer_read_hwi (ib)]);
  if (caller == NULL || caller->symbol.decl == NULL_TREE)
    internal_error ("bytecode stream: no caller found while reading edge");

  if (!indirect)
    {
      callee = cgraph (nodes[streamer_read_hwi (ib)]);
      if (callee == NULL || callee->symbol.decl == NULL_TREE)
	internal_error ("bytecode stream: no callee found while reading edge");
    }
  else
    callee = NULL;

  count = streamer_read_gcov_count (ib);

  bp = streamer_read_bitpack (ib);
  inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_enum, CIF_N_REASONS);
  stmt_id = bp_unpack_var_len_unsigned (&bp);
  freq = (int) bp_unpack_var_len_unsigned (&bp);

  if (indirect)
    edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq);
  else
    edge = cgraph_create_edge (caller, callee, NULL, count, freq);

  edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
  edge->speculative = bp_unpack_value (&bp, 1);
  /* Statement uids are resolved to real statements when function
     bodies are materialized.  */
  edge->lto_stmt_uid = stmt_id;
  edge->inline_failed = inline_failed;
  edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
  edge->can_throw_external = bp_unpack_value (&bp, 1);
  if (indirect)
    {
      /* Indirect edges additionally carry the ECF flags of the (unknown)
	 callee, one bit each, in the order packed by the writer.  */
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_CONST;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_PURE;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NORETURN;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_MALLOC;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NOTHROW;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_RETURNS_TWICE;
      edge->indirect_info->ecf_flags = ecf_flags;
      edge->indirect_info->common_target_id = streamer_read_hwi (ib);
      /* The probability is only streamed when a common target exists.  */
      if (edge->indirect_info->common_target_id)
        edge->indirect_info->common_target_probability = streamer_read_hwi (ib);
    }
}
1255
1256
/* Read a cgraph from IB using the info in FILE_DATA.

   Reads the tagged stream of nodes and edges until the zero terminator,
   then performs two fixup passes over the nodes: resolving the streamed
   integer references (inlined_to, same_comdat_group) into pointers, and
   re-initializing the AUX fields.  Returns the vector of nodes in stream
   order; edge and ref records index into this vector.  */

static vec<symtab_node>
input_cgraph_1 (struct lto_file_decl_data *file_data,
		struct lto_input_block *ib)
{
  enum LTO_symtab_tags tag;
  vec<symtab_node> nodes = vNULL;
  symtab_node node;
  unsigned i;

  tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
  /* Orders in the stream are relative; rebase them onto the current
     symtab order counter.  */
  order_base = symtab_order;
  while (tag)
    {
      if (tag == LTO_symtab_edge)
        input_edge (ib, nodes, false);
      else if (tag == LTO_symtab_indirect_edge)
        input_edge (ib, nodes, true);
      else if (tag == LTO_symtab_variable)
        {
	  node = (symtab_node)input_varpool_node (file_data, ib);
          nodes.safe_push (node);
	  lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
        }
      else
	{
	  node = (symtab_node)input_node (file_data, ib, tag, nodes);
	  if (node == NULL || node->symbol.decl == NULL_TREE)
	    internal_error ("bytecode stream: found empty cgraph node");
	  nodes.safe_push (node);
	  lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
	}

      tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
    }

  lto_input_toplevel_asms (file_data, order_base);

  /* AUX pointers should be all non-zero for function nodes read from the stream.
     (input_overwrite_node stored the tag there.)  */
#ifdef ENABLE_CHECKING
  FOR_EACH_VEC_ELT (nodes, i, node)
    gcc_assert (node->symbol.aux || !is_a <cgraph_node> (node));
#endif
  FOR_EACH_VEC_ELT (nodes, i, node)
    {
      int ref;
      if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
	{
	  ref = (int) (intptr_t) cnode->global.inlined_to;

	  /* We share declaration of builtins, so we may read same node twice.  */
	  if (!node->symbol.aux)
	    continue;
	  node->symbol.aux = NULL;

	  /* Fixup inlined_to from reference to pointer.  */
	  if (ref != LCC_NOT_FOUND)
	    cgraph (node)->global.inlined_to = cgraph (nodes[ref]);
	  else
	    cnode->global.inlined_to = NULL;
	}

      ref = (int) (intptr_t) node->symbol.same_comdat_group;

      /* Fixup same_comdat_group from reference to pointer.  */
      if (ref != LCC_NOT_FOUND)
	node->symbol.same_comdat_group = nodes[ref];
      else
	node->symbol.same_comdat_group = NULL;
    }
  /* Mark function nodes with non-NULL AUX; input_symtab later uses this
     to tell stream-read nodes from locally created ones.  */
  FOR_EACH_VEC_ELT (nodes, i, node)
    node->symbol.aux = is_a <cgraph_node> (node) ? (void *)1 : NULL;
  return nodes;
}
1332
1333 /* Input ipa_refs. */
1334
1335 static void
1336 input_refs (struct lto_input_block *ib,
1337 vec<symtab_node> nodes)
1338 {
1339 int count;
1340 int idx;
1341 while (true)
1342 {
1343 symtab_node node;
1344 count = streamer_read_uhwi (ib);
1345 if (!count)
1346 break;
1347 idx = streamer_read_uhwi (ib);
1348 node = nodes[idx];
1349 while (count)
1350 {
1351 input_ref (ib, node, nodes);
1352 count--;
1353 }
1354 }
1355 }
1356
1357
/* Whole-program profile summary accumulated across all input units;
   installed as PROFILE_INFO by merge_profile_summaries.  */
static struct gcov_ctr_summary lto_gcov_summary;
1359
/* Input profile_info from IB into FILE_DATA->profile_info.

   The read order mirrors output_profile_summary: run count, sum_max,
   sum_all, a bitpack of non-zero histogram buckets, then the counter
   triples for each flagged bucket.  A zero run count means no profile
   data and nothing further is streamed.  */
static void
input_profile_summary (struct lto_input_block *ib,
		       struct lto_file_decl_data *file_data)
{
  unsigned h_ix;
  struct bitpack_d bp;
  unsigned int runs = streamer_read_uhwi (ib);
  if (runs)
    {
      file_data->profile_info.runs = runs;
      file_data->profile_info.sum_max = streamer_read_gcov_count (ib);
      file_data->profile_info.sum_all = streamer_read_gcov_count (ib);

      memset (file_data->profile_info.histogram, 0,
              sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
      /* Input the bitpack of non-zero histogram indices.  */
      bp = streamer_read_bitpack (ib);
      /* Read in and unpack the full bitpack, flagging non-zero
         histogram entries by setting the num_counters non-zero.  */
      for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
        {
          file_data->profile_info.histogram[h_ix].num_counters
              = bp_unpack_value (&bp, 1);
        }
      for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
        {
          if (!file_data->profile_info.histogram[h_ix].num_counters)
            continue;

	  /* Only flagged buckets carry streamed counter data.  */
          file_data->profile_info.histogram[h_ix].num_counters
              = streamer_read_gcov_count (ib);
          file_data->profile_info.histogram[h_ix].min_value
              = streamer_read_gcov_count (ib);
          file_data->profile_info.histogram[h_ix].cum_value
              = streamer_read_gcov_count (ib);
        }
      /* IPA-profile computes hot bb threshold based on cumulated
	 whole program profile.  We need to stream it down to ltrans.  */
      if (flag_ltrans)
	set_hot_bb_threshold (streamer_read_gcov_count (ib));
    }

}
1404
1405 /* Rescale profile summaries to the same number of runs in the whole unit. */
1406
1407 static void
1408 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1409 {
1410 struct lto_file_decl_data *file_data;
1411 unsigned int j, h_ix;
1412 gcov_unsigned_t max_runs = 0;
1413 struct cgraph_node *node;
1414 struct cgraph_edge *edge;
1415 gcov_type saved_sum_all = 0;
1416 gcov_ctr_summary *saved_profile_info = 0;
1417 int saved_scale = 0;
1418
1419 /* Find unit with maximal number of runs. If we ever get serious about
1420 roundoff errors, we might also consider computing smallest common
1421 multiply. */
1422 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1423 if (max_runs < file_data->profile_info.runs)
1424 max_runs = file_data->profile_info.runs;
1425
1426 if (!max_runs)
1427 return;
1428
1429 /* Simple overflow check. We probably don't need to support that many train
1430 runs. Such a large value probably imply data corruption anyway. */
1431 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1432 {
1433 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1434 INT_MAX / REG_BR_PROB_BASE);
1435 return;
1436 }
1437
1438 profile_info = &lto_gcov_summary;
1439 lto_gcov_summary.runs = max_runs;
1440 lto_gcov_summary.sum_max = 0;
1441 memset (lto_gcov_summary.histogram, 0,
1442 sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
1443
1444 /* Rescale all units to the maximal number of runs.
1445 sum_max can not be easily merged, as we have no idea what files come from
1446 the same run. We do not use the info anyway, so leave it 0. */
1447 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1448 if (file_data->profile_info.runs)
1449 {
1450 int scale = GCOV_COMPUTE_SCALE (max_runs,
1451 file_data->profile_info.runs);
1452 lto_gcov_summary.sum_max
1453 = MAX (lto_gcov_summary.sum_max,
1454 apply_scale (file_data->profile_info.sum_max, scale));
1455 lto_gcov_summary.sum_all
1456 = MAX (lto_gcov_summary.sum_all,
1457 apply_scale (file_data->profile_info.sum_all, scale));
1458 /* Save a pointer to the profile_info with the largest
1459 scaled sum_all and the scale for use in merging the
1460 histogram. */
1461 if (!saved_profile_info
1462 || lto_gcov_summary.sum_all > saved_sum_all)
1463 {
1464 saved_profile_info = &file_data->profile_info;
1465 saved_sum_all = lto_gcov_summary.sum_all;
1466 saved_scale = scale;
1467 }
1468 }
1469
1470 gcc_assert (saved_profile_info);
1471
1472 /* Scale up the histogram from the profile that had the largest
1473 scaled sum_all above. */
1474 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1475 {
1476 /* Scale up the min value as we did the corresponding sum_all
1477 above. Use that to find the new histogram index. */
1478 gcov_type scaled_min
1479 = apply_scale (saved_profile_info->histogram[h_ix].min_value,
1480 saved_scale);
1481 /* The new index may be shared with another scaled histogram entry,
1482 so we need to account for a non-zero histogram entry at new_ix. */
1483 unsigned new_ix = gcov_histo_index (scaled_min);
1484 lto_gcov_summary.histogram[new_ix].min_value
1485 = (lto_gcov_summary.histogram[new_ix].num_counters
1486 ? MIN (lto_gcov_summary.histogram[new_ix].min_value, scaled_min)
1487 : scaled_min);
1488 /* Some of the scaled counter values would ostensibly need to be placed
1489 into different (larger) histogram buckets, but we keep things simple
1490 here and place the scaled cumulative counter value in the bucket
1491 corresponding to the scaled minimum counter value. */
1492 lto_gcov_summary.histogram[new_ix].cum_value
1493 += apply_scale (saved_profile_info->histogram[h_ix].cum_value,
1494 saved_scale);
1495 lto_gcov_summary.histogram[new_ix].num_counters
1496 += saved_profile_info->histogram[h_ix].num_counters;
1497 }
1498
1499 /* Watch roundoff errors. */
1500 if (lto_gcov_summary.sum_max < max_runs)
1501 lto_gcov_summary.sum_max = max_runs;
1502
1503 /* If merging already happent at WPA time, we are done. */
1504 if (flag_ltrans)
1505 return;
1506
1507 /* Now compute count_materialization_scale of each node.
1508 During LTRANS we already have values of count_materialization_scale
1509 computed, so just update them. */
1510 FOR_EACH_FUNCTION (node)
1511 if (node->symbol.lto_file_data
1512 && node->symbol.lto_file_data->profile_info.runs)
1513 {
1514 int scale;
1515
1516 scale = RDIV (node->count_materialization_scale * max_runs,
1517 node->symbol.lto_file_data->profile_info.runs);
1518 node->count_materialization_scale = scale;
1519 if (scale < 0)
1520 fatal_error ("Profile information in %s corrupted",
1521 file_data->file_name);
1522
1523 if (scale == REG_BR_PROB_BASE)
1524 continue;
1525 for (edge = node->callees; edge; edge = edge->next_callee)
1526 edge->count = apply_scale (edge->count, scale);
1527 node->count = apply_scale (node->count, scale);
1528 }
1529 }
1530
/* Input and merge the symtab from each of the .o files passed to
   lto1.

   For every input unit: read the profile summary and node/edge stream,
   then the ipa_refs section, and (during LTRANS) the optimization
   summaries.  Afterwards merge profile summaries across units and clear
   the AUX markers left by input_cgraph_1.  */

void
input_symtab (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;
  struct cgraph_node *node;

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib;
      vec<symtab_node> nodes;

      ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
					  &data, &len);
      if (!ib)
	fatal_error ("cannot find LTO cgraph in %s", file_data->file_name);
      input_profile_summary (ib, file_data);
      file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
      nodes = input_cgraph_1 (file_data, ib);
      lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
				      ib, data, len);

      /* References are streamed in a separate section and index into
	 NODES, so they must be read with the node vector still live.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_refs,
					  &data, &len);
      if (!ib)
	fatal_error ("cannot find LTO section refs in %s",
		     file_data->file_name);
      input_refs (ib, nodes);
      lto_destroy_simple_input_block (file_data, LTO_section_refs,
				      ib, data, len);
      if (flag_ltrans)
	input_cgraph_opt_summary (nodes);
      nodes.release ();
    }

  merge_profile_summaries (file_data_vec);
  get_working_sets ();


  /* Clear out the aux field that was used to store enough state to
     tell which nodes should be overwritten.  */
  FOR_EACH_FUNCTION (node)
    {
      /* Some nodes may have been created by cgraph_node.  This
	 happens when the callgraph contains nested functions.  If the
	 node for the parent function was never emitted to the gimple
	 file, cgraph_node will create a node for it when setting the
	 context of the nested function.  */
      if (node->symbol.lto_file_data)
	node->symbol.aux = NULL;
    }
}
1589
1590 /* True when we need optimization summary for NODE. */
1591
1592 static int
1593 output_cgraph_opt_summary_p (struct cgraph_node *node)
1594 {
1595 return (node->clone_of
1596 && (node->clone.tree_map
1597 || node->clone.args_to_skip
1598 || node->clone.combined_args_to_skip));
1599 }
1600
/* Output optimization summary for EDGE to OB.
   Intentionally empty: edges currently carry no per-edge optimization
   summary, but the hook is kept so input_edge_opt_summary stays in
   sync if data is ever added.  */
static void
output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
			 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
{
}
1607
/* Output optimization summary for NODE to OB.

   Streams the clone info: the args_to_skip and combined_args_to_skip
   bitmaps (as count + indices), the tree_map replacements, and the edge
   summaries of edges in the partition.  The write order is the wire
   format read back by input_node_opt_summary.  */

static void
output_node_opt_summary (struct output_block *ob,
			 struct cgraph_node *node,
			 lto_symtab_encoder_t encoder)
{
  unsigned int index;
  bitmap_iterator bi;
  struct ipa_replace_map *map;
  struct bitpack_d bp;
  int i;
  struct cgraph_edge *e;

  if (node->clone.args_to_skip)
    {
      /* Bitmap streamed as population count followed by set-bit indices.  */
      streamer_write_uhwi (ob, bitmap_count_bits (node->clone.args_to_skip));
      EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
	streamer_write_uhwi (ob, index);
    }
  else
    streamer_write_uhwi (ob, 0);
  if (node->clone.combined_args_to_skip)
    {
      streamer_write_uhwi (ob, bitmap_count_bits (node->clone.combined_args_to_skip));
      EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
	streamer_write_uhwi (ob, index);
    }
  else
    streamer_write_uhwi (ob, 0);
  streamer_write_uhwi (ob, vec_safe_length (node->clone.tree_map));
  FOR_EACH_VEC_SAFE_ELT (node->clone.tree_map, i, map)
    {
      /* At the moment we assume all old trees to be PARM_DECLs, because we have no
         mechanism to store function local declarations into summaries.  */
      gcc_assert (!map->old_tree);
      streamer_write_uhwi (ob, map->parm_num);
      gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
      stream_write_tree (ob, map->new_tree, true);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, map->replace_p, 1);
      bp_pack_value (&bp, map->ref_p, 1);
      streamer_write_bitpack (&bp);
    }

  /* Only edges of nodes actually in this partition are streamed.  */
  if (lto_symtab_encoder_in_partition_p (encoder, (symtab_node) node))
    {
      for (e = node->callees; e; e = e->next_callee)
	output_edge_opt_summary (ob, e);
      for (e = node->indirect_calls; e; e = e->next_callee)
	output_edge_opt_summary (ob, e);
    }
}
1661
1662 /* Output optimization summaries stored in callgraph.
1663 At the moment it is the clone info structure. */
1664
1665 static void
1666 output_cgraph_opt_summary (void)
1667 {
1668 int i, n_nodes;
1669 lto_symtab_encoder_t encoder;
1670 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1671 unsigned count = 0;
1672
1673 ob->cgraph_node = NULL;
1674 encoder = ob->decl_state->symtab_node_encoder;
1675 n_nodes = lto_symtab_encoder_size (encoder);
1676 for (i = 0; i < n_nodes; i++)
1677 {
1678 symtab_node node = lto_symtab_encoder_deref (encoder, i);
1679 cgraph_node *cnode = dyn_cast <cgraph_node> (node);
1680 if (cnode && output_cgraph_opt_summary_p (cnode))
1681 count++;
1682 }
1683 streamer_write_uhwi (ob, count);
1684 for (i = 0; i < n_nodes; i++)
1685 {
1686 symtab_node node = lto_symtab_encoder_deref (encoder, i);
1687 cgraph_node *cnode = dyn_cast <cgraph_node> (node);
1688 if (cnode && output_cgraph_opt_summary_p (cnode))
1689 {
1690 streamer_write_uhwi (ob, i);
1691 output_node_opt_summary (ob, cnode, encoder);
1692 }
1693 }
1694 produce_asm (ob, NULL);
1695 destroy_output_block (ob);
1696 }
1697
/* Input optimisation summary of EDGE.
   Intentionally empty: the writer (output_edge_opt_summary) streams
   nothing per edge; this stub keeps the reader structurally in sync.  */

static void
input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
			struct lto_input_block *ib_main ATTRIBUTE_UNUSED)
{
}
1705
/* Input optimisation summary of NODE.

   The read order mirrors output_node_opt_summary: args_to_skip bitmap
   (count + indices), combined_args_to_skip bitmap, then the tree_map
   replacements, then the per-edge summaries.  */

static void
input_node_opt_summary (struct cgraph_node *node,
			struct lto_input_block *ib_main,
			struct data_in *data_in)
{
  int i;
  int count;
  int bit;
  struct bitpack_d bp;
  struct cgraph_edge *e;

  count = streamer_read_uhwi (ib_main);
  /* A zero count means no bitmap was streamed; leave it NULL.  */
  if (count)
    node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
  for (i = 0; i < count; i++)
    {
      bit = streamer_read_uhwi (ib_main);
      bitmap_set_bit (node->clone.args_to_skip, bit);
    }
  count = streamer_read_uhwi (ib_main);
  if (count)
    node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
  for (i = 0; i < count; i++)
    {
      bit = streamer_read_uhwi (ib_main);
      bitmap_set_bit (node->clone.combined_args_to_skip, bit);
    }
  count = streamer_read_uhwi (ib_main);
  for (i = 0; i < count; i++)
    {
      struct ipa_replace_map *map = ggc_alloc_ipa_replace_map ();

      vec_safe_push (node->clone.tree_map, map);
      map->parm_num = streamer_read_uhwi (ib_main);
      /* Only PARM_DECL replacements by parameter number are streamed;
	 the writer asserts old_tree is NULL.  */
      map->old_tree = NULL;
      map->new_tree = stream_read_tree (ib_main, data_in);
      bp = streamer_read_bitpack (ib_main);
      map->replace_p = bp_unpack_value (&bp, 1);
      map->ref_p = bp_unpack_value (&bp, 1);
    }
  for (e = node->callees; e; e = e->next_callee)
    input_edge_opt_summary (e, ib_main);
  for (e = node->indirect_calls; e; e = e->next_callee)
    input_edge_opt_summary (e, ib_main);
}
1753
/* Read section in file FILE_DATA of length LEN with data DATA.

   DATA begins with an lto_function_header; the main stream and string
   table are located from its size fields.  Each record is an encoder
   index (into NODES) followed by that node's optimization summary.  */

static void
input_cgraph_opt_section (struct lto_file_decl_data *file_data,
			  const char *data, size_t len,
			  vec<symtab_node> nodes)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  /* Section layout: header | cfg | main stream | string table.  */
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
			header->main_size);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      int ref = streamer_read_uhwi (&ib_main);
      input_node_opt_summary (cgraph (nodes[ref]),
			      &ib_main, data_in);
    }
  lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
1789
1790 /* Input optimization summary of cgraph. */
1791
1792 static void
1793 input_cgraph_opt_summary (vec<symtab_node> nodes)
1794 {
1795 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1796 struct lto_file_decl_data *file_data;
1797 unsigned int j = 0;
1798
1799 while ((file_data = file_data_vec[j++]))
1800 {
1801 size_t len;
1802 const char *data =
1803 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
1804 &len);
1805
1806 if (data)
1807 input_cgraph_opt_section (file_data, data, len, nodes);
1808 }
1809 }