m68k: add musl support
[gcc.git] / gcc / lto-cgraph.c
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright (C) 2009-2019 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "predict.h"
31 #include "stringpool.h"
32 #include "tree-streamer.h"
33 #include "cgraph.h"
34 #include "tree-pass.h"
35 #include "profile.h"
36 #include "context.h"
37 #include "pass_manager.h"
38 #include "ipa-utils.h"
39 #include "omp-offload.h"
40 #include "stringpool.h"
41 #include "attribs.h"
42
43 /* True when asm nodes has been output. */
44 bool asm_nodes_output = false;
45
46 static void output_cgraph_opt_summary (void);
47 static void input_cgraph_opt_summary (vec<symtab_node *> nodes);
48
49 /* Number of LDPR values known to GCC. */
50 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
51
52 /* Cgraph streaming is organized as set of record whose type
53 is indicated by a tag. */
54 enum LTO_symtab_tags
55 {
56 /* Must leave 0 for the stopper. */
57
58 /* Cgraph node without body available. */
59 LTO_symtab_unavail_node = 1,
60 /* Cgraph node with function body. */
61 LTO_symtab_analyzed_node,
62 /* Cgraph edges. */
63 LTO_symtab_edge,
64 LTO_symtab_indirect_edge,
65 LTO_symtab_variable,
66 LTO_symtab_last_tag
67 };
68
69 /* Create a new symtab encoder.
70 if FOR_INPUT, the encoder allocate only datastructures needed
71 to read the symtab. */
72
73 lto_symtab_encoder_t
74 lto_symtab_encoder_new (bool for_input)
75 {
76 lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
77
78 if (!for_input)
79 encoder->map = new hash_map<symtab_node *, size_t>;
80 encoder->nodes.create (0);
81 return encoder;
82 }
83
84
85 /* Delete ENCODER and its components. */
86
87 void
88 lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
89 {
90 encoder->nodes.release ();
91 if (encoder->map)
92 delete encoder->map;
93 free (encoder);
94 }
95
96
/* Return the existing reference number of NODE in the symtab encoder in
   output block OB.  Assign a new reference if this is the first time
   NODE is encoded.  */

int
lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
			   symtab_node *node)
{
  int ref;

  /* Input encoders have no node -> index map (see
     lto_symtab_encoder_new): just append and return the fresh index.  */
  if (!encoder->map)
    {
      lto_encoder_entry entry = {node, false, false, false};

      ref = encoder->nodes.length ();
      encoder->nodes.safe_push (entry);
      return ref;
    }

  /* The map stores index + 1, so that a value of 0 means "absent".  */
  size_t *slot = encoder->map->get (node);
  if (!slot || !*slot)
    {
      lto_encoder_entry entry = {node, false, false, false};
      ref = encoder->nodes.length ();
      if (!slot)
	encoder->map->put (node, ref + 1);
      encoder->nodes.safe_push (entry);
    }
  else
    ref = *slot - 1;

  return ref;
}
130
/* Remove NODE from encoder.  Return true if NODE was present,
   false if it was never encoded.  */

bool
lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
				symtab_node *node)
{
  int index;
  lto_encoder_entry last_node;

  /* The map stores index + 1; 0 (or a missing slot) means NODE was
     never encoded.  */
  size_t *slot = encoder->map->get (node);
  if (slot == NULL || !*slot)
    return false;

  index = *slot - 1;
  gcc_checking_assert (encoder->nodes[index].node == node);

  /* Remove from vector.  We do this by swapping node with the last element
     of the vector.  */
  last_node = encoder->nodes.pop ();
  if (last_node.node != node)
    {
      /* Re-point the moved node's map entry at its new position
	 (again stored as index + 1).  */
      gcc_assert (encoder->map->put (last_node.node, index + 1));

      /* Move the last element to the original spot of NODE.  */
      encoder->nodes[index] = last_node;
    }

  /* Remove element from hash table.  */
  encoder->map->remove (node);
  return true;
}
162
163
164 /* Return TRUE if we should encode the body of NODE (if any). */
165
166 bool
167 lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
168 struct cgraph_node *node)
169 {
170 int index = lto_symtab_encoder_lookup (encoder, node);
171 return encoder->nodes[index].body;
172 }
173
174 /* Specify that we encode the body of NODE in this partition. */
175
176 static void
177 lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
178 struct cgraph_node *node)
179 {
180 int index = lto_symtab_encoder_encode (encoder, node);
181 gcc_checking_assert (encoder->nodes[index].node == node);
182 encoder->nodes[index].body = true;
183 }
184
185 /* Return TRUE if we should encode initializer of NODE (if any). */
186
187 bool
188 lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
189 varpool_node *node)
190 {
191 int index = lto_symtab_encoder_lookup (encoder, node);
192 if (index == LCC_NOT_FOUND)
193 return false;
194 return encoder->nodes[index].initializer;
195 }
196
197 /* Specify that we should encode initializer of NODE (if any). */
198
199 static void
200 lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
201 varpool_node *node)
202 {
203 int index = lto_symtab_encoder_lookup (encoder, node);
204 encoder->nodes[index].initializer = true;
205 }
206
207 /* Return TRUE if NODE is in this partition. */
208
209 bool
210 lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
211 symtab_node *node)
212 {
213 int index = lto_symtab_encoder_lookup (encoder, node);
214 if (index == LCC_NOT_FOUND)
215 return false;
216 return encoder->nodes[index].in_partition;
217 }
218
219 /* Specify that NODE is in this partition. */
220
221 void
222 lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
223 symtab_node *node)
224 {
225 int index = lto_symtab_encoder_encode (encoder, node);
226 encoder->nodes[index].in_partition = true;
227 }
228
/* Output the cgraph EDGE to OB using ENCODER.  The record layout
   (tag, caller ref, optional callee ref, count, bitpack, optional
   indirect-call info) must match the reader exactly.  */

static void
lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
		 lto_symtab_encoder_t encoder)
{
  unsigned int uid;
  intptr_t ref;
  struct bitpack_d bp;

  /* Tag first, so the reader knows whether a callee reference
     follows.  */
  if (edge->indirect_unknown_callee)
    streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
			 LTO_symtab_indirect_edge);
  else
    streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
			 LTO_symtab_edge);

  ref = lto_symtab_encoder_lookup (encoder, edge->caller);
  gcc_assert (ref != LCC_NOT_FOUND);
  streamer_write_hwi_stream (ob->main_stream, ref);

  /* Only direct edges carry a callee reference.  */
  if (!edge->indirect_unknown_callee)
    {
      ref = lto_symtab_encoder_lookup (encoder, edge->callee);
      gcc_assert (ref != LCC_NOT_FOUND);
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  edge->count.stream_out (ob->main_stream);

  bp = bitpack_create (ob->main_stream);
  /* When the caller's gimple body is available, identify the call site by
     gimple uid + 1 (0 is reserved); otherwise fall back to the LTO
     statement uid recorded on the edge.  */
  uid = (!gimple_has_body_p (edge->caller->decl) || edge->caller->thunk.thunk_p
	 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt) + 1);
  bp_pack_enum (&bp, cgraph_inline_failed_t,
		CIF_N_REASONS, edge->inline_failed);
  bp_pack_var_len_unsigned (&bp, uid);
  bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
  bp_pack_value (&bp, edge->speculative, 1);
  bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
  gcc_assert (!edge->call_stmt_cannot_inline_p
	      || edge->inline_failed != CIF_BODY_NOT_AVAILABLE);
  bp_pack_value (&bp, edge->can_throw_external, 1);
  bp_pack_value (&bp, edge->in_polymorphic_cdtor, 1);
  if (edge->indirect_unknown_callee)
    {
      /* ECF flags are streamed as individual bits.  */
      int flags = edge->indirect_info->ecf_flags;
      bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
      /* Flags that should not appear on indirect calls.  */
      gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
			     | ECF_MAY_BE_ALLOCA
			     | ECF_SIBCALL
			     | ECF_LEAF
			     | ECF_NOVOPS)));
    }
  streamer_write_bitpack (&bp);
  if (edge->indirect_unknown_callee)
    {
      streamer_write_hwi_stream (ob->main_stream,
				 edge->indirect_info->common_target_id);
      /* The probability is only meaningful (and only streamed) when a
	 common target was identified.  */
      if (edge->indirect_info->common_target_id)
	streamer_write_hwi_stream
	  (ob->main_stream, edge->indirect_info->common_target_probability);
    }
}
298
299 /* Return if NODE contain references from other partitions. */
300
301 bool
302 referenced_from_other_partition_p (symtab_node *node, lto_symtab_encoder_t encoder)
303 {
304 int i;
305 struct ipa_ref *ref = NULL;
306
307 for (i = 0; node->iterate_referring (i, ref); i++)
308 {
309 /* Ignore references from non-offloadable nodes while streaming NODE into
310 offload LTO section. */
311 if (!ref->referring->need_lto_streaming)
312 continue;
313
314 if (ref->referring->in_other_partition
315 || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
316 return true;
317 }
318 return false;
319 }
320
321 /* Return true when node is reachable from other partition. */
322
323 bool
324 reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
325 {
326 struct cgraph_edge *e;
327 if (!node->definition)
328 return false;
329 if (node->inlined_to)
330 return false;
331 for (e = node->callers; e; e = e->next_caller)
332 {
333 /* Ignore references from non-offloadable nodes while streaming NODE into
334 offload LTO section. */
335 if (!e->caller->need_lto_streaming)
336 continue;
337
338 if (e->caller->in_other_partition
339 || !lto_symtab_encoder_in_partition_p (encoder, e->caller))
340 return true;
341 }
342 return false;
343 }
344
345 /* Return if NODE contain references from other partitions. */
346
347 bool
348 referenced_from_this_partition_p (symtab_node *node,
349 lto_symtab_encoder_t encoder)
350 {
351 int i;
352 struct ipa_ref *ref = NULL;
353
354 for (i = 0; node->iterate_referring (i, ref); i++)
355 if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
356 return true;
357 return false;
358 }
359
360 /* Return true when node is reachable from other partition. */
361
362 bool
363 reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
364 {
365 struct cgraph_edge *e;
366 for (e = node->callers; e; e = e->next_caller)
367 if (lto_symtab_encoder_in_partition_p (encoder, e->caller))
368 return true;
369 return false;
370 }
371
/* Output the cgraph NODE to OB.  ENCODER is used to find the
   reference number of NODE->inlined_to.  SET is the set of nodes we
   are writing to the current file.  If NODE is not in SET, then NODE
   is a boundary of a cgraph_node_set and we pretend NODE just has a
   decl and no callees.  WRITTEN_DECLS is the set of FUNCTION_DECLs
   that have had their callgraph node written so far.  This is used to
   determine if NODE is a clone of a previously written node.  */

static void
lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
		 lto_symtab_encoder_t encoder)
{
  unsigned int tag;
  struct bitpack_d bp;
  bool boundary_p;
  intptr_t ref;
  bool in_other_partition = false;
  struct cgraph_node *clone_of, *ultimate_clone_of;
  ipa_opt_pass_d *pass;
  int i;
  const char *comdat;
  const char *section;
  tree group;

  boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);

  /* Stream as "analyzed" only when the reader will actually get a
     body, alias or thunk information; otherwise only a declaration is
     available.  */
  if (node->analyzed && (!boundary_p || node->alias
			 || (node->thunk.thunk_p && !node->inlined_to)))
    tag = LTO_symtab_analyzed_node;
  else
    tag = LTO_symtab_unavail_node;

  streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
		       tag);
  streamer_write_hwi_stream (ob->main_stream, node->order);

  /* In WPA mode, we only output part of the call-graph.  Also, we
     fake cgraph node attributes.  There are two cases that we care.

     Boundary nodes: There are nodes that are not part of SET but are
     called from within SET.  We artificially make them look like
     externally visible nodes with no function body.

     Cherry-picked nodes: These are nodes we pulled from other
     translation units into SET during IPA-inlining.  We make them as
     local static nodes to prevent clashes with other local statics.  */
  if (boundary_p && node->analyzed
      && node->get_partitioning_class () == SYMBOL_PARTITION)
    {
      /* Inline clones cannot be part of boundary.
	 gcc_assert (!node->inlined_to);

	 FIXME: At the moment they can be, when partition contains an inline
	 clone that is clone of inline clone from outside partition.  We can
	 reshape the clone tree and make other tree to be the root, but it
	 needs a bit extra work and will be promptly done by cgraph_remove_node
	 after reading back.  */
      in_other_partition = 1;
    }

  /* Find the nearest ancestor clone that is present in ENCODER; REF
     ends up being its reference number.  */
  clone_of = node->clone_of;
  while (clone_of
	 && (ref = lto_symtab_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
    if (clone_of->prev_sibling_clone)
      clone_of = clone_of->prev_sibling_clone;
    else
      clone_of = clone_of->clone_of;

  /* See if body of the master function is output.  If not, we are seeing only
     a declaration and we do not need to pass down clone tree.  */
  ultimate_clone_of = clone_of;
  while (ultimate_clone_of && ultimate_clone_of->clone_of)
    ultimate_clone_of = ultimate_clone_of->clone_of;

  if (clone_of && !lto_symtab_encoder_encode_body_p (encoder, ultimate_clone_of))
    clone_of = NULL;

  if (tag == LTO_symtab_analyzed_node)
    gcc_assert (clone_of || !node->clone_of);
  if (!clone_of)
    streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
  else
    streamer_write_hwi_stream (ob->main_stream, ref);


  lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
  node->count.stream_out (ob->main_stream);
  streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);

  /* Stream the list of IPA transforms still to be applied, as pass
     numbers preceded by their count.  */
  streamer_write_hwi_stream (ob->main_stream,
			     node->ipa_transforms_to_apply.length ());
  FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
    streamer_write_hwi_stream (ob->main_stream, pass->static_pass_number);

  if (tag == LTO_symtab_analyzed_node)
    {
      /* LCC_NOT_FOUND marks a node that is not an inline clone.  */
      if (node->inlined_to)
	{
	  ref = lto_symtab_encoder_lookup (encoder, node->inlined_to);
	  gcc_assert (ref != LCC_NOT_FOUND);
	}
      else
	ref = LCC_NOT_FOUND;

      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  /* Comdat group name is streamed as a NUL-terminated string; empty
     string means "no comdat group".  */
  group = node->get_comdat_group ();
  if (group)
    comdat = IDENTIFIER_POINTER (group);
  else
    comdat = "";
  streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);

  if (group)
    {
      if (node->same_comdat_group)
	{
	  ref = LCC_NOT_FOUND;
	  /* Pick the first member of the same_comdat_group ring that
	     is present in ENCODER, if any.  */
	  for (struct symtab_node *n = node->same_comdat_group;
	       ref == LCC_NOT_FOUND && n != node; n = n->same_comdat_group)
	    ref = lto_symtab_encoder_lookup (encoder, n);
	}
      else
	ref = LCC_NOT_FOUND;
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  section = node->get_section ();
  if (!section)
    section = "";

  streamer_write_hwi_stream (ob->main_stream, node->tp_first_run);

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, node->local, 1);
  bp_pack_value (&bp, node->externally_visible, 1);
  bp_pack_value (&bp, node->no_reorder, 1);
  bp_pack_value (&bp, node->definition, 1);
  bp_pack_value (&bp, node->versionable, 1);
  bp_pack_value (&bp, node->can_change_signature, 1);
  bp_pack_value (&bp, node->redefined_extern_inline, 1);
  bp_pack_value (&bp, node->force_output, 1);
  bp_pack_value (&bp, node->forced_by_abi, 1);
  bp_pack_value (&bp, node->unique_name, 1);
  bp_pack_value (&bp, node->body_removed, 1);
  bp_pack_value (&bp, node->implicit_section, 1);
  bp_pack_value (&bp, node->address_taken, 1);
  /* used_from_other_partition bit: only meaningful for analyzed,
     partitionable symbols.  */
  bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
		 && node->get_partitioning_class () == SYMBOL_PARTITION
		 && (reachable_from_other_partition_p (node, encoder)
		     || referenced_from_other_partition_p (node, encoder)), 1);
  bp_pack_value (&bp, node->lowered, 1);
  bp_pack_value (&bp, in_other_partition, 1);
  bp_pack_value (&bp, node->alias, 1);
  bp_pack_value (&bp, node->transparent_alias, 1);
  bp_pack_value (&bp, node->weakref, 1);
  bp_pack_value (&bp, node->frequency, 2);
  bp_pack_value (&bp, node->only_called_at_startup, 1);
  bp_pack_value (&bp, node->only_called_at_exit, 1);
  bp_pack_value (&bp, node->tm_clone, 1);
  bp_pack_value (&bp, node->calls_comdat_local, 1);
  bp_pack_value (&bp, node->icf_merged, 1);
  bp_pack_value (&bp, node->nonfreeing_fn, 1);
  bp_pack_value (&bp, node->thunk.thunk_p, 1);
  bp_pack_value (&bp, node->parallelized_function, 1);
  bp_pack_enum (&bp, ld_plugin_symbol_resolution,
		LDPR_NUM_KNOWN,
		/* When doing incremental link, we will get new resolution
		   info next time we process the file.  */
		flag_incremental_link ? LDPR_UNKNOWN : node->resolution);
  bp_pack_value (&bp, node->split_part, 1);
  streamer_write_bitpack (&bp);
  streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);

  /* Stream thunk info always because we use it in
     ipa_polymorphic_call_context::ipa_polymorphic_call_context
     to properly interpret THIS pointers for thunks that has been converted
     to Gimple.  */
  if (node->definition)
    {
      /* Thunk flags are packed into one uhwi: bit 1 = this_adjusting,
	 bit 2 = virtual_offset_p; the low bit is always set.  */
      streamer_write_uhwi_stream
	 (ob->main_stream,
	  1 + (node->thunk.this_adjusting != 0) * 2
	  + (node->thunk.virtual_offset_p != 0) * 4);
      streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
      streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
      streamer_write_uhwi_stream (ob->main_stream, node->thunk.indirect_offset);
    }
  streamer_write_hwi_stream (ob->main_stream, node->profile_id);
  /* Init/fini priorities are streamed only when the corresponding
     DECL flag is set; the reader checks the same flags.  */
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    streamer_write_hwi_stream (ob->main_stream, node->get_init_priority ());
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    streamer_write_hwi_stream (ob->main_stream, node->get_fini_priority ());
}
567
/* Output the varpool NODE to OB.
   If NODE is not in SET, then NODE is a boundary.  */

static void
lto_output_varpool_node (struct lto_simple_output_block *ob, varpool_node *node,
			 lto_symtab_encoder_t encoder)
{
  bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
  bool encode_initializer_p
    = (node->definition
       && lto_symtab_encoder_encode_initializer_p (encoder, node));
  struct bitpack_d bp;
  int ref;
  const char *comdat;
  const char *section;
  tree group;

  gcc_assert (!encode_initializer_p || node->definition);
  gcc_assert (boundary_p || encode_initializer_p);

  streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
		       LTO_symtab_variable);
  streamer_write_hwi_stream (ob->main_stream, node->order);
  lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, node->externally_visible, 1);
  bp_pack_value (&bp, node->no_reorder, 1);
  bp_pack_value (&bp, node->force_output, 1);
  bp_pack_value (&bp, node->forced_by_abi, 1);
  bp_pack_value (&bp, node->unique_name, 1);
  /* A defined variable whose initializer is not streamed (and that is
     not an alias) looks body-removed to the reader.  */
  bp_pack_value (&bp,
		 node->body_removed
		 || (!encode_initializer_p && !node->alias && node->definition),
		 1);
  bp_pack_value (&bp, node->implicit_section, 1);
  bp_pack_value (&bp, node->writeonly, 1);
  bp_pack_value (&bp, node->definition && (encode_initializer_p || node->alias),
		 1);
  bp_pack_value (&bp, node->alias, 1);
  bp_pack_value (&bp, node->transparent_alias, 1);
  bp_pack_value (&bp, node->weakref, 1);
  bp_pack_value (&bp, node->analyzed && (!boundary_p || node->alias), 1);
  gcc_assert (node->definition || !node->analyzed);
  /* Constant pool initializers can be de-unified into individual ltrans units.
     FIXME: Alternatively at -Os we may want to avoid generating for them the local
     labels and share them across LTRANS partitions.  */
  if (node->get_partitioning_class () != SYMBOL_PARTITION)
    {
      bp_pack_value (&bp, 0, 1);  /* used_from_other_partition.  */
      bp_pack_value (&bp, 0, 1);  /* in_other_partition.  */
    }
  else
    {
      bp_pack_value (&bp, node->definition
		     && referenced_from_other_partition_p (node, encoder), 1);
      bp_pack_value (&bp, node->analyzed
		     && boundary_p && !DECL_EXTERNAL (node->decl), 1);
	  /* in_other_partition.  */
    }
  bp_pack_value (&bp, node->tls_model, 3);
  bp_pack_value (&bp, node->used_by_single_function, 1);
  bp_pack_value (&bp, node->dynamically_initialized, 1);
  streamer_write_bitpack (&bp);

  /* Comdat group name is streamed as a NUL-terminated string; empty
     string means "no comdat group".  */
  group = node->get_comdat_group ();
  if (group)
    comdat = IDENTIFIER_POINTER (group);
  else
    comdat = "";
  streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);

  if (group)
    {
      if (node->same_comdat_group)
	{
	  ref = LCC_NOT_FOUND;
	  /* Pick the first member of the same_comdat_group ring that
	     is present in ENCODER, if any.  */
	  for (struct symtab_node *n = node->same_comdat_group;
	       ref == LCC_NOT_FOUND && n != node; n = n->same_comdat_group)
	    ref = lto_symtab_encoder_lookup (encoder, n);
	}
      else
	ref = LCC_NOT_FOUND;
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  section = node->get_section ();
  if (!section)
    section = "";
  streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);

  streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
		       LDPR_NUM_KNOWN, node->resolution);
}
661
/* Output the IPA reference REF to OB using ENCODER.  (The original
   header comment was a stale copy of lto_output_varpool_node's.)  */

static void
lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
		lto_symtab_encoder_t encoder)
{
  struct bitpack_d bp;
  int nref;
  int uid = ref->lto_stmt_uid;
  struct cgraph_node *node;

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ref->use, 3);
  bp_pack_value (&bp, ref->speculative, 1);
  streamer_write_bitpack (&bp);
  nref = lto_symtab_encoder_lookup (encoder, ref->referred);
  gcc_assert (nref != LCC_NOT_FOUND);
  streamer_write_hwi_stream (ob->main_stream, nref);

  /* Only references originating from functions carry a statement uid;
     prefer gimple uid + 1 (0 is reserved) when a statement exists.  */
  node = dyn_cast <cgraph_node *> (ref->referring);
  if (node)
    {
      if (ref->stmt)
	uid = gimple_uid (ref->stmt) + 1;
      streamer_write_hwi_stream (ob->main_stream, uid);
    }
}
690
691 /* Stream out profile_summary to OB. */
692
693 static void
694 output_profile_summary (struct lto_simple_output_block *ob)
695 {
696 if (profile_info)
697 {
698 /* We do not output num and run_max, they are not used by
699 GCC profile feedback and they are difficult to merge from multiple
700 units. */
701 unsigned runs = (profile_info->runs);
702 streamer_write_uhwi_stream (ob->main_stream, runs);
703
704 /* IPA-profile computes hot bb threshold based on cumulated
705 whole program profile. We need to stream it down to ltrans. */
706 if (flag_wpa)
707 streamer_write_gcov_count_stream (ob->main_stream,
708 get_hot_bb_threshold ());
709 }
710 else
711 streamer_write_uhwi_stream (ob->main_stream, 0);
712 }
713
714 /* Output all callees or indirect outgoing edges. EDGE must be the first such
715 edge. */
716
717 static void
718 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
719 struct lto_simple_output_block *ob,
720 lto_symtab_encoder_t encoder)
721 {
722 if (!edge)
723 return;
724
725 /* Output edges in backward direction, so the reconstructed callgraph match
726 and it is easy to associate call sites in the IPA pass summaries. */
727 while (edge->next_callee)
728 edge = edge->next_callee;
729 for (; edge; edge = edge->prev_callee)
730 lto_output_edge (ob, edge, encoder);
731 }
732
/* Output the IPA reference lists for all nodes in ENCODER to the
   LTO_section_refs section.  Each record is: reference count,
   referring node index, then the references; a trailing 0 terminates
   the section.  */

static void
output_refs (lto_symtab_encoder_t encoder)
{
  struct lto_simple_output_block *ob;
  int count;
  struct ipa_ref *ref;

  ob = lto_create_simple_output_block (LTO_section_refs);

  for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);

      /* IPA_REF_ALIAS references are always preserved
	 in the boundary.  Alias node can't have other references and
	 can be always handled as if it's not in the boundary.  */
      if (!node->alias && !lto_symtab_encoder_in_partition_p (encoder, node))
	continue;

      count = node->ref_list.nreferences ();
      if (count)
	{
	  streamer_write_gcov_count_stream (ob->main_stream, count);
	  streamer_write_uhwi_stream (ob->main_stream,
				      lto_symtab_encoder_lookup (encoder, node));
	  for (int i = 0; node->iterate_reference (i, ref); i++)
	    lto_output_ref (ob, ref, encoder);
	}
    }

  /* Terminator: a zero count ends the section.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  lto_destroy_simple_output_block (ob);
}
769
770 /* Add NODE into encoder as well as nodes it is cloned from.
771 Do it in a way so clones appear first. */
772
773 static void
774 add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
775 bool include_body)
776 {
777 if (node->clone_of)
778 add_node_to (encoder, node->clone_of, include_body);
779 else if (include_body)
780 lto_set_symtab_encoder_encode_body (encoder, node);
781 lto_symtab_encoder_encode (encoder, node);
782 }
783
784 /* Add all references in NODE to encoders. */
785
786 static void
787 create_references (lto_symtab_encoder_t encoder, symtab_node *node)
788 {
789 int i;
790 struct ipa_ref *ref = NULL;
791 for (i = 0; node->iterate_reference (i, ref); i++)
792 if (is_a <cgraph_node *> (ref->referred))
793 add_node_to (encoder, dyn_cast <cgraph_node *> (ref->referred), false);
794 else
795 lto_symtab_encoder_encode (encoder, ref->referred);
796 }
797
798 /* Select what needs to be streamed out. In regular lto mode stream everything.
799 In offload lto mode stream only nodes marked as offloadable. */
800 void
801 select_what_to_stream (void)
802 {
803 struct symtab_node *snode;
804 FOR_EACH_SYMBOL (snode)
805 snode->need_lto_streaming = !lto_stream_offload_p || snode->offloadable;
806 }
807
/* Find all symbols we want to stream into given partition and insert them
   to encoders.

   The function actually replaces IN_ENCODER by new one.  The reason is that
   streaming code needs clone's origin to be streamed before clone.  This
   means that we need to insert the nodes in specific order.  This order is
   ignored by the partitioning logic earlier.  */

lto_symtab_encoder_t
compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
{
  struct cgraph_edge *edge;
  int i;
  lto_symtab_encoder_t encoder;
  lto_symtab_encoder_iterator lsei;
  hash_set<void *> reachable_call_targets;

  encoder = lto_symtab_encoder_new (false);

  /* Go over all entries in the IN_ENCODER and duplicate them to
     ENCODER.  At the same time insert masters of clones so
     every master appears before clone.  */
  for (lsei = lsei_start_function_in_partition (in_encoder);
       !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
    {
      struct cgraph_node *node = lsei_cgraph_node (lsei);
      if (!node->need_lto_streaming)
	continue;
      add_node_to (encoder, node, true);
      lto_set_symtab_encoder_in_partition (encoder, node);
      create_references (encoder, node);
    }
  for (lsei = lsei_start_variable_in_partition (in_encoder);
       !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
    {
      varpool_node *vnode = lsei_varpool_node (lsei);

      if (!vnode->need_lto_streaming)
	continue;
      lto_set_symtab_encoder_in_partition (encoder, vnode);
      lto_set_symtab_encoder_encode_initializer (encoder, vnode);
      create_references (encoder, vnode);
    }
  /* Pickle in also the initializer of all referenced readonly variables
     to help folding.  Constant pool variables are not shared, so we must
     pickle those too.  */
  for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
	{
	  if (!lto_symtab_encoder_encode_initializer_p (encoder,
							vnode)
	      && (((vnode->ctor_useable_for_folding_p ()
		   && (!DECL_VIRTUAL_P (vnode->decl)
		       || !flag_wpa
		       || flag_ltrans_devirtualize)))))
	    {
	      lto_set_symtab_encoder_encode_initializer (encoder, vnode);
	      create_references (encoder, vnode);
	    }
	}
    }

  /* Go over all the nodes again to include callees that are not in
     SET.  */
  for (lsei = lsei_start_function_in_partition (encoder);
       !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
    {
      struct cgraph_node *node = lsei_cgraph_node (lsei);
      for (edge = node->callees; edge; edge = edge->next_callee)
	{
	  struct cgraph_node *callee = edge->callee;
	  if (!lto_symtab_encoder_in_partition_p (encoder, callee))
	    {
	      /* We should have moved all the inlines.  */
	      gcc_assert (!callee->inlined_to);
	      add_node_to (encoder, callee, false);
	    }
	}
      /* Add all possible targets for late devirtualization.  */
      if (flag_ltrans_devirtualize || !flag_wpa)
	for (edge = node->indirect_calls; edge; edge = edge->next_callee)
	  if (edge->indirect_info->polymorphic)
	    {
	      unsigned int i;
	      void *cache_token;
	      bool final;
	      vec <cgraph_node *>targets
		= possible_polymorphic_call_targets
		    (edge, &final, &cache_token);
	      /* The cache token dedups target lists: process each
		 distinct list only once.  */
	      if (!reachable_call_targets.add (cache_token))
		{
		  for (i = 0; i < targets.length (); i++)
		    {
		      struct cgraph_node *callee = targets[i];

		      /* Adding an external declarations into the unit serves
			 no purpose and just increases its boundary.  */
		      if (callee->definition
			  && !lto_symtab_encoder_in_partition_p
			       (encoder, callee))
			{
			  gcc_assert (!callee->inlined_to);
			  add_node_to (encoder, callee, false);
			}
		    }
		}
	    }
    }
  /* Be sure to also insert alias target and thunk callees.  These need
     to stay to aid local calling conventions.  */
  for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (node);

      if (node->alias && node->analyzed)
	create_references (encoder, node);
      if (cnode
	  && cnode->thunk.thunk_p && !cnode->inlined_to)
	add_node_to (encoder, cnode->callees->callee, false);
      /* Follow transparent alias chains all the way to the real
	 target, encoding every step.  */
      while (node->transparent_alias && node->analyzed)
	{
	  node = node->get_alias_target ();
	  if (is_a <cgraph_node *> (node))
	    add_node_to (encoder, dyn_cast <cgraph_node *> (node),
			 false);
	  else
	    lto_symtab_encoder_encode (encoder, node);
	}
    }
  lto_symtab_encoder_delete (in_encoder);
  return encoder;
}
943
/* Output the part of the symtab in SET and VSET.  */

void
output_symtab (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob;
  int i, n_nodes;
  lto_symtab_encoder_t encoder;

  if (flag_wpa)
    output_cgraph_opt_summary ();

  ob = lto_create_simple_output_block (LTO_section_symtab_nodes);

  output_profile_summary (ob);

  /* An encoder for cgraph nodes should have been created by
     ipa_write_summaries_1.  */
  gcc_assert (ob->decl_state->symtab_node_encoder);
  encoder = ob->decl_state->symtab_node_encoder;

  /* Write out the nodes.  We must first output a node and then its clones,
     otherwise at a time reading back the node there would be nothing to clone
     from.  */
  n_nodes = lto_symtab_encoder_size (encoder);
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	lto_output_node (ob, cnode, encoder);
      else
	lto_output_varpool_node (ob, dyn_cast<varpool_node *> (node), encoder);
    }

  /* Go over the nodes in SET again to write edges.  */
  for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      /* dyn_cast yields NULL for varpool nodes, which have no edges.  */
      node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder, i));
      if (node
	  && ((node->thunk.thunk_p && !node->inlined_to)
	      || lto_symtab_encoder_in_partition_p (encoder, node)))
	{
	  output_outgoing_cgraph_edges (node->callees, ob, encoder);
	  output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
	}
    }

  /* Terminator: a zero tag ends the node/edge records.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  lto_destroy_simple_output_block (ob);

  /* Emit toplevel asms.
     When doing WPA we must output every asm just once.  Since we do not partition asm
     nodes at all, output them to first output.  This is kind of hack, but should work
     well.  */
  if (!asm_nodes_output)
    {
      asm_nodes_output = true;
      lto_output_toplevel_asms ();
    }

  output_refs (encoder);
}
1008
1009 /* Return identifier encoded in IB as a plain string. */
1010
1011 static tree
1012 read_identifier (class lto_input_block *ib)
1013 {
1014 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1015 tree id;
1016
1017 if (ib->data[ib->p + len])
1018 lto_section_overrun (ib);
1019 if (!len)
1020 {
1021 ib->p++;
1022 return NULL;
1023 }
1024 id = get_identifier (ib->data + ib->p);
1025 ib->p += len + 1;
1026 return id;
1027 }
1028
1029 /* Return string encoded in IB, NULL if string is empty. */
1030
1031 static const char *
1032 read_string (class lto_input_block *ib)
1033 {
1034 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1035 const char *str;
1036
1037 if (ib->data[ib->p + len])
1038 lto_section_overrun (ib);
1039 if (!len)
1040 {
1041 ib->p++;
1042 return NULL;
1043 }
1044 str = ib->data + ib->p;
1045 ib->p += len + 1;
1046 return str;
1047 }
1048
/* Output function/variable tables that will allow libgomp to look up offload
   target code.
   OFFLOAD_FUNCS is filled in expand_omp_target, OFFLOAD_VARS is filled in
   varpool_node::get_create.  In WHOPR (partitioned) mode during the WPA stage
   both OFFLOAD_FUNCS and OFFLOAD_VARS are filled by input_offload_tables.  */

void
output_offload_tables (void)
{
  /* Nothing to emit when no offload entities were recorded.  */
  if (vec_safe_is_empty (offload_funcs) && vec_safe_is_empty (offload_vars))
    return;

  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_offload_table);

  /* Each function entry is an LTO_symtab_unavail_node tag followed by the
     fn decl index; input_offload_tables relies on this exact layout.  */
  for (unsigned i = 0; i < vec_safe_length (offload_funcs); i++)
    {
      streamer_write_enum (ob->main_stream, LTO_symtab_tags,
			   LTO_symtab_last_tag, LTO_symtab_unavail_node);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
				(*offload_funcs)[i]);
    }

  /* Each variable entry is an LTO_symtab_variable tag followed by the
     var decl index.  */
  for (unsigned i = 0; i < vec_safe_length (offload_vars); i++)
    {
      streamer_write_enum (ob->main_stream, LTO_symtab_tags,
			   LTO_symtab_last_tag, LTO_symtab_variable);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream,
				 (*offload_vars)[i]);
    }

  /* Zero tag terminates the table.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);
  lto_destroy_simple_output_block (ob);

  /* In WHOPR mode during the WPA stage the joint offload tables need to be
     streamed to one partition only.  That's why we free offload_funcs and
     offload_vars after the first call of output_offload_tables.  */
  if (flag_wpa)
    {
      vec_free (offload_funcs);
      vec_free (offload_vars);
    }
}
1092
/* Verify the partitioning of NODE.  On the accelerator compiler, diagnose
   symbols referenced from offloaded code that were not marked for
   offloading; on the host, assert no cross-partition state leaked into a
   non-LTRANS compilation.  */

static inline void
verify_node_partition (symtab_node *node)
{
  /* During LTRANS cross-partition references are expected.  */
  if (flag_ltrans)
    return;

#ifdef ACCEL_COMPILER
  /* For offload compilation, a node in another partition means the symbol
     was used from offloaded code without being marked declare-target.  */
  if (node->in_other_partition)
    {
      if (TREE_CODE (node->decl) == FUNCTION_DECL)
	error_at (DECL_SOURCE_LOCATION (node->decl),
		  "function %qs has been referenced in offloaded code but"
		  " hasn%'t been marked to be included in the offloaded code",
		  node->name ());
      else if (VAR_P (node->decl))
	error_at (DECL_SOURCE_LOCATION (node->decl),
		  "variable %qs has been referenced in offloaded code but"
		  " hasn%'t been marked to be included in the offloaded code",
		  node->name ());
      else
	gcc_unreachable ();
    }
#else
  /* Outside LTRANS and offloading, partitioning flags must be clear.  */
  gcc_assert (!node->in_other_partition
	      && !node->used_from_other_partition);
#endif
}
1122
/* Overwrite the information in NODE based on FILE_DATA, TAG, FLAGS,
   STACK_SIZE, SELF_TIME and SELF_SIZE.  This is called either to initialize
   NODE or to replace the values in it, for instance because the first
   time we saw it, the function body was not available but now it
   is.  BP is a bitpack with all the bitflags for NODE read from the
   stream.

   NOTE: the unpack order below must exactly mirror the pack order on the
   writer side; do not reorder these statements.  */

static void
input_overwrite_node (struct lto_file_decl_data *file_data,
		      struct cgraph_node *node,
		      enum LTO_symtab_tags tag,
		      struct bitpack_d *bp)
{
  /* Stash the tag in AUX; input_cgraph_1 uses it to tell which nodes
     came from this stream.  */
  node->aux = (void *) tag;
  node->lto_file_data = file_data;

  node->local = bp_unpack_value (bp, 1);
  node->externally_visible = bp_unpack_value (bp, 1);
  node->no_reorder = bp_unpack_value (bp, 1);
  node->definition = bp_unpack_value (bp, 1);
  node->versionable = bp_unpack_value (bp, 1);
  node->can_change_signature = bp_unpack_value (bp, 1);
  node->redefined_extern_inline = bp_unpack_value (bp, 1);
  node->force_output = bp_unpack_value (bp, 1);
  node->forced_by_abi = bp_unpack_value (bp, 1);
  node->unique_name = bp_unpack_value (bp, 1);
  node->body_removed = bp_unpack_value (bp, 1);
  node->implicit_section = bp_unpack_value (bp, 1);
  node->address_taken = bp_unpack_value (bp, 1);
  node->used_from_other_partition = bp_unpack_value (bp, 1);
  node->lowered = bp_unpack_value (bp, 1);
  /* ANALYZED is implied by the tag rather than streamed as a bit.  */
  node->analyzed = tag == LTO_symtab_analyzed_node;
  node->in_other_partition = bp_unpack_value (bp, 1);
  if (node->in_other_partition
      /* Avoid updating decl when we are seeing just inline clone.
	 When inlining function that has functions already inlined into it,
	 we produce clones of inline clones.

	 WPA partitioning might put each clone into different unit and
	 we might end up streaming inline clone from other partition
	 to support clone we are interested in.  */
      && (!node->clone_of
	  || node->clone_of->decl != node->decl))
    {
      DECL_EXTERNAL (node->decl) = 1;
      TREE_STATIC (node->decl) = 0;
    }
  node->alias = bp_unpack_value (bp, 1);
  node->transparent_alias = bp_unpack_value (bp, 1);
  node->weakref = bp_unpack_value (bp, 1);
  node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
  node->only_called_at_startup = bp_unpack_value (bp, 1);
  node->only_called_at_exit = bp_unpack_value (bp, 1);
  node->tm_clone = bp_unpack_value (bp, 1);
  node->calls_comdat_local = bp_unpack_value (bp, 1);
  node->icf_merged = bp_unpack_value (bp, 1);
  node->nonfreeing_fn = bp_unpack_value (bp, 1);
  node->thunk.thunk_p = bp_unpack_value (bp, 1);
  node->parallelized_function = bp_unpack_value (bp, 1);
  node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
				     LDPR_NUM_KNOWN);
  node->split_part = bp_unpack_value (bp, 1);
  verify_node_partition (node);
}
1187
1188 /* Return string alias is alias of. */
1189
1190 static tree
1191 get_alias_symbol (tree decl)
1192 {
1193 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1194 return get_identifier (TREE_STRING_POINTER
1195 (TREE_VALUE (TREE_VALUE (alias))));
1196 }
1197
/* Read a cgraph node from input_block IB.  TAG is the node's tag just read.
   NODES holds all previously read nodes for resolving the clone reference.
   Return the node read or overwritten.

   The read order below must exactly mirror lto_output_node; do not
   reorder the streamer calls.  */

static struct cgraph_node *
input_node (struct lto_file_decl_data *file_data,
	    class lto_input_block *ib,
	    enum LTO_symtab_tags tag,
	    vec<symtab_node *> nodes)
{
  gcc::pass_manager *passes = g->get_passes ();
  tree fn_decl;
  struct cgraph_node *node;
  struct bitpack_d bp;
  unsigned decl_index;
  int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
  int clone_ref;
  int order;
  int i, count;
  tree group;
  const char *section;
  /* Orders are rebased per input file so they stay globally unique.  */
  order = streamer_read_hwi (ib) + file_data->order_base;
  clone_ref = streamer_read_hwi (ib);

  decl_index = streamer_read_uhwi (ib);
  fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);

  if (clone_ref != LCC_NOT_FOUND)
    {
      /* This node is a clone of an earlier node in the stream.  */
      node = dyn_cast<cgraph_node *> (nodes[clone_ref])->create_clone (fn_decl,
	profile_count::uninitialized (), false,
	vNULL, false, NULL, NULL);
    }
  else
    {
      /* Declaration of functions can be already merged with a declaration
	 from other input file.  We keep cgraph unmerged until after streaming
	 of ipa passes is done.  Always forcingly create a fresh node.  */
      node = symtab->create_empty ();
      node->decl = fn_decl;
      if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (fn_decl)))
	node->ifunc_resolver = 1;
      node->register_symbol ();
    }

  node->order = order;
  if (order >= symtab->order)
    symtab->order = order + 1;

  node->count = profile_count::stream_in (ib);
  node->count_materialization_scale = streamer_read_hwi (ib);

  /* Read the list of IPA transform passes still to be applied to the
     body when it is materialized.  */
  count = streamer_read_hwi (ib);
  node->ipa_transforms_to_apply = vNULL;
  for (i = 0; i < count; i++)
    {
      opt_pass *pass;
      int pid = streamer_read_hwi (ib);

      gcc_assert (pid < passes->passes_by_id_size);
      pass = passes->passes_by_id[pid];
      node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass_d *) pass);
    }

  /* Only analyzed nodes stream an inlined_to reference.  */
  if (tag == LTO_symtab_analyzed_node)
    ref = streamer_read_hwi (ib);

  group = read_identifier (ib);
  if (group)
    ref2 = streamer_read_hwi (ib);

  /* Make sure that we have not read this node before.  Nodes that
     have already been read will have their tag stored in the 'aux'
     field.  Since built-in functions can be referenced in multiple
     functions, they are expected to be read more than once.  */
  if (node->aux && !fndecl_built_in_p (node->decl))
    internal_error ("bytecode stream: found multiple instances of cgraph "
		    "node with uid %d", node->get_uid ());

  node->tp_first_run = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);

  input_overwrite_node (file_data, node, tag, &bp);

  /* Store a reference for now, and fix up later to be a pointer.  */
  node->inlined_to = (cgraph_node *) (intptr_t) ref;

  if (group)
    {
      node->set_comdat_group (group);
      /* Store a reference for now, and fix up later to be a pointer.  */
      node->same_comdat_group = (symtab_node *) (intptr_t) ref2;
    }
  else
    node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
  section = read_string (ib);
  if (section)
    node->set_section_for_node (section);

  if (node->definition)
    {
      /* Thunk metadata is streamed for every definition; TYPE encodes
	 this_adjusting in bit 1 and virtual_offset_p in bit 2.  */
      int type = streamer_read_uhwi (ib);
      HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
      HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);
      HOST_WIDE_INT indirect_offset = streamer_read_uhwi (ib);

      node->thunk.fixed_offset = fixed_offset;
      node->thunk.virtual_value = virtual_value;
      node->thunk.indirect_offset = indirect_offset;
      node->thunk.this_adjusting = (type & 2);
      node->thunk.virtual_offset_p = (type & 4);
    }
  /* Weakref aliases without analyzed bodies record their target via the
     "alias" attribute on the decl.  */
  if (node->alias && !node->analyzed && node->weakref)
    node->alias_target = get_alias_symbol (node->decl);
  node->profile_id = streamer_read_hwi (ib);
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    node->set_init_priority (streamer_read_hwi (ib));
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    node->set_fini_priority (streamer_read_hwi (ib));

  return node;
}
1320
1321 /* Read a node from input_block IB. TAG is the node's tag just read.
1322 Return the node read or overwriten. */
1323
1324 static varpool_node *
1325 input_varpool_node (struct lto_file_decl_data *file_data,
1326 class lto_input_block *ib)
1327 {
1328 int decl_index;
1329 tree var_decl;
1330 varpool_node *node;
1331 struct bitpack_d bp;
1332 int ref = LCC_NOT_FOUND;
1333 int order;
1334 tree group;
1335 const char *section;
1336
1337 order = streamer_read_hwi (ib) + file_data->order_base;
1338 decl_index = streamer_read_uhwi (ib);
1339 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1340
1341 /* Declaration of functions can be already merged with a declaration
1342 from other input file. We keep cgraph unmerged until after streaming
1343 of ipa passes is done. Alays forcingly create a fresh node. */
1344 node = varpool_node::create_empty ();
1345 node->decl = var_decl;
1346 node->register_symbol ();
1347
1348 node->order = order;
1349 if (order >= symtab->order)
1350 symtab->order = order + 1;
1351 node->lto_file_data = file_data;
1352
1353 bp = streamer_read_bitpack (ib);
1354 node->externally_visible = bp_unpack_value (&bp, 1);
1355 node->no_reorder = bp_unpack_value (&bp, 1);
1356 node->force_output = bp_unpack_value (&bp, 1);
1357 node->forced_by_abi = bp_unpack_value (&bp, 1);
1358 node->unique_name = bp_unpack_value (&bp, 1);
1359 node->body_removed = bp_unpack_value (&bp, 1);
1360 node->implicit_section = bp_unpack_value (&bp, 1);
1361 node->writeonly = bp_unpack_value (&bp, 1);
1362 node->definition = bp_unpack_value (&bp, 1);
1363 node->alias = bp_unpack_value (&bp, 1);
1364 node->transparent_alias = bp_unpack_value (&bp, 1);
1365 node->weakref = bp_unpack_value (&bp, 1);
1366 node->analyzed = bp_unpack_value (&bp, 1);
1367 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1368 node->in_other_partition = bp_unpack_value (&bp, 1);
1369 if (node->in_other_partition)
1370 {
1371 DECL_EXTERNAL (node->decl) = 1;
1372 TREE_STATIC (node->decl) = 0;
1373 }
1374 if (node->alias && !node->analyzed && node->weakref)
1375 node->alias_target = get_alias_symbol (node->decl);
1376 node->tls_model = (enum tls_model)bp_unpack_value (&bp, 3);
1377 node->used_by_single_function = (enum tls_model)bp_unpack_value (&bp, 1);
1378 node->dynamically_initialized = bp_unpack_value (&bp, 1);
1379 group = read_identifier (ib);
1380 if (group)
1381 {
1382 node->set_comdat_group (group);
1383 ref = streamer_read_hwi (ib);
1384 /* Store a reference for now, and fix up later to be a pointer. */
1385 node->same_comdat_group = (symtab_node *) (intptr_t) ref;
1386 }
1387 else
1388 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1389 section = read_string (ib);
1390 if (section)
1391 node->set_section_for_node (section);
1392 node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1393 LDPR_NUM_KNOWN);
1394 verify_node_partition (node);
1395 return node;
1396 }
1397
1398 /* Read a node from input_block IB. TAG is the node's tag just read.
1399 Return the node read or overwriten. */
1400
1401 static void
1402 input_ref (class lto_input_block *ib,
1403 symtab_node *referring_node,
1404 vec<symtab_node *> nodes)
1405 {
1406 symtab_node *node = NULL;
1407 struct bitpack_d bp;
1408 enum ipa_ref_use use;
1409 bool speculative;
1410 struct ipa_ref *ref;
1411
1412 bp = streamer_read_bitpack (ib);
1413 use = (enum ipa_ref_use) bp_unpack_value (&bp, 3);
1414 speculative = (enum ipa_ref_use) bp_unpack_value (&bp, 1);
1415 node = nodes[streamer_read_hwi (ib)];
1416 ref = referring_node->create_reference (node, use);
1417 ref->speculative = speculative;
1418 if (is_a <cgraph_node *> (referring_node))
1419 ref->lto_stmt_uid = streamer_read_hwi (ib);
1420 }
1421
/* Read an edge from IB.  NODES points to a vector of previously read nodes for
   decoding caller and callee of the edge to be read.  If INDIRECT is true, the
   edge being read is indirect (in the sense that it has
   indirect_unknown_callee set).

   The read order must mirror lto_output_edge exactly.  */

static void
input_edge (class lto_input_block *ib, vec<symtab_node *> nodes,
	    bool indirect)
{
  struct cgraph_node *caller, *callee;
  struct cgraph_edge *edge;
  unsigned int stmt_id;
  profile_count count;
  cgraph_inline_failed_t inline_failed;
  struct bitpack_d bp;
  int ecf_flags = 0;

  caller = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
  if (caller == NULL || caller->decl == NULL_TREE)
    internal_error ("bytecode stream: no caller found while reading edge");

  if (!indirect)
    {
      callee = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
      if (callee == NULL || callee->decl == NULL_TREE)
	internal_error ("bytecode stream: no callee found while reading edge");
    }
  else
    callee = NULL;

  count = profile_count::stream_in (ib);

  bp = streamer_read_bitpack (ib);
  inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_t, CIF_N_REASONS);
  stmt_id = bp_unpack_var_len_unsigned (&bp);

  if (indirect)
    edge = caller->create_indirect_edge (NULL, 0, count);
  else
    edge = caller->create_edge (callee, NULL, count);

  edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
  edge->speculative = bp_unpack_value (&bp, 1);
  edge->lto_stmt_uid = stmt_id;
  edge->inline_failed = inline_failed;
  edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
  edge->can_throw_external = bp_unpack_value (&bp, 1);
  edge->in_polymorphic_cdtor = bp_unpack_value (&bp, 1);
  if (indirect)
    {
      /* Indirect edges additionally carry the callee's ECF flags and the
	 speculative common-target info, one bit per flag.  */
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_CONST;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_PURE;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NORETURN;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_MALLOC;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NOTHROW;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_RETURNS_TWICE;
      edge->indirect_info->ecf_flags = ecf_flags;
      edge->indirect_info->common_target_id = streamer_read_hwi (ib);
      /* The probability is only present when a common target was found.  */
      if (edge->indirect_info->common_target_id)
	edge->indirect_info->common_target_probability = streamer_read_hwi (ib);
    }
}
1490

/* Read a cgraph from IB using the info in FILE_DATA.  Returns the vector
   of all nodes read, in stream order; inlined_to and same_comdat_group
   references are fixed up from stream indices to pointers before
   returning.  */

static vec<symtab_node *>
input_cgraph_1 (struct lto_file_decl_data *file_data,
		class lto_input_block *ib)
{
  enum LTO_symtab_tags tag;
  vec<symtab_node *> nodes = vNULL;
  symtab_node *node;
  unsigned i;

  tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
  file_data->order_base = symtab->order;
  /* A zero tag (written by output_symtab) terminates the stream.  */
  while (tag)
    {
      if (tag == LTO_symtab_edge)
	input_edge (ib, nodes, false);
      else if (tag == LTO_symtab_indirect_edge)
	input_edge (ib, nodes, true);
      else if (tag == LTO_symtab_variable)
	{
	  node = input_varpool_node (file_data, ib);
	  nodes.safe_push (node);
	  lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
	}
      else
	{
	  node = input_node (file_data, ib, tag, nodes);
	  if (node == NULL || node->decl == NULL_TREE)
	    internal_error ("bytecode stream: found empty cgraph node");
	  nodes.safe_push (node);
	  lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
	}

      tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
    }

  lto_input_toplevel_asms (file_data, file_data->order_base);

  /* AUX pointers should be all non-zero for function nodes read from the
     stream (input_overwrite_node stores the tag there).  */
  if (flag_checking)
    {
      FOR_EACH_VEC_ELT (nodes, i, node)
	gcc_assert (node->aux || !is_a <cgraph_node *> (node));
    }
  FOR_EACH_VEC_ELT (nodes, i, node)
    {
      int ref;
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  ref = (int) (intptr_t) cnode->inlined_to;

	  /* We share declaration of builtins, so we may read same node
	     twice.  */
	  if (!node->aux)
	    continue;
	  node->aux = NULL;

	  /* Fixup inlined_to from reference to pointer.  */
	  if (ref != LCC_NOT_FOUND)
	    dyn_cast<cgraph_node *> (node)->inlined_to
	      = dyn_cast<cgraph_node *> (nodes[ref]);
	  else
	    cnode->inlined_to = NULL;
	}

      ref = (int) (intptr_t) node->same_comdat_group;

      /* Fixup same_comdat_group from reference to pointer.  */
      if (ref != LCC_NOT_FOUND)
	node->same_comdat_group = nodes[ref];
      else
	node->same_comdat_group = NULL;
    }
  /* Re-mark function nodes in AUX so input_symtab can later clear the
     markers on everything that came from a stream.  */
  FOR_EACH_VEC_ELT (nodes, i, node)
    node->aux = is_a <cgraph_node *> (node) ? (void *)1 : NULL;
  return nodes;
}
1569
1570 /* Input ipa_refs. */
1571
1572 static void
1573 input_refs (class lto_input_block *ib,
1574 vec<symtab_node *> nodes)
1575 {
1576 int count;
1577 int idx;
1578 while (true)
1579 {
1580 symtab_node *node;
1581 count = streamer_read_uhwi (ib);
1582 if (!count)
1583 break;
1584 idx = streamer_read_uhwi (ib);
1585 node = nodes[idx];
1586 while (count)
1587 {
1588 input_ref (ib, node, nodes);
1589 count--;
1590 }
1591 }
1592 }
1593
1594 /* Input profile_info from IB. */
1595 static void
1596 input_profile_summary (class lto_input_block *ib,
1597 struct lto_file_decl_data *file_data)
1598 {
1599 unsigned int runs = streamer_read_uhwi (ib);
1600 if (runs)
1601 {
1602 file_data->profile_info.runs = runs;
1603
1604 /* IPA-profile computes hot bb threshold based on cumulated
1605 whole program profile. We need to stream it down to ltrans. */
1606 if (flag_ltrans)
1607 set_hot_bb_threshold (streamer_read_gcov_count (ib));
1608 }
1609
1610 }
1611
1612 /* Rescale profile summaries to the same number of runs in the whole unit. */
1613
1614 static void
1615 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1616 {
1617 struct lto_file_decl_data *file_data;
1618 unsigned int j;
1619 gcov_unsigned_t max_runs = 0;
1620 struct cgraph_node *node;
1621 struct cgraph_edge *edge;
1622
1623 /* Find unit with maximal number of runs. If we ever get serious about
1624 roundoff errors, we might also consider computing smallest common
1625 multiply. */
1626 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1627 if (max_runs < file_data->profile_info.runs)
1628 max_runs = file_data->profile_info.runs;
1629
1630 if (!max_runs)
1631 return;
1632
1633 /* Simple overflow check. We probably don't need to support that many train
1634 runs. Such a large value probably imply data corruption anyway. */
1635 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1636 {
1637 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1638 INT_MAX / REG_BR_PROB_BASE);
1639 return;
1640 }
1641
1642 profile_info = XCNEW (gcov_summary);
1643 profile_info->runs = max_runs;
1644
1645 /* If merging already happent at WPA time, we are done. */
1646 if (flag_ltrans)
1647 return;
1648
1649 /* Now compute count_materialization_scale of each node.
1650 During LTRANS we already have values of count_materialization_scale
1651 computed, so just update them. */
1652 FOR_EACH_FUNCTION (node)
1653 if (node->lto_file_data
1654 && node->lto_file_data->profile_info.runs)
1655 {
1656 int scale;
1657
1658 scale = RDIV (node->count_materialization_scale * max_runs,
1659 node->lto_file_data->profile_info.runs);
1660 node->count_materialization_scale = scale;
1661 if (scale < 0)
1662 fatal_error (input_location, "Profile information in %s corrupted",
1663 file_data->file_name);
1664
1665 if (scale == REG_BR_PROB_BASE)
1666 continue;
1667 for (edge = node->callees; edge; edge = edge->next_callee)
1668 if (edge->count.ipa ().nonzero_p ())
1669 edge->count = edge->count.apply_scale (scale, REG_BR_PROB_BASE);
1670 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
1671 if (edge->count.ipa ().nonzero_p ())
1672 edge->count = edge->count.apply_scale (scale, REG_BR_PROB_BASE);
1673 if (node->count.ipa ().nonzero_p ())
1674 node->count = node->count.apply_scale (scale, REG_BR_PROB_BASE);
1675 }
1676 }
1677
/* Input and merge the symtab from each of the .o files passed to
   lto1.  Reads the symtab-nodes and refs sections of every file, merges
   their profile summaries, and clears the temporary AUX markers.  */

void
input_symtab (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;
  struct cgraph_node *node;

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      class lto_input_block *ib;
      vec<symtab_node *> nodes;

      /* Read the node/edge stream for this file.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
					  &data, &len);
      if (!ib)
	fatal_error (input_location,
		     "cannot find LTO cgraph in %s", file_data->file_name);
      input_profile_summary (ib, file_data);
      file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
      nodes = input_cgraph_1 (file_data, ib);
      lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
				      ib, data, len);

      /* Read the IPA references for this file.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_refs,
					  &data, &len);
      if (!ib)
	fatal_error (input_location, "cannot find LTO section refs in %s",
		     file_data->file_name);
      input_refs (ib, nodes);
      lto_destroy_simple_input_block (file_data, LTO_section_refs,
				      ib, data, len);
      if (flag_ltrans)
	input_cgraph_opt_summary (nodes);
      nodes.release ();
    }

  merge_profile_summaries (file_data_vec);

  /* Clear out the aux field that was used to store enough state to
     tell which nodes should be overwritten.  */
  FOR_EACH_FUNCTION (node)
    {
      /* Some nodes may have been created by cgraph_node.  This
	 happens when the callgraph contains nested functions.  If the
	 node for the parent function was never emitted to the gimple
	 file, cgraph_node will create a node for it when setting the
	 context of the nested function.  */
      if (node->lto_file_data)
	node->aux = NULL;
    }
}
1735
/* Input function/variable tables that will allow libgomp to look up offload
   target code, and store them into OFFLOAD_FUNCS and OFFLOAD_VARS.  If
   DO_FORCE_OUTPUT, mark the read symbols as force-output so IPA does not
   remove them as unreachable.  */

void
input_offload_tables (bool do_force_output)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      class lto_input_block *ib
	= lto_create_simple_input_block (file_data, LTO_section_offload_table,
					 &data, &len);
      /* Files without an offload table are simply skipped.  */
      if (!ib)
	continue;

      /* The table layout is produced by output_offload_tables: a tag per
	 entry, zero tag terminates.  */
      enum LTO_symtab_tags tag
	= streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
      while (tag)
	{
	  if (tag == LTO_symtab_unavail_node)
	    {
	      int decl_index = streamer_read_uhwi (ib);
	      tree fn_decl
		= lto_file_decl_data_get_fn_decl (file_data, decl_index);
	      vec_safe_push (offload_funcs, fn_decl);

	      /* Prevent IPA from removing fn_decl as unreachable, since there
		 may be no refs from the parent function to child_fn in offload
		 LTO mode.  */
	      if (do_force_output)
		cgraph_node::get (fn_decl)->mark_force_output ();
	    }
	  else if (tag == LTO_symtab_variable)
	    {
	      int decl_index = streamer_read_uhwi (ib);
	      tree var_decl
		= lto_file_decl_data_get_var_decl (file_data, decl_index);
	      vec_safe_push (offload_vars, var_decl);

	      /* Prevent IPA from removing var_decl as unused, since there
		 may be no refs to var_decl in offload LTO mode.  */
	      if (do_force_output)
		varpool_node::get (var_decl)->force_output = 1;
	    }
	  else
	    fatal_error (input_location,
			 "invalid offload table in %s", file_data->file_name);

	  tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
	}

      lto_destroy_simple_input_block (file_data, LTO_section_offload_table,
				      ib, data, len);
    }
}
1796
1797 /* True when we need optimization summary for NODE. */
1798
1799 static int
1800 output_cgraph_opt_summary_p (struct cgraph_node *node)
1801 {
1802 return ((node->clone_of || node->former_clone_of)
1803 && (node->clone.tree_map
1804 || node->clone.param_adjustments));
1805 }
1806
/* Output optimization summary for EDGE to OB.  Intentionally a no-op:
   no per-edge optimization data is currently streamed, but the hook is
   kept so output_node_opt_summary stays symmetric with the input side.  */
static void
output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
			 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
{
}
1813
/* Output optimization summary for NODE to OB: the clone's parameter
   adjustments and replacement tree map, followed by per-edge summaries
   when NODE is in the partition.  The write order must mirror
   input_node_opt_summary.  */

static void
output_node_opt_summary (struct output_block *ob,
			 struct cgraph_node *node,
			 lto_symtab_encoder_t encoder)
{
  struct ipa_replace_map *map;
  int i;
  struct cgraph_edge *e;

  /* TODO: Should this code be moved to ipa-param-manipulation?  */
  struct bitpack_d bp;
  bp = bitpack_create (ob->main_stream);
  /* One presence bit tells the reader whether adjustments follow.  */
  bp_pack_value (&bp, (node->clone.param_adjustments != NULL), 1);
  streamer_write_bitpack (&bp);
  if (ipa_param_adjustments *adjustments = node->clone.param_adjustments)
    {
      streamer_write_uhwi (ob, vec_safe_length (adjustments->m_adj_params));
      ipa_adjusted_param *adj;
      FOR_EACH_VEC_SAFE_ELT (adjustments->m_adj_params, i, adj)
	{
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, adj->base_index, IPA_PARAM_MAX_INDEX_BITS);
	  bp_pack_value (&bp, adj->prev_clone_index, IPA_PARAM_MAX_INDEX_BITS);
	  bp_pack_value (&bp, adj->op, 2);
	  bp_pack_value (&bp, adj->param_prefix_index, 2);
	  bp_pack_value (&bp, adj->prev_clone_adjustment, 1);
	  bp_pack_value (&bp, adj->reverse, 1);
	  bp_pack_value (&bp, adj->user_flag, 1);
	  streamer_write_bitpack (&bp);
	  /* Split and new params additionally carry type information.  */
	  if (adj->op == IPA_PARAM_OP_SPLIT
	      || adj->op == IPA_PARAM_OP_NEW)
	    {
	      stream_write_tree (ob, adj->type, true);
	      if (adj->op == IPA_PARAM_OP_SPLIT)
		{
		  stream_write_tree (ob, adj->alias_ptr_type, true);
		  streamer_write_uhwi (ob, adj->unit_offset);
		}
	    }
	}
      streamer_write_hwi (ob, adjustments->m_always_copy_start);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, node->clone.param_adjustments->m_skip_return, 1);
      streamer_write_bitpack (&bp);
    }

  /* Stream the parameter replacement map.  */
  streamer_write_uhwi (ob, vec_safe_length (node->clone.tree_map));
  FOR_EACH_VEC_SAFE_ELT (node->clone.tree_map, i, map)
    {
      streamer_write_uhwi (ob, map->parm_num);
      gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
      stream_write_tree (ob, map->new_tree, true);
    }

  /* Edge summaries are only streamed for nodes inside the partition.  */
  if (lto_symtab_encoder_in_partition_p (encoder, node))
    {
      for (e = node->callees; e; e = e->next_callee)
	output_edge_opt_summary (ob, e);
      for (e = node->indirect_calls; e; e = e->next_callee)
	output_edge_opt_summary (ob, e);
    }
}
1878
/* Output optimization summaries stored in callgraph.
   At the moment it is the clone info structure.  The section begins with
   the number of summarized nodes, then for each one its encoder index
   followed by its summary.  */

static void
output_cgraph_opt_summary (void)
{
  int i, n_nodes;
  lto_symtab_encoder_t encoder;
  struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
  unsigned count = 0;

  ob->symbol = NULL;
  encoder = ob->decl_state->symtab_node_encoder;
  n_nodes = lto_symtab_encoder_size (encoder);
  /* First pass: count nodes needing a summary so the reader knows how
     many records follow.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
      if (cnode && output_cgraph_opt_summary_p (cnode))
	count++;
    }
  streamer_write_uhwi (ob, count);
  /* Second pass: emit (index, summary) pairs for those same nodes.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
      if (cnode && output_cgraph_opt_summary_p (cnode))
	{
	  streamer_write_uhwi (ob, i);
	  output_node_opt_summary (ob, cnode, encoder);
	}
    }
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
1914
1915 /* Input optimisation summary of EDGE. */
1916
static void
input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
			class lto_input_block *ib_main ATTRIBUTE_UNUSED)
{
  /* Nothing is currently read per edge; this placeholder is kept so the
     read side mirrors the structure of the write side (presumably
     output_edge_opt_summary is likewise empty — the stream stays in sync
     only as long as the two agree).  */
}
1922
1923 /* Input optimisation summary of NODE. */
1924
static void
input_node_opt_summary (struct cgraph_node *node,
			class lto_input_block *ib_main,
			class data_in *data_in)
{
  int i;
  int count;
  struct cgraph_edge *e;

  /* TODO: Should this code be moved to ipa-param-manipulation? */
  struct bitpack_d bp;
  bp = streamer_read_bitpack (ib_main);
  /* A single flag tells whether param_adjustments were streamed at all.
     The sequence of reads below must exactly mirror the write side
     (output_node_opt_summary); any reordering desynchronizes the
     stream.  */
  bool have_adjustments = bp_unpack_value (&bp, 1);
  if (have_adjustments)
    {
      count = streamer_read_uhwi (ib_main);
      vec<ipa_adjusted_param, va_gc> *new_params = NULL;
      for (i = 0; i < count; i++)
	{
	  ipa_adjusted_param adj;
	  /* Zero the whole record first so any field not explicitly
	     streamed is in a well-defined state.  */
	  memset (&adj, 0, sizeof (adj));
	  bp = streamer_read_bitpack (ib_main);
	  adj.base_index = bp_unpack_value (&bp, IPA_PARAM_MAX_INDEX_BITS);
	  adj.prev_clone_index
	    = bp_unpack_value (&bp, IPA_PARAM_MAX_INDEX_BITS);
	  adj.op = (enum ipa_parm_op) bp_unpack_value (&bp, 2);
	  adj.param_prefix_index = bp_unpack_value (&bp, 2);
	  adj.prev_clone_adjustment = bp_unpack_value (&bp, 1);
	  adj.reverse = bp_unpack_value (&bp, 1);
	  adj.user_flag = bp_unpack_value (&bp, 1);
	  /* Type information is only streamed for split or newly added
	     parameters; split ones additionally carry the alias pointer
	     type and unit offset of the piece.  */
	  if (adj.op == IPA_PARAM_OP_SPLIT
	      || adj.op == IPA_PARAM_OP_NEW)
	    {
	      adj.type = stream_read_tree (ib_main, data_in);
	      if (adj.op == IPA_PARAM_OP_SPLIT)
		{
		  adj.alias_ptr_type = stream_read_tree (ib_main, data_in);
		  adj.unit_offset = streamer_read_uhwi (ib_main);
		}
	    }
	  vec_safe_push (new_params, adj);
	}
      int always_copy_start = streamer_read_hwi (ib_main);
      bp = streamer_read_bitpack (ib_main);
      bool skip_return = bp_unpack_value (&bp, 1);
      /* Reconstruct the adjustments object in GC-managed memory and
	 attach it to the clone info of NODE.  */
      node->clone.param_adjustments
	= (new (ggc_alloc <ipa_param_adjustments> ())
	   ipa_param_adjustments (new_params, always_copy_start, skip_return));
    }

  /* Read the tree map: pairs of parameter number and replacement tree
     (the write side asserts the tree carries no location).  */
  count = streamer_read_uhwi (ib_main);
  for (i = 0; i < count; i++)
    {
      struct ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();

      vec_safe_push (node->clone.tree_map, map);
      map->parm_num = streamer_read_uhwi (ib_main);
      map->new_tree = stream_read_tree (ib_main, data_in);
    }
  /* Per-edge summaries are currently empty (see input_edge_opt_summary),
     but the calls are kept so read and write sides stay structurally in
     sync.  */
  for (e = node->callees; e; e = e->next_callee)
    input_edge_opt_summary (e, ib_main);
  for (e = node->indirect_calls; e; e = e->next_callee)
    input_edge_opt_summary (e, ib_main);
}
1989
1990 /* Read section in file FILE_DATA of length LEN with data DATA. */
1991
1992 static void
1993 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
1994 const char *data, size_t len,
1995 vec<symtab_node *> nodes)
1996 {
1997 const struct lto_function_header *header =
1998 (const struct lto_function_header *) data;
1999 const int cfg_offset = sizeof (struct lto_function_header);
2000 const int main_offset = cfg_offset + header->cfg_size;
2001 const int string_offset = main_offset + header->main_size;
2002 class data_in *data_in;
2003 unsigned int i;
2004 unsigned int count;
2005
2006 lto_input_block ib_main ((const char *) data + main_offset,
2007 header->main_size, file_data->mode_table);
2008
2009 data_in =
2010 lto_data_in_create (file_data, (const char *) data + string_offset,
2011 header->string_size, vNULL);
2012 count = streamer_read_uhwi (&ib_main);
2013
2014 for (i = 0; i < count; i++)
2015 {
2016 int ref = streamer_read_uhwi (&ib_main);
2017 input_node_opt_summary (dyn_cast<cgraph_node *> (nodes[ref]),
2018 &ib_main, data_in);
2019 }
2020 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
2021 len);
2022 lto_data_in_delete (data_in);
2023 }
2024
2025 /* Input optimization summary of cgraph. */
2026
2027 static void
2028 input_cgraph_opt_summary (vec<symtab_node *> nodes)
2029 {
2030 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2031 struct lto_file_decl_data *file_data;
2032 unsigned int j = 0;
2033
2034 while ((file_data = file_data_vec[j++]))
2035 {
2036 size_t len;
2037 const char *data
2038 = lto_get_summary_section_data (file_data, LTO_section_cgraph_opt_sum,
2039 &len);
2040 if (data)
2041 input_cgraph_opt_section (file_data, data, len, nodes);
2042 }
2043 }