Update copyright years.
[gcc.git] / gcc / lto-cgraph.c
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright (C) 2009-2020 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "predict.h"
31 #include "stringpool.h"
32 #include "tree-streamer.h"
33 #include "cgraph.h"
34 #include "tree-pass.h"
35 #include "profile.h"
36 #include "context.h"
37 #include "pass_manager.h"
38 #include "ipa-utils.h"
39 #include "omp-offload.h"
40 #include "stringpool.h"
41 #include "attribs.h"
42
/* True once toplevel asm nodes have been output (they are emitted only to
   the first partition written; see output_symtab).  */
bool asm_nodes_output = false;
45
46 static void output_cgraph_opt_summary (void);
47 static void input_cgraph_opt_summary (vec<symtab_node *> nodes);
48
49 /* Number of LDPR values known to GCC. */
50 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
51
/* Cgraph streaming is organized as a set of records whose type
   is indicated by a tag.  */
enum LTO_symtab_tags
{
  /* Must leave 0 for the stopper.  */

  /* Cgraph node without body available.  */
  LTO_symtab_unavail_node = 1,
  /* Cgraph node with function body.  */
  LTO_symtab_analyzed_node,
  /* Cgraph edges.  */
  LTO_symtab_edge,
  LTO_symtab_indirect_edge,
  LTO_symtab_variable,
  LTO_symtab_last_tag
};
68
69 /* Create a new symtab encoder.
70 if FOR_INPUT, the encoder allocate only datastructures needed
71 to read the symtab. */
72
73 lto_symtab_encoder_t
74 lto_symtab_encoder_new (bool for_input)
75 {
76 lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
77
78 if (!for_input)
79 encoder->map = new hash_map<symtab_node *, size_t>;
80 encoder->nodes.create (0);
81 return encoder;
82 }
83
84
85 /* Delete ENCODER and its components. */
86
87 void
88 lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
89 {
90 encoder->nodes.release ();
91 if (encoder->map)
92 delete encoder->map;
93 free (encoder);
94 }
95
96
97 /* Return the existing reference number of NODE in the symtab encoder in
98 output block OB. Assign a new reference if this is the first time
99 NODE is encoded. */
100
101 int
102 lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
103 symtab_node *node)
104 {
105 int ref;
106
107 if (!encoder->map)
108 {
109 lto_encoder_entry entry = {node, false, false, false};
110
111 ref = encoder->nodes.length ();
112 encoder->nodes.safe_push (entry);
113 return ref;
114 }
115
116 size_t *slot = encoder->map->get (node);
117 if (!slot || !*slot)
118 {
119 lto_encoder_entry entry = {node, false, false, false};
120 ref = encoder->nodes.length ();
121 if (!slot)
122 encoder->map->put (node, ref + 1);
123 encoder->nodes.safe_push (entry);
124 }
125 else
126 ref = *slot - 1;
127
128 return ref;
129 }
130
131 /* Remove NODE from encoder. */
132
133 bool
134 lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
135 symtab_node *node)
136 {
137 int index;
138 lto_encoder_entry last_node;
139
140 size_t *slot = encoder->map->get (node);
141 if (slot == NULL || !*slot)
142 return false;
143
144 index = *slot - 1;
145 gcc_checking_assert (encoder->nodes[index].node == node);
146
147 /* Remove from vector. We do this by swapping node with the last element
148 of the vector. */
149 last_node = encoder->nodes.pop ();
150 if (last_node.node != node)
151 {
152 gcc_assert (encoder->map->put (last_node.node, index + 1));
153
154 /* Move the last element to the original spot of NODE. */
155 encoder->nodes[index] = last_node;
156 }
157
158 /* Remove element from hash table. */
159 encoder->map->remove (node);
160 return true;
161 }
162
163
164 /* Return TRUE if we should encode the body of NODE (if any). */
165
166 bool
167 lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
168 struct cgraph_node *node)
169 {
170 int index = lto_symtab_encoder_lookup (encoder, node);
171 return encoder->nodes[index].body;
172 }
173
174 /* Specify that we encode the body of NODE in this partition. */
175
176 static void
177 lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
178 struct cgraph_node *node)
179 {
180 int index = lto_symtab_encoder_encode (encoder, node);
181 gcc_checking_assert (encoder->nodes[index].node == node);
182 encoder->nodes[index].body = true;
183 }
184
185 /* Return TRUE if we should encode initializer of NODE (if any). */
186
187 bool
188 lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
189 varpool_node *node)
190 {
191 int index = lto_symtab_encoder_lookup (encoder, node);
192 if (index == LCC_NOT_FOUND)
193 return false;
194 return encoder->nodes[index].initializer;
195 }
196
197 /* Specify that we should encode initializer of NODE (if any). */
198
199 static void
200 lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
201 varpool_node *node)
202 {
203 int index = lto_symtab_encoder_lookup (encoder, node);
204 encoder->nodes[index].initializer = true;
205 }
206
207 /* Return TRUE if NODE is in this partition. */
208
209 bool
210 lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
211 symtab_node *node)
212 {
213 int index = lto_symtab_encoder_lookup (encoder, node);
214 if (index == LCC_NOT_FOUND)
215 return false;
216 return encoder->nodes[index].in_partition;
217 }
218
219 /* Specify that NODE is in this partition. */
220
221 void
222 lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
223 symtab_node *node)
224 {
225 int index = lto_symtab_encoder_encode (encoder, node);
226 encoder->nodes[index].in_partition = true;
227 }
228
/* Output the cgraph EDGE to OB using ENCODER.  The record layout (tag,
   caller ref, optional callee ref, count, bitpack, indirect-info) must
   match the reader exactly.  */

static void
lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
		 lto_symtab_encoder_t encoder)
{
  unsigned int uid;
  intptr_t ref;
  struct bitpack_d bp;

  /* Tag distinguishes direct from indirect edges.  */
  if (edge->indirect_unknown_callee)
    streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
			 LTO_symtab_indirect_edge);
  else
    streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
			 LTO_symtab_edge);

  /* The caller must already be in the encoder.  */
  ref = lto_symtab_encoder_lookup (encoder, edge->caller);
  gcc_assert (ref != LCC_NOT_FOUND);
  streamer_write_hwi_stream (ob->main_stream, ref);

  /* Indirect edges have no known callee to reference.  */
  if (!edge->indirect_unknown_callee)
    {
      ref = lto_symtab_encoder_lookup (encoder, edge->callee);
      gcc_assert (ref != LCC_NOT_FOUND);
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  edge->count.stream_out (ob->main_stream);

  bp = bitpack_create (ob->main_stream);
  /* When the caller has a gimple body the uid is taken from the call
     statement (biased by 1 so 0 can mean "no statement"); otherwise the
     cached lto_stmt_uid is reused.  */
  uid = (!gimple_has_body_p (edge->caller->decl) || edge->caller->thunk.thunk_p
	 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt) + 1);
  bp_pack_enum (&bp, cgraph_inline_failed_t,
	        CIF_N_REASONS, edge->inline_failed);
  bp_pack_var_len_unsigned (&bp, uid);
  bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
  bp_pack_value (&bp, edge->speculative, 1);
  bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
  gcc_assert (!edge->call_stmt_cannot_inline_p
	      || edge->inline_failed != CIF_BODY_NOT_AVAILABLE);
  bp_pack_value (&bp, edge->can_throw_external, 1);
  bp_pack_value (&bp, edge->in_polymorphic_cdtor, 1);
  if (edge->indirect_unknown_callee)
    {
      /* For indirect edges stream the subset of ECF flags that make
	 sense on an unknown callee, one bit each.  */
      int flags = edge->indirect_info->ecf_flags;
      bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
      /* Flags that should not appear on indirect calls.  */
      gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
			     | ECF_MAY_BE_ALLOCA
			     | ECF_SIBCALL
			     | ECF_LEAF
			     | ECF_NOVOPS)));
    }
  streamer_write_bitpack (&bp);
  if (edge->indirect_unknown_callee)
    {
      streamer_write_hwi_stream (ob->main_stream,
				 edge->indirect_info->common_target_id);
      /* The probability is only meaningful (and only streamed) when a
	 common target was identified.  */
      if (edge->indirect_info->common_target_id)
	streamer_write_hwi_stream
	   (ob->main_stream, edge->indirect_info->common_target_probability);
    }
}
298
299 /* Return if NODE contain references from other partitions. */
300
301 bool
302 referenced_from_other_partition_p (symtab_node *node, lto_symtab_encoder_t encoder)
303 {
304 int i;
305 struct ipa_ref *ref = NULL;
306
307 for (i = 0; node->iterate_referring (i, ref); i++)
308 {
309 /* Ignore references from non-offloadable nodes while streaming NODE into
310 offload LTO section. */
311 if (!ref->referring->need_lto_streaming)
312 continue;
313
314 if (ref->referring->in_other_partition
315 || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
316 return true;
317 }
318 return false;
319 }
320
321 /* Return true when node is reachable from other partition. */
322
323 bool
324 reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
325 {
326 struct cgraph_edge *e;
327 if (!node->definition)
328 return false;
329 if (node->inlined_to)
330 return false;
331 for (e = node->callers; e; e = e->next_caller)
332 {
333 /* Ignore references from non-offloadable nodes while streaming NODE into
334 offload LTO section. */
335 if (!e->caller->need_lto_streaming)
336 continue;
337
338 if (e->caller->in_other_partition
339 || !lto_symtab_encoder_in_partition_p (encoder, e->caller))
340 return true;
341 }
342 return false;
343 }
344
/* Return true if NODE has references coming from this partition
   (the original comment wrongly said "other partitions").  */

bool
referenced_from_this_partition_p (symtab_node *node,
				  lto_symtab_encoder_t encoder)
{
  int i;
  struct ipa_ref *ref = NULL;

  for (i = 0; node->iterate_referring (i, ref); i++)
    if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
      return true;
  return false;
}
359
/* Return true when NODE has a caller inside this partition
   (the original comment wrongly said "other partition").  */

bool
reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
{
  struct cgraph_edge *e;
  for (e = node->callers; e; e = e->next_caller)
    if (lto_symtab_encoder_in_partition_p (encoder, e->caller))
      return true;
  return false;
}
371
/* Output the cgraph NODE to OB.  ENCODER is used to find the
   reference number of NODE->inlined_to.  SET is the set of nodes we
   are writing to the current file.  If NODE is not in SET, then NODE
   is a boundary of a cgraph_node_set and we pretend NODE just has a
   decl and no callees.  WRITTEN_DECLS is the set of FUNCTION_DECLs
   that have had their callgraph node written so far.  This is used to
   determine if NODE is a clone of a previously written node.  */

static void
lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
		 lto_symtab_encoder_t encoder)
{
  unsigned int tag;
  struct bitpack_d bp;
  bool boundary_p;
  intptr_t ref;
  bool in_other_partition = false;
  struct cgraph_node *clone_of, *ultimate_clone_of;
  ipa_opt_pass_d *pass;
  int i;
  const char *comdat;
  const char *section;
  tree group;

  boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);

  /* Analyzed nodes in the partition (and aliases/thunks on the boundary)
     get the full record; everything else is streamed as unavailable.  */
  if (node->analyzed && (!boundary_p || node->alias
			 || (node->thunk.thunk_p && !node->inlined_to)))
    tag = LTO_symtab_analyzed_node;
  else
    tag = LTO_symtab_unavail_node;

  streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
		       tag);
  streamer_write_hwi_stream (ob->main_stream, node->order);

  /* In WPA mode, we only output part of the call-graph.  Also, we
     fake cgraph node attributes.  There are two cases that we care.

     Boundary nodes: There are nodes that are not part of SET but are
     called from within SET.  We artificially make them look like
     externally visible nodes with no function body.

     Cherry-picked nodes: These are nodes we pulled from other
     translation units into SET during IPA-inlining.  We make them as
     local static nodes to prevent clashes with other local statics.  */
  if (boundary_p && node->analyzed
      && node->get_partitioning_class () == SYMBOL_PARTITION)
    {
      /* Inline clones cannot be part of boundary.
	 gcc_assert (!node->inlined_to);

	 FIXME: At the moment they can be, when partition contains an inline
	 clone that is clone of inline clone from outside partition.  We can
	 reshape the clone tree and make other tree to be the root, but it
	 needs a bit extra work and will be promptly done by cgraph_remove_node
	 after reading back.  */
      in_other_partition = 1;
    }

  /* Find the nearest ancestor in the clone tree that is present in the
     encoder; walk sideways through siblings before going up.  */
  clone_of = node->clone_of;
  while (clone_of
	 && (ref = lto_symtab_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
    if (clone_of->prev_sibling_clone)
      clone_of = clone_of->prev_sibling_clone;
    else
      clone_of = clone_of->clone_of;

  /* See if body of the master function is output.  If not, we are seeing only
     a declaration and we do not need to pass down clone tree.  */
  ultimate_clone_of = clone_of;
  while (ultimate_clone_of && ultimate_clone_of->clone_of)
    ultimate_clone_of = ultimate_clone_of->clone_of;

  if (clone_of && !lto_symtab_encoder_encode_body_p (encoder, ultimate_clone_of))
    clone_of = NULL;

  if (tag == LTO_symtab_analyzed_node)
    gcc_assert (clone_of || !node->clone_of);
  if (!clone_of)
    streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
  else
    streamer_write_hwi_stream (ob->main_stream, ref);


  lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
  node->count.stream_out (ob->main_stream);
  streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);

  /* Stream the list of IPA transform passes still to be applied.  */
  streamer_write_hwi_stream (ob->main_stream,
			     node->ipa_transforms_to_apply.length ());
  FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
    streamer_write_hwi_stream (ob->main_stream, pass->static_pass_number);

  if (tag == LTO_symtab_analyzed_node)
    {
      /* Reference to the function this node was inlined into, or
	 LCC_NOT_FOUND for toplevel nodes.  */
      if (node->inlined_to)
	{
	  ref = lto_symtab_encoder_lookup (encoder, node->inlined_to);
	  gcc_assert (ref != LCC_NOT_FOUND);
	}
      else
	ref = LCC_NOT_FOUND;

      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  /* Comdat group is streamed as a NUL-terminated string; "" means none.  */
  group = node->get_comdat_group ();
  if (group)
    comdat = IDENTIFIER_POINTER (group);
  else
    comdat = "";
  streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);

  if (group)
    {
      if (node->same_comdat_group)
	{
	  ref = LCC_NOT_FOUND;
	  /* Pick the first member of the comdat-group ring that is
	     present in the encoder.  */
	  for (struct symtab_node *n = node->same_comdat_group;
	       ref == LCC_NOT_FOUND && n != node; n = n->same_comdat_group)
	    ref = lto_symtab_encoder_lookup (encoder, n);
	}
      else
	ref = LCC_NOT_FOUND;
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  section = node->get_section ();
  if (!section)
    section = "";

  streamer_write_hwi_stream (ob->main_stream, node->tp_first_run);

  /* Pack the many single-bit flags into one bitpack; the order must
     mirror input_node.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, node->local, 1);
  bp_pack_value (&bp, node->externally_visible, 1);
  bp_pack_value (&bp, node->no_reorder, 1);
  bp_pack_value (&bp, node->definition, 1);
  bp_pack_value (&bp, node->versionable, 1);
  bp_pack_value (&bp, node->can_change_signature, 1);
  bp_pack_value (&bp, node->redefined_extern_inline, 1);
  bp_pack_value (&bp, node->force_output, 1);
  bp_pack_value (&bp, node->forced_by_abi, 1);
  bp_pack_value (&bp, node->unique_name, 1);
  bp_pack_value (&bp, node->body_removed, 1);
  bp_pack_value (&bp, node->implicit_section, 1);
  bp_pack_value (&bp, node->address_taken, 1);
  /* "used from other partition" is computed on the fly.  */
  bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
		 && node->get_partitioning_class () == SYMBOL_PARTITION
		 && (reachable_from_other_partition_p (node, encoder)
		     || referenced_from_other_partition_p (node, encoder)), 1);
  bp_pack_value (&bp, node->lowered, 1);
  bp_pack_value (&bp, in_other_partition, 1);
  bp_pack_value (&bp, node->alias, 1);
  bp_pack_value (&bp, node->transparent_alias, 1);
  bp_pack_value (&bp, node->weakref, 1);
  bp_pack_value (&bp, node->symver, 1);
  bp_pack_value (&bp, node->frequency, 2);
  bp_pack_value (&bp, node->only_called_at_startup, 1);
  bp_pack_value (&bp, node->only_called_at_exit, 1);
  bp_pack_value (&bp, node->tm_clone, 1);
  bp_pack_value (&bp, node->calls_comdat_local, 1);
  bp_pack_value (&bp, node->icf_merged, 1);
  bp_pack_value (&bp, node->nonfreeing_fn, 1);
  bp_pack_value (&bp, node->merged_comdat, 1);
  bp_pack_value (&bp, node->merged_extern_inline, 1);
  bp_pack_value (&bp, node->thunk.thunk_p, 1);
  bp_pack_value (&bp, node->parallelized_function, 1);
  bp_pack_enum (&bp, ld_plugin_symbol_resolution,
	        LDPR_NUM_KNOWN,
		/* When doing incremental link, we will get new resolution
		   info next time we process the file.  */
		flag_incremental_link ? LDPR_UNKNOWN : node->resolution);
  bp_pack_value (&bp, node->split_part, 1);
  streamer_write_bitpack (&bp);
  streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);

  /* Stream thunk info always because we use it in
     ipa_polymorphic_call_context::ipa_polymorphic_call_context
     to properly interpret THIS pointers for thunks that has been converted
     to Gimple.  */
  if (node->definition)
    {
      /* Thunk flags are packed into one uhwi: bit 0 always set,
	 bit 1 = this_adjusting, bit 2 = virtual_offset_p.  */
      streamer_write_uhwi_stream
	 (ob->main_stream,
	  1 + (node->thunk.this_adjusting != 0) * 2
	  + (node->thunk.virtual_offset_p != 0) * 4);
      streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
      streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
      streamer_write_uhwi_stream (ob->main_stream, node->thunk.indirect_offset);
    }
  streamer_write_hwi_stream (ob->main_stream, node->profile_id);
  streamer_write_hwi_stream (ob->main_stream, node->unit_id);
  /* Constructor/destructor priorities are streamed only when the decl
     actually is one; the reader checks the same predicates.  */
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    streamer_write_hwi_stream (ob->main_stream, node->get_init_priority ());
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    streamer_write_hwi_stream (ob->main_stream, node->get_fini_priority ());
}
571
/* Output the varpool NODE to OB.
   If NODE is not in SET, then NODE is a boundary.  */

static void
lto_output_varpool_node (struct lto_simple_output_block *ob, varpool_node *node,
			 lto_symtab_encoder_t encoder)
{
  bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
  bool encode_initializer_p
     = (node->definition
	&& lto_symtab_encoder_encode_initializer_p (encoder, node));
  struct bitpack_d bp;
  int ref;
  const char *comdat;
  const char *section;
  tree group;

  gcc_assert (!encode_initializer_p || node->definition);
  gcc_assert (boundary_p || encode_initializer_p);

  streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
		       LTO_symtab_variable);
  streamer_write_hwi_stream (ob->main_stream, node->order);
  lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
  /* Bit order must mirror input_varpool_node.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, node->externally_visible, 1);
  bp_pack_value (&bp, node->no_reorder, 1);
  bp_pack_value (&bp, node->force_output, 1);
  bp_pack_value (&bp, node->forced_by_abi, 1);
  bp_pack_value (&bp, node->unique_name, 1);
  /* A definition whose initializer is not streamed looks like a removed
     body on the reader side.  */
  bp_pack_value (&bp,
		 node->body_removed
		 || (!encode_initializer_p && !node->alias && node->definition),
		 1);
  bp_pack_value (&bp, node->implicit_section, 1);
  bp_pack_value (&bp, node->writeonly, 1);
  bp_pack_value (&bp, node->definition && (encode_initializer_p || node->alias),
		 1);
  bp_pack_value (&bp, node->alias, 1);
  bp_pack_value (&bp, node->transparent_alias, 1);
  bp_pack_value (&bp, node->weakref, 1);
  bp_pack_value (&bp, node->symver, 1);
  bp_pack_value (&bp, node->analyzed && (!boundary_p || node->alias), 1);
  gcc_assert (node->definition || !node->analyzed);
  /* Constant pool initializers can be de-unified into individual ltrans units.
     FIXME: Alternatively at -Os we may want to avoid generating for them the local
     labels and share them across LTRANS partitions.  */
  if (node->get_partitioning_class () != SYMBOL_PARTITION)
    {
      bp_pack_value (&bp, 0, 1);  /* used_from_other_partition.  */
      bp_pack_value (&bp, 0, 1);  /* in_other_partition.  */
    }
  else
    {
      bp_pack_value (&bp, node->definition
		     && referenced_from_other_partition_p (node, encoder), 1);
      bp_pack_value (&bp, node->analyzed
		     && boundary_p && !DECL_EXTERNAL (node->decl), 1);
	  /* in_other_partition.  */
    }
  bp_pack_value (&bp, node->tls_model, 3);
  bp_pack_value (&bp, node->used_by_single_function, 1);
  bp_pack_value (&bp, node->dynamically_initialized, 1);
  streamer_write_bitpack (&bp);

  /* Comdat group name; "" means none.  */
  group = node->get_comdat_group ();
  if (group)
    comdat = IDENTIFIER_POINTER (group);
  else
    comdat = "";
  streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);

  if (group)
    {
      if (node->same_comdat_group)
	{
	  ref = LCC_NOT_FOUND;
	  /* Pick the first member of the comdat ring present in the
	     encoder.  */
	  for (struct symtab_node *n = node->same_comdat_group;
	       ref == LCC_NOT_FOUND && n != node; n = n->same_comdat_group)
	    ref = lto_symtab_encoder_lookup (encoder, n);
	}
      else
	ref = LCC_NOT_FOUND;
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  section = node->get_section ();
  if (!section)
    section = "";
  streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);

  streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
		       LDPR_NUM_KNOWN, node->resolution);
}
666
/* Output the IPA reference REF to OB using ENCODER.  (The original
   comment wrongly described this as outputting a varpool node.)  */

static void
lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
	        lto_symtab_encoder_t encoder)
{
  struct bitpack_d bp;
  int nref;
  int uid = ref->lto_stmt_uid;
  struct cgraph_node *node;

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ref->use, 3);
  bp_pack_value (&bp, ref->speculative, 1);
  streamer_write_bitpack (&bp);
  /* The referred symbol must already be in the encoder.  */
  nref = lto_symtab_encoder_lookup (encoder, ref->referred);
  gcc_assert (nref != LCC_NOT_FOUND);
  streamer_write_hwi_stream (ob->main_stream, nref);

  /* Only references originating in functions carry a statement uid;
     the uid is biased by 1 so 0 can mean "no statement".  */
  node = dyn_cast <cgraph_node *> (ref->referring);
  if (node)
    {
      if (ref->stmt)
	uid = gimple_uid (ref->stmt) + 1;
      streamer_write_hwi_stream (ob->main_stream, uid);
    }
}
695
696 /* Stream out profile_summary to OB. */
697
698 static void
699 output_profile_summary (struct lto_simple_output_block *ob)
700 {
701 if (profile_info)
702 {
703 /* We do not output num and run_max, they are not used by
704 GCC profile feedback and they are difficult to merge from multiple
705 units. */
706 unsigned runs = (profile_info->runs);
707 streamer_write_uhwi_stream (ob->main_stream, runs);
708
709 /* IPA-profile computes hot bb threshold based on cumulated
710 whole program profile. We need to stream it down to ltrans. */
711 if (flag_wpa)
712 streamer_write_gcov_count_stream (ob->main_stream,
713 get_hot_bb_threshold ());
714 }
715 else
716 streamer_write_uhwi_stream (ob->main_stream, 0);
717 }
718
719 /* Output all callees or indirect outgoing edges. EDGE must be the first such
720 edge. */
721
722 static void
723 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
724 struct lto_simple_output_block *ob,
725 lto_symtab_encoder_t encoder)
726 {
727 if (!edge)
728 return;
729
730 /* Output edges in backward direction, so the reconstructed callgraph match
731 and it is easy to associate call sites in the IPA pass summaries. */
732 while (edge->next_callee)
733 edge = edge->next_callee;
734 for (; edge; edge = edge->prev_callee)
735 lto_output_edge (ob, edge, encoder);
736 }
737
/* Output the IPA reference lists for the symbols in ENCODER to the
   LTO_section_refs section.  Each record is a count, the owning node's
   encoder index, and the references themselves; a trailing 0 terminates
   the section.  */

static void
output_refs (lto_symtab_encoder_t encoder)
{
  struct lto_simple_output_block *ob;
  int count;
  struct ipa_ref *ref;

  ob = lto_create_simple_output_block (LTO_section_refs);

  for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);

      /* IPA_REF_ALIAS references are always preserved
	 in the boundary.  Alias node can't have other references and
	 can be always handled as if it's not in the boundary.  */
      if (!node->alias && !lto_symtab_encoder_in_partition_p (encoder, node))
	continue;

      count = node->ref_list.nreferences ();
      if (count)
	{
	  streamer_write_gcov_count_stream (ob->main_stream, count);
	  streamer_write_uhwi_stream (ob->main_stream,
				      lto_symtab_encoder_lookup (encoder, node));
	  for (int i = 0; node->iterate_reference (i, ref); i++)
	    lto_output_ref (ob, ref, encoder);
	}
    }

  /* Terminator record.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  lto_destroy_simple_output_block (ob);
}
774
/* Add NODE into encoder as well as nodes it is cloned from.
   Do it in a way so clones appear first.  The body flag is set only on
   the root master of the clone tree (the `else if`): clones share the
   master's streamed body.  */

static void
add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
	     bool include_body)
{
  if (node->clone_of)
    add_node_to (encoder, node->clone_of, include_body);
  else if (include_body)
    lto_set_symtab_encoder_encode_body (encoder, node);
  lto_symtab_encoder_encode (encoder, node);
}
788
789 /* Add all references in NODE to encoders. */
790
791 static void
792 create_references (lto_symtab_encoder_t encoder, symtab_node *node)
793 {
794 int i;
795 struct ipa_ref *ref = NULL;
796 for (i = 0; node->iterate_reference (i, ref); i++)
797 if (is_a <cgraph_node *> (ref->referred))
798 add_node_to (encoder, dyn_cast <cgraph_node *> (ref->referred), false);
799 else
800 lto_symtab_encoder_encode (encoder, ref->referred);
801 }
802
803 /* Select what needs to be streamed out. In regular lto mode stream everything.
804 In offload lto mode stream only nodes marked as offloadable. */
805 void
806 select_what_to_stream (void)
807 {
808 struct symtab_node *snode;
809 FOR_EACH_SYMBOL (snode)
810 snode->need_lto_streaming = !lto_stream_offload_p || snode->offloadable;
811 }
812
/* Find all symbols we want to stream into given partition and insert them
   to encoders.

   The function actually replaces IN_ENCODER by new one.  The reason is that
   streaming code needs clone's origin to be streamed before clone.  This
   means that we need to insert the nodes in specific order.  This order is
   ignored by the partitioning logic earlier.  */

lto_symtab_encoder_t
compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
{
  struct cgraph_edge *edge;
  int i;
  lto_symtab_encoder_t encoder;
  lto_symtab_encoder_iterator lsei;
  hash_set<void *> reachable_call_targets;

  encoder = lto_symtab_encoder_new (false);

  /* Go over all entries in the IN_ENCODER and duplicate them to
     ENCODER.  At the same time insert masters of clones so
     every master appears before clone.  */
  for (lsei = lsei_start_function_in_partition (in_encoder);
       !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
    {
      struct cgraph_node *node = lsei_cgraph_node (lsei);
      if (!node->need_lto_streaming)
	continue;
      add_node_to (encoder, node, true);
      lto_set_symtab_encoder_in_partition (encoder, node);
      create_references (encoder, node);
    }
  for (lsei = lsei_start_variable_in_partition (in_encoder);
       !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
    {
      varpool_node *vnode = lsei_varpool_node (lsei);

      if (!vnode->need_lto_streaming)
	continue;
      lto_set_symtab_encoder_in_partition (encoder, vnode);
      lto_set_symtab_encoder_encode_initializer (encoder, vnode);
      create_references (encoder, vnode);
    }
  /* Pickle in also the initializer of all referenced readonly variables
     to help folding.  Constant pool variables are not shared, so we must
     pickle those too.  Note that lto_symtab_encoder_size may grow while
     iterating, which extends this loop to newly added variables.  */
  for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
	{
	  if (!lto_symtab_encoder_encode_initializer_p (encoder,
							vnode)
	      && (((vnode->ctor_useable_for_folding_p ()
		   && (!DECL_VIRTUAL_P (vnode->decl)
		       || !flag_wpa
		       || flag_ltrans_devirtualize)))))
	    {
	      lto_set_symtab_encoder_encode_initializer (encoder, vnode);
	      create_references (encoder, vnode);
	    }
	}
    }

  /* Go over all the nodes again to include callees that are not in
     SET.  */
  for (lsei = lsei_start_function_in_partition (encoder);
       !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
    {
      struct cgraph_node *node = lsei_cgraph_node (lsei);
      for (edge = node->callees; edge; edge = edge->next_callee)
	{
	  struct cgraph_node *callee = edge->callee;
	  if (!lto_symtab_encoder_in_partition_p (encoder, callee))
	    {
	      /* We should have moved all the inlines.  */
	      gcc_assert (!callee->inlined_to);
	      add_node_to (encoder, callee, false);
	    }
	}
      /* Add all possible targets for late devirtualization.  */
      if (flag_ltrans_devirtualize || !flag_wpa)
	for (edge = node->indirect_calls; edge; edge = edge->next_callee)
	  if (edge->indirect_info->polymorphic)
	    {
	      unsigned int i;
	      void *cache_token;
	      bool final;
	      vec <cgraph_node *>targets
		= possible_polymorphic_call_targets
		    (edge, &final, &cache_token);
	      /* The cache token dedups target sets shared by several
		 call sites.  */
	      if (!reachable_call_targets.add (cache_token))
		{
		  for (i = 0; i < targets.length (); i++)
		    {
		      struct cgraph_node *callee = targets[i];

		      /* Adding an external declarations into the unit serves
			 no purpose and just increases its boundary.  */
		      if (callee->definition
			  && !lto_symtab_encoder_in_partition_p
			       (encoder, callee))
			{
			  gcc_assert (!callee->inlined_to);
			  add_node_to (encoder, callee, false);
			}
		    }
		}
	    }
    }
  /* Be sure to also insert alias target and thunk callees.  These need
     to stay to aid local calling conventions.  */
  for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (node);

      if (node->alias && node->analyzed)
	create_references (encoder, node);
      if (cnode
	  && cnode->thunk.thunk_p && !cnode->inlined_to)
	add_node_to (encoder, cnode->callees->callee, false);
      /* Follow transparent-alias chains to their ultimate targets.  */
      while (node->transparent_alias && node->analyzed)
	{
	  node = node->get_alias_target ();
	  if (is_a <cgraph_node *> (node))
	    add_node_to (encoder, dyn_cast <cgraph_node *> (node),
			 false);
	  else
	    lto_symtab_encoder_encode (encoder, node);
	}
    }
  lto_symtab_encoder_delete (in_encoder);
  return encoder;
}
948
/* Output the part of the symtab in SET and VSET to the
   LTO_section_symtab_nodes section: profile summary, all node records,
   all edges, a terminator, toplevel asms (first partition only), and
   finally the reference lists.  */

void
output_symtab (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob;
  int i, n_nodes;
  lto_symtab_encoder_t encoder;

  if (flag_wpa)
    output_cgraph_opt_summary ();

  ob = lto_create_simple_output_block (LTO_section_symtab_nodes);

  output_profile_summary (ob);

  /* An encoder for cgraph nodes should have been created by
     ipa_write_summaries_1.  */
  gcc_assert (ob->decl_state->symtab_node_encoder);
  encoder = ob->decl_state->symtab_node_encoder;

  /* Write out the nodes.  We must first output a node and then its clones,
     otherwise at a time reading back the node there would be nothing to clone
     from.  */
  n_nodes = lto_symtab_encoder_size (encoder);
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	lto_output_node (ob, cnode, encoder);
      else
	lto_output_varpool_node (ob, dyn_cast<varpool_node *> (node), encoder);
    }

  /* Go over the nodes in SET again to write edges.  */
  for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder, i));
      if (node
	  && ((node->thunk.thunk_p && !node->inlined_to)
	      || lto_symtab_encoder_in_partition_p (encoder, node)))
	{
	  output_outgoing_cgraph_edges (node->callees, ob, encoder);
	  output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
	}
    }

  /* Terminator record for the node/edge stream.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  lto_destroy_simple_output_block (ob);

  /* Emit toplevel asms.
     When doing WPA we must output every asm just once.  Since we do not partition asm
     nodes at all, output them to first output.  This is kind of hack, but should work
     well.  */
  if (!asm_nodes_output)
    {
      asm_nodes_output = true;
      lto_output_toplevel_asms ();
    }

  output_refs (encoder);
}
1013
1014 /* Return identifier encoded in IB as a plain string. */
1015
1016 static tree
1017 read_identifier (class lto_input_block *ib)
1018 {
1019 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1020 tree id;
1021
1022 if (ib->data[ib->p + len])
1023 lto_section_overrun (ib);
1024 if (!len)
1025 {
1026 ib->p++;
1027 return NULL;
1028 }
1029 id = get_identifier (ib->data + ib->p);
1030 ib->p += len + 1;
1031 return id;
1032 }
1033
1034 /* Return string encoded in IB, NULL if string is empty. */
1035
1036 static const char *
1037 read_string (class lto_input_block *ib)
1038 {
1039 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1040 const char *str;
1041
1042 if (ib->data[ib->p + len])
1043 lto_section_overrun (ib);
1044 if (!len)
1045 {
1046 ib->p++;
1047 return NULL;
1048 }
1049 str = ib->data + ib->p;
1050 ib->p += len + 1;
1051 return str;
1052 }
1053
/* Output function/variable tables that will allow libgomp to look up offload
   target code.
   OFFLOAD_FUNCS is filled in expand_omp_target, OFFLOAD_VARS is filled in
   varpool_node::get_create.  In WHOPR (partitioned) mode during the WPA stage
   both OFFLOAD_FUNCS and OFFLOAD_VARS are filled by input_offload_tables.

   The section layout matches input_offload_tables: a sequence of
   (tag, decl-index) records terminated by a zero tag; functions use
   LTO_symtab_unavail_node and variables LTO_symtab_variable.  */

void
output_offload_tables (void)
{
  /* Nothing to emit when no offload entities were registered.  */
  if (vec_safe_is_empty (offload_funcs) && vec_safe_is_empty (offload_vars))
    return;

  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_offload_table);

  for (unsigned i = 0; i < vec_safe_length (offload_funcs); i++)
    {
      streamer_write_enum (ob->main_stream, LTO_symtab_tags,
			   LTO_symtab_last_tag, LTO_symtab_unavail_node);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
				(*offload_funcs)[i]);
    }

  for (unsigned i = 0; i < vec_safe_length (offload_vars); i++)
    {
      streamer_write_enum (ob->main_stream, LTO_symtab_tags,
			   LTO_symtab_last_tag, LTO_symtab_variable);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream,
				 (*offload_vars)[i]);
    }

  /* Zero tag terminates the table.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);
  lto_destroy_simple_output_block (ob);

  /* In WHOPR mode during the WPA stage the joint offload tables need to be
     streamed to one partition only.  That's why we free offload_funcs and
     offload_vars after the first call of output_offload_tables.  */
  if (flag_wpa)
    {
      vec_free (offload_funcs);
      vec_free (offload_vars);
    }
}
1097
/* Verify the partitioning of NODE.

   During LTRANS nothing is checked (cross-partition references are
   expected there).  For the accel (offload) compiler, a node placed in
   another partition means the symbol was referenced from offloaded code
   without being marked for offloading — report that as a user-visible
   error.  For the host compiler, partitions must not exist at all at this
   point, which is asserted.  */

static inline void
verify_node_partition (symtab_node *node)
{
  if (flag_ltrans)
    return;

#ifdef ACCEL_COMPILER
  if (node->in_other_partition)
    {
      if (TREE_CODE (node->decl) == FUNCTION_DECL)
	error_at (DECL_SOURCE_LOCATION (node->decl),
		  "function %qs has been referenced in offloaded code but"
		  " hasn%'t been marked to be included in the offloaded code",
		  node->name ());
      else if (VAR_P (node->decl))
	error_at (DECL_SOURCE_LOCATION (node->decl),
		  "variable %qs has been referenced in offloaded code but"
		  " hasn%'t been marked to be included in the offloaded code",
		  node->name ());
      else
	gcc_unreachable ();
    }
#else
  gcc_assert (!node->in_other_partition
	      && !node->used_from_other_partition);
#endif
}
1127
/* Overwrite the information in NODE based on FILE_DATA, TAG, FLAGS,
   STACK_SIZE, SELF_TIME and SELF_SIZE.  This is called either to initialize
   NODE or to replace the values in it, for instance because the first
   time we saw it, the function body was not available but now it
   is.  BP is a bitpack with all the bitflags for NODE read from the
   stream.

   NOTE: the bp_unpack_value sequence below must exactly mirror the
   bp_pack_value order used by the writer (lto_output_node); do not
   reorder these statements.  */

static void
input_overwrite_node (struct lto_file_decl_data *file_data,
		      struct cgraph_node *node,
		      enum LTO_symtab_tags tag,
		      struct bitpack_d *bp)
{
  /* Stash the tag in AUX; input_cgraph_1 uses it to detect duplicate
     reads and clears it afterwards.  */
  node->aux = (void *) tag;
  node->lto_file_data = file_data;

  node->local = bp_unpack_value (bp, 1);
  node->externally_visible = bp_unpack_value (bp, 1);
  node->no_reorder = bp_unpack_value (bp, 1);
  node->definition = bp_unpack_value (bp, 1);
  node->versionable = bp_unpack_value (bp, 1);
  node->can_change_signature = bp_unpack_value (bp, 1);
  node->redefined_extern_inline = bp_unpack_value (bp, 1);
  node->force_output = bp_unpack_value (bp, 1);
  node->forced_by_abi = bp_unpack_value (bp, 1);
  node->unique_name = bp_unpack_value (bp, 1);
  node->body_removed = bp_unpack_value (bp, 1);
  node->implicit_section = bp_unpack_value (bp, 1);
  node->address_taken = bp_unpack_value (bp, 1);
  node->used_from_other_partition = bp_unpack_value (bp, 1);
  node->lowered = bp_unpack_value (bp, 1);
  /* Analyzed state is implied by the tag, not a streamed bit.  */
  node->analyzed = tag == LTO_symtab_analyzed_node;
  node->in_other_partition = bp_unpack_value (bp, 1);
  if (node->in_other_partition
      /* Avoid updating decl when we are seeing just inline clone.
	 When inlining function that has functions already inlined into it,
	 we produce clones of inline clones.

	 WPA partitioning might put each clone into different unit and
	 we might end up streaming inline clone from other partition
	 to support clone we are interested in. */
      && (!node->clone_of
	  || node->clone_of->decl != node->decl))
    {
      /* The body lives elsewhere; treat the decl as external here.  */
      DECL_EXTERNAL (node->decl) = 1;
      TREE_STATIC (node->decl) = 0;
    }
  node->alias = bp_unpack_value (bp, 1);
  node->transparent_alias = bp_unpack_value (bp, 1);
  node->weakref = bp_unpack_value (bp, 1);
  node->symver = bp_unpack_value (bp, 1);
  node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
  node->only_called_at_startup = bp_unpack_value (bp, 1);
  node->only_called_at_exit = bp_unpack_value (bp, 1);
  node->tm_clone = bp_unpack_value (bp, 1);
  node->calls_comdat_local = bp_unpack_value (bp, 1);
  node->icf_merged = bp_unpack_value (bp, 1);
  node->nonfreeing_fn = bp_unpack_value (bp, 1);
  node->merged_comdat = bp_unpack_value (bp, 1);
  node->merged_extern_inline = bp_unpack_value (bp, 1);
  node->thunk.thunk_p = bp_unpack_value (bp, 1);
  node->parallelized_function = bp_unpack_value (bp, 1);
  node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
				     LDPR_NUM_KNOWN);
  node->split_part = bp_unpack_value (bp, 1);
  verify_node_partition (node);
}
1195
1196 /* Return string alias is alias of. */
1197
1198 static tree
1199 get_alias_symbol (tree decl)
1200 {
1201 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1202 return get_identifier (TREE_STRING_POINTER
1203 (TREE_VALUE (TREE_VALUE (alias))));
1204 }
1205
/* Read a cgraph node from input_block IB.  TAG is the node's tag just read
   (LTO_symtab_analyzed_node or LTO_symtab_unavail_node).  NODES is the
   vector of previously read nodes, used to resolve the clone-origin
   reference.  Return the node read or overwritten.

   The read order below must exactly mirror lto_output_node.  Inlined-to
   and same-comdat-group links are stored as indices into NODES for now;
   input_cgraph_1 fixes them up into pointers afterwards.  */

static struct cgraph_node *
input_node (struct lto_file_decl_data *file_data,
	    class lto_input_block *ib,
	    enum LTO_symtab_tags tag,
	    vec<symtab_node *> nodes)
{
  gcc::pass_manager *passes = g->get_passes ();
  tree fn_decl;
  struct cgraph_node *node;
  struct bitpack_d bp;
  unsigned decl_index;
  int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
  int clone_ref;
  int order;
  int i, count;
  tree group;
  const char *section;
  /* Orders are rebased per input file so they stay globally unique.  */
  order = streamer_read_hwi (ib) + file_data->order_base;
  clone_ref = streamer_read_hwi (ib);

  decl_index = streamer_read_uhwi (ib);
  fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);

  if (clone_ref != LCC_NOT_FOUND)
    {
      /* This node is a clone of an earlier node in the stream.  */
      node = dyn_cast<cgraph_node *> (nodes[clone_ref])->create_clone (fn_decl,
	profile_count::uninitialized (), false,
	vNULL, false, NULL, NULL);
    }
  else
    {
      /* Declaration of functions can be already merged with a declaration
	 from other input file.  We keep cgraph unmerged until after streaming
	 of ipa passes is done.  Always forcibly create a fresh node.  */
      node = symtab->create_empty ();
      node->decl = fn_decl;
      if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (fn_decl)))
	node->ifunc_resolver = 1;
      node->register_symbol ();
    }

  node->order = order;
  if (order >= symtab->order)
    symtab->order = order + 1;

  node->count = profile_count::stream_in (ib);
  node->count_materialization_scale = streamer_read_hwi (ib);

  /* Read the list of IPA transform passes still to be applied to this
     node's body at materialization time.  */
  count = streamer_read_hwi (ib);
  node->ipa_transforms_to_apply = vNULL;
  for (i = 0; i < count; i++)
    {
      opt_pass *pass;
      int pid = streamer_read_hwi (ib);

      gcc_assert (pid < passes->passes_by_id_size);
      pass = passes->passes_by_id[pid];
      node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass_d *) pass);
    }

  /* Only analyzed nodes carry an inlined-to reference.  */
  if (tag == LTO_symtab_analyzed_node)
    ref = streamer_read_hwi (ib);

  group = read_identifier (ib);
  if (group)
    ref2 = streamer_read_hwi (ib);

  /* Make sure that we have not read this node before.  Nodes that
     have already been read will have their tag stored in the 'aux'
     field.  Since built-in functions can be referenced in multiple
     functions, they are expected to be read more than once.  */
  if (node->aux && !fndecl_built_in_p (node->decl))
    internal_error ("bytecode stream: found multiple instances of cgraph "
		    "node with uid %d", node->get_uid ());

  node->tp_first_run = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);

  input_overwrite_node (file_data, node, tag, &bp);

  /* Store a reference for now, and fix up later to be a pointer.  */
  node->inlined_to = (cgraph_node *) (intptr_t) ref;

  if (group)
    {
      node->set_comdat_group (group);
      /* Store a reference for now, and fix up later to be a pointer.  */
      node->same_comdat_group = (symtab_node *) (intptr_t) ref2;
    }
  else
    node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
  section = read_string (ib);
  if (section)
    node->set_section_for_node (section);

  /* Thunk data is streamed for every definition; TYPE encodes the
     this_adjusting and virtual_offset_p flags as bits 1 and 2.  */
  if (node->definition)
    {
      int type = streamer_read_uhwi (ib);
      HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
      HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);
      HOST_WIDE_INT indirect_offset = streamer_read_uhwi (ib);

      node->thunk.fixed_offset = fixed_offset;
      node->thunk.virtual_value = virtual_value;
      node->thunk.indirect_offset = indirect_offset;
      node->thunk.this_adjusting = (type & 2);
      node->thunk.virtual_offset_p = (type & 4);
    }
  if (node->alias && !node->analyzed && node->weakref)
    node->alias_target = get_alias_symbol (node->decl);
  node->profile_id = streamer_read_hwi (ib);
  /* Unit ids are rebased per input file, like orders above.  */
  node->unit_id = streamer_read_hwi (ib) + file_data->unit_base;
  if (symtab->max_unit < node->unit_id)
    symtab->max_unit = node->unit_id;
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    node->set_init_priority (streamer_read_hwi (ib));
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    node->set_fini_priority (streamer_read_hwi (ib));

  return node;
}
1331
1332 /* Read a node from input_block IB. TAG is the node's tag just read.
1333 Return the node read or overwriten. */
1334
1335 static varpool_node *
1336 input_varpool_node (struct lto_file_decl_data *file_data,
1337 class lto_input_block *ib)
1338 {
1339 int decl_index;
1340 tree var_decl;
1341 varpool_node *node;
1342 struct bitpack_d bp;
1343 int ref = LCC_NOT_FOUND;
1344 int order;
1345 tree group;
1346 const char *section;
1347
1348 order = streamer_read_hwi (ib) + file_data->order_base;
1349 decl_index = streamer_read_uhwi (ib);
1350 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1351
1352 /* Declaration of functions can be already merged with a declaration
1353 from other input file. We keep cgraph unmerged until after streaming
1354 of ipa passes is done. Alays forcingly create a fresh node. */
1355 node = varpool_node::create_empty ();
1356 node->decl = var_decl;
1357 node->register_symbol ();
1358
1359 node->order = order;
1360 if (order >= symtab->order)
1361 symtab->order = order + 1;
1362 node->lto_file_data = file_data;
1363
1364 bp = streamer_read_bitpack (ib);
1365 node->externally_visible = bp_unpack_value (&bp, 1);
1366 node->no_reorder = bp_unpack_value (&bp, 1);
1367 node->force_output = bp_unpack_value (&bp, 1);
1368 node->forced_by_abi = bp_unpack_value (&bp, 1);
1369 node->unique_name = bp_unpack_value (&bp, 1);
1370 node->body_removed = bp_unpack_value (&bp, 1);
1371 node->implicit_section = bp_unpack_value (&bp, 1);
1372 node->writeonly = bp_unpack_value (&bp, 1);
1373 node->definition = bp_unpack_value (&bp, 1);
1374 node->alias = bp_unpack_value (&bp, 1);
1375 node->transparent_alias = bp_unpack_value (&bp, 1);
1376 node->weakref = bp_unpack_value (&bp, 1);
1377 node->symver = bp_unpack_value (&bp, 1);
1378 node->analyzed = bp_unpack_value (&bp, 1);
1379 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1380 node->in_other_partition = bp_unpack_value (&bp, 1);
1381 if (node->in_other_partition)
1382 {
1383 DECL_EXTERNAL (node->decl) = 1;
1384 TREE_STATIC (node->decl) = 0;
1385 }
1386 if (node->alias && !node->analyzed && node->weakref)
1387 node->alias_target = get_alias_symbol (node->decl);
1388 node->tls_model = (enum tls_model)bp_unpack_value (&bp, 3);
1389 node->used_by_single_function = (enum tls_model)bp_unpack_value (&bp, 1);
1390 node->dynamically_initialized = bp_unpack_value (&bp, 1);
1391 group = read_identifier (ib);
1392 if (group)
1393 {
1394 node->set_comdat_group (group);
1395 ref = streamer_read_hwi (ib);
1396 /* Store a reference for now, and fix up later to be a pointer. */
1397 node->same_comdat_group = (symtab_node *) (intptr_t) ref;
1398 }
1399 else
1400 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1401 section = read_string (ib);
1402 if (section)
1403 node->set_section_for_node (section);
1404 node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1405 LDPR_NUM_KNOWN);
1406 verify_node_partition (node);
1407 return node;
1408 }
1409
1410 /* Read a node from input_block IB. TAG is the node's tag just read.
1411 Return the node read or overwriten. */
1412
1413 static void
1414 input_ref (class lto_input_block *ib,
1415 symtab_node *referring_node,
1416 vec<symtab_node *> nodes)
1417 {
1418 symtab_node *node = NULL;
1419 struct bitpack_d bp;
1420 enum ipa_ref_use use;
1421 bool speculative;
1422 struct ipa_ref *ref;
1423
1424 bp = streamer_read_bitpack (ib);
1425 use = (enum ipa_ref_use) bp_unpack_value (&bp, 3);
1426 speculative = (enum ipa_ref_use) bp_unpack_value (&bp, 1);
1427 node = nodes[streamer_read_hwi (ib)];
1428 ref = referring_node->create_reference (node, use);
1429 ref->speculative = speculative;
1430 if (is_a <cgraph_node *> (referring_node))
1431 ref->lto_stmt_uid = streamer_read_hwi (ib);
1432 }
1433
/* Read an edge from IB.  NODES points to a vector of previously read nodes for
   decoding caller and callee of the edge to be read.  If INDIRECT is true, the
   edge being read is indirect (in the sense that it has
   indirect_unknown_callee set).

   The read order below must exactly mirror lto_output_edge.  */

static void
input_edge (class lto_input_block *ib, vec<symtab_node *> nodes,
	    bool indirect)
{
  struct cgraph_node *caller, *callee;
  struct cgraph_edge *edge;
  unsigned int stmt_id;
  profile_count count;
  cgraph_inline_failed_t inline_failed;
  struct bitpack_d bp;
  int ecf_flags = 0;

  /* Caller (and for direct edges the callee) are encoded as indices
     into NODES.  */
  caller = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
  if (caller == NULL || caller->decl == NULL_TREE)
    internal_error ("bytecode stream: no caller found while reading edge");

  if (!indirect)
    {
      callee = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
      if (callee == NULL || callee->decl == NULL_TREE)
	internal_error ("bytecode stream: no callee found while reading edge");
    }
  else
    callee = NULL;

  count = profile_count::stream_in (ib);

  bp = streamer_read_bitpack (ib);
  inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_t, CIF_N_REASONS);
  stmt_id = bp_unpack_var_len_unsigned (&bp);

  if (indirect)
    edge = caller->create_indirect_edge (NULL, 0, count);
  else
    edge = caller->create_edge (callee, NULL, count);

  edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
  edge->speculative = bp_unpack_value (&bp, 1);
  edge->lto_stmt_uid = stmt_id;
  edge->inline_failed = inline_failed;
  edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
  edge->can_throw_external = bp_unpack_value (&bp, 1);
  edge->in_polymorphic_cdtor = bp_unpack_value (&bp, 1);
  if (indirect)
    {
      /* ECF flags of the unknown callee, streamed one bit each.  */
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_CONST;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_PURE;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NORETURN;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_MALLOC;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NOTHROW;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_RETURNS_TWICE;
      edge->indirect_info->ecf_flags = ecf_flags;
      /* The speculative common-target id, with its probability streamed
	 only when an id is present.  */
      edge->indirect_info->common_target_id = streamer_read_hwi (ib);
      if (edge->indirect_info->common_target_id)
	edge->indirect_info->common_target_probability = streamer_read_hwi (ib);
    }
}
1502
1503
/* Read a cgraph from IB using the info in FILE_DATA.  Return the vector of
   all symtab nodes read, in stream order (the indices edges and references
   use to name nodes).

   After reading, the function fixes up the inlined_to and
   same_comdat_group fields, which were streamed as indices, into real
   pointers.  */

static vec<symtab_node *>
input_cgraph_1 (struct lto_file_decl_data *file_data,
		class lto_input_block *ib)
{
  enum LTO_symtab_tags tag;
  vec<symtab_node *> nodes = vNULL;
  symtab_node *node;
  unsigned i;

  tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
  /* Rebase orders and unit ids of this file so they do not collide with
     previously read files.  */
  file_data->order_base = symtab->order;
  file_data->unit_base = symtab->max_unit + 1;
  /* A zero tag terminates the node/edge stream (see output_symtab).  */
  while (tag)
    {
      if (tag == LTO_symtab_edge)
	input_edge (ib, nodes, false);
      else if (tag == LTO_symtab_indirect_edge)
	input_edge (ib, nodes, true);
      else if (tag == LTO_symtab_variable)
	{
	  node = input_varpool_node (file_data, ib);
	  nodes.safe_push (node);
	  lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
	}
      else
	{
	  node = input_node (file_data, ib, tag, nodes);
	  if (node == NULL || node->decl == NULL_TREE)
	    internal_error ("bytecode stream: found empty cgraph node");
	  nodes.safe_push (node);
	  lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
	}

      tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
    }

  lto_input_toplevel_asms (file_data, file_data->order_base);

  /* AUX pointers should be all non-zero for function nodes read from the
     stream (input_overwrite_node stored the tag there).  */
  if (flag_checking)
    {
      FOR_EACH_VEC_ELT (nodes, i, node)
	gcc_assert (node->aux || !is_a <cgraph_node *> (node));
    }
  FOR_EACH_VEC_ELT (nodes, i, node)
    {
      int ref;
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  ref = (int) (intptr_t) cnode->inlined_to;

	  /* We share declaration of builtins, so we may read same node twice.  */
	  if (!node->aux)
	    continue;
	  node->aux = NULL;

	  /* Fixup inlined_to from reference to pointer.  */
	  if (ref != LCC_NOT_FOUND)
	    dyn_cast<cgraph_node *> (node)->inlined_to
	      = dyn_cast<cgraph_node *> (nodes[ref]);
	  else
	    cnode->inlined_to = NULL;
	}

      ref = (int) (intptr_t) node->same_comdat_group;

      /* Fixup same_comdat_group from reference to pointer.  */
      if (ref != LCC_NOT_FOUND)
	node->same_comdat_group = nodes[ref];
      else
	node->same_comdat_group = NULL;
    }
  /* Re-mark function nodes; input_symtab clears this again after the
     overwrite logic is done with it.  */
  FOR_EACH_VEC_ELT (nodes, i, node)
    node->aux = is_a <cgraph_node *> (node) ? (void *)1 : NULL;
  return nodes;
}
1582
1583 /* Input ipa_refs. */
1584
1585 static void
1586 input_refs (class lto_input_block *ib,
1587 vec<symtab_node *> nodes)
1588 {
1589 int count;
1590 int idx;
1591 while (true)
1592 {
1593 symtab_node *node;
1594 count = streamer_read_uhwi (ib);
1595 if (!count)
1596 break;
1597 idx = streamer_read_uhwi (ib);
1598 node = nodes[idx];
1599 while (count)
1600 {
1601 input_ref (ib, node, nodes);
1602 count--;
1603 }
1604 }
1605 }
1606
1607 /* Input profile_info from IB. */
1608 static void
1609 input_profile_summary (class lto_input_block *ib,
1610 struct lto_file_decl_data *file_data)
1611 {
1612 unsigned int runs = streamer_read_uhwi (ib);
1613 if (runs)
1614 {
1615 file_data->profile_info.runs = runs;
1616
1617 /* IPA-profile computes hot bb threshold based on cumulated
1618 whole program profile. We need to stream it down to ltrans. */
1619 if (flag_ltrans)
1620 set_hot_bb_threshold (streamer_read_gcov_count (ib));
1621 }
1622
1623 }
1624
1625 /* Rescale profile summaries to the same number of runs in the whole unit. */
1626
1627 static void
1628 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1629 {
1630 struct lto_file_decl_data *file_data;
1631 unsigned int j;
1632 gcov_unsigned_t max_runs = 0;
1633 struct cgraph_node *node;
1634 struct cgraph_edge *edge;
1635
1636 /* Find unit with maximal number of runs. If we ever get serious about
1637 roundoff errors, we might also consider computing smallest common
1638 multiply. */
1639 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1640 if (max_runs < file_data->profile_info.runs)
1641 max_runs = file_data->profile_info.runs;
1642
1643 if (!max_runs)
1644 return;
1645
1646 /* Simple overflow check. We probably don't need to support that many train
1647 runs. Such a large value probably imply data corruption anyway. */
1648 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1649 {
1650 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1651 INT_MAX / REG_BR_PROB_BASE);
1652 return;
1653 }
1654
1655 profile_info = XCNEW (gcov_summary);
1656 profile_info->runs = max_runs;
1657
1658 /* If merging already happent at WPA time, we are done. */
1659 if (flag_ltrans)
1660 return;
1661
1662 /* Now compute count_materialization_scale of each node.
1663 During LTRANS we already have values of count_materialization_scale
1664 computed, so just update them. */
1665 FOR_EACH_FUNCTION (node)
1666 if (node->lto_file_data
1667 && node->lto_file_data->profile_info.runs)
1668 {
1669 int scale;
1670
1671 scale = RDIV (node->count_materialization_scale * max_runs,
1672 node->lto_file_data->profile_info.runs);
1673 node->count_materialization_scale = scale;
1674 if (scale < 0)
1675 fatal_error (input_location, "Profile information in %s corrupted",
1676 file_data->file_name);
1677
1678 if (scale == REG_BR_PROB_BASE)
1679 continue;
1680 for (edge = node->callees; edge; edge = edge->next_callee)
1681 if (edge->count.ipa ().nonzero_p ())
1682 edge->count = edge->count.apply_scale (scale, REG_BR_PROB_BASE);
1683 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
1684 if (edge->count.ipa ().nonzero_p ())
1685 edge->count = edge->count.apply_scale (scale, REG_BR_PROB_BASE);
1686 if (node->count.ipa ().nonzero_p ())
1687 node->count = node->count.apply_scale (scale, REG_BR_PROB_BASE);
1688 }
1689 }
1690
/* Input and merge the symtab from each of the .o files passed to
   lto1.

   For every input file this reads the symtab-nodes section (profile
   summary + nodes + edges), then the references section, and at LTRANS
   time the optimization summaries.  Afterwards the per-file profile
   summaries are merged and the temporary AUX markers are cleared.  */

void
input_symtab (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;
  struct cgraph_node *node;

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      class lto_input_block *ib;
      vec<symtab_node *> nodes;

      ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
					  &data, &len);
      if (!ib)
	fatal_error (input_location,
		     "cannot find LTO cgraph in %s", file_data->file_name);
      input_profile_summary (ib, file_data);
      file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
      nodes = input_cgraph_1 (file_data, ib);
      lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
				      ib, data, len);

      ib = lto_create_simple_input_block (file_data, LTO_section_refs,
					  &data, &len);
      if (!ib)
	fatal_error (input_location, "cannot find LTO section refs in %s",
		     file_data->file_name);
      input_refs (ib, nodes);
      lto_destroy_simple_input_block (file_data, LTO_section_refs,
				      ib, data, len);
      if (flag_ltrans)
	input_cgraph_opt_summary (nodes);
      nodes.release ();
    }

  merge_profile_summaries (file_data_vec);

  /* Clear out the aux field that was used to store enough state to
     tell which nodes should be overwritten.  */
  FOR_EACH_FUNCTION (node)
    {
      /* Some nodes may have been created by cgraph_node.  This
	 happens when the callgraph contains nested functions.  If the
	 node for the parent function was never emitted to the gimple
	 file, cgraph_node will create a node for it when setting the
	 context of the nested function.  */
      if (node->lto_file_data)
	node->aux = NULL;
    }
}
1748
/* Input function/variable tables that will allow libgomp to look up offload
   target code, and store them into OFFLOAD_FUNCS and OFFLOAD_VARS.

   When DO_FORCE_OUTPUT is true, every decl read is additionally marked
   force-output so IPA passes cannot remove it as unreferenced.  Files
   without an offload-table section are silently skipped.  The section
   layout is produced by output_offload_tables.  */

void
input_offload_tables (bool do_force_output)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      class lto_input_block *ib
	= lto_create_simple_input_block (file_data, LTO_section_offload_table,
					 &data, &len);
      /* Not every input file has an offload table.  */
      if (!ib)
	continue;

      enum LTO_symtab_tags tag
	= streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
      /* A zero tag terminates the table.  */
      while (tag)
	{
	  if (tag == LTO_symtab_unavail_node)
	    {
	      int decl_index = streamer_read_uhwi (ib);
	      tree fn_decl
		= lto_file_decl_data_get_fn_decl (file_data, decl_index);
	      vec_safe_push (offload_funcs, fn_decl);

	      /* Prevent IPA from removing fn_decl as unreachable, since there
		 may be no refs from the parent function to child_fn in offload
		 LTO mode.  */
	      if (do_force_output)
		cgraph_node::get (fn_decl)->mark_force_output ();
	    }
	  else if (tag == LTO_symtab_variable)
	    {
	      int decl_index = streamer_read_uhwi (ib);
	      tree var_decl
		= lto_file_decl_data_get_var_decl (file_data, decl_index);
	      vec_safe_push (offload_vars, var_decl);

	      /* Prevent IPA from removing var_decl as unused, since there
		 may be no refs to var_decl in offload LTO mode.  */
	      if (do_force_output)
		varpool_node::get (var_decl)->force_output = 1;
	    }
	  else
	    fatal_error (input_location,
			 "invalid offload table in %s", file_data->file_name);

	  tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
	}

      lto_destroy_simple_input_block (file_data, LTO_section_offload_table,
				      ib, data, len);
    }
}
1809
1810 /* True when we need optimization summary for NODE. */
1811
1812 static int
1813 output_cgraph_opt_summary_p (struct cgraph_node *node)
1814 {
1815 return ((node->clone_of || node->former_clone_of)
1816 && (node->clone.tree_map
1817 || node->clone.param_adjustments));
1818 }
1819
/* Output optimization summary for EDGE to OB.
   Intentionally empty: no per-edge optimization data is currently
   streamed.  Kept (with its input_edge_opt_summary counterpart) as the
   hook point should per-edge data be added again.  */
static void
output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
			 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
{
}
1826
/* Output optimization summary for NODE to OB: the parameter adjustments
   and tree replacement map of the clone, plus (for partitioned nodes)
   per-edge summaries.  The pack order must exactly mirror
   input_node_opt_summary.  */

static void
output_node_opt_summary (struct output_block *ob,
			 struct cgraph_node *node,
			 lto_symtab_encoder_t encoder)
{
  struct ipa_replace_map *map;
  int i;
  struct cgraph_edge *e;

  /* TODO: Should this code be moved to ipa-param-manipulation?  */
  struct bitpack_d bp;
  bp = bitpack_create (ob->main_stream);
  /* One bit: whether parameter adjustments follow.  */
  bp_pack_value (&bp, (node->clone.param_adjustments != NULL), 1);
  streamer_write_bitpack (&bp);
  if (ipa_param_adjustments *adjustments = node->clone.param_adjustments)
    {
      streamer_write_uhwi (ob, vec_safe_length (adjustments->m_adj_params));
      ipa_adjusted_param *adj;
      FOR_EACH_VEC_SAFE_ELT (adjustments->m_adj_params, i, adj)
	{
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, adj->base_index, IPA_PARAM_MAX_INDEX_BITS);
	  bp_pack_value (&bp, adj->prev_clone_index, IPA_PARAM_MAX_INDEX_BITS);
	  bp_pack_value (&bp, adj->op, 2);
	  bp_pack_value (&bp, adj->param_prefix_index, 2);
	  bp_pack_value (&bp, adj->prev_clone_adjustment, 1);
	  bp_pack_value (&bp, adj->reverse, 1);
	  bp_pack_value (&bp, adj->user_flag, 1);
	  streamer_write_bitpack (&bp);
	  /* Split and new parameters carry a type; split ones also carry
	     the alias pointer type and unit offset.  */
	  if (adj->op == IPA_PARAM_OP_SPLIT
	      || adj->op == IPA_PARAM_OP_NEW)
	    {
	      stream_write_tree (ob, adj->type, true);
	      if (adj->op == IPA_PARAM_OP_SPLIT)
		{
		  stream_write_tree (ob, adj->alias_ptr_type, true);
		  streamer_write_uhwi (ob, adj->unit_offset);
		}
	    }
	}
      streamer_write_hwi (ob, adjustments->m_always_copy_start);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, node->clone.param_adjustments->m_skip_return, 1);
      streamer_write_bitpack (&bp);
    }

  /* Tree replacement map: (parameter number, replacement value) pairs.  */
  streamer_write_uhwi (ob, vec_safe_length (node->clone.tree_map));
  FOR_EACH_VEC_SAFE_ELT (node->clone.tree_map, i, map)
    {
      streamer_write_uhwi (ob, map->parm_num);
      gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
      stream_write_tree (ob, map->new_tree, true);
    }

  /* Edge summaries are only streamed for nodes in this partition.  */
  if (lto_symtab_encoder_in_partition_p (encoder, node))
    {
      for (e = node->callees; e; e = e->next_callee)
	output_edge_opt_summary (ob, e);
      for (e = node->indirect_calls; e; e = e->next_callee)
	output_edge_opt_summary (ob, e);
    }
}
1891
/* Output optimization summaries stored in callgraph.
   At the moment it is the clone info structure.

   Two passes over the encoder: the first counts nodes that need a
   summary (the count is streamed first so the reader can size its loop),
   the second streams each node's encoder index followed by its summary.
   Both passes must use the same predicate so the count matches.  */

static void
output_cgraph_opt_summary (void)
{
  int i, n_nodes;
  lto_symtab_encoder_t encoder;
  struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
  unsigned count = 0;

  ob->symbol = NULL;
  encoder = ob->decl_state->symtab_node_encoder;
  n_nodes = lto_symtab_encoder_size (encoder);
  /* Pass 1: count nodes with a summary.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
      if (cnode && output_cgraph_opt_summary_p (cnode))
	count++;
    }
  streamer_write_uhwi (ob, count);
  /* Pass 2: stream (encoder index, summary) for each such node.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
      if (cnode && output_cgraph_opt_summary_p (cnode))
	{
	  streamer_write_uhwi (ob, i);
	  output_node_opt_summary (ob, cnode, encoder);
	}
    }
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
1927
/* Input optimisation summary of EDGE.  Intentionally a no-op: no
   per-edge data is currently read here, but the function is kept so
   input_node_opt_summary can walk callees and indirect calls in
   lock-step with the corresponding writer.  */

static void
input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
			class lto_input_block *ib_main ATTRIBUTE_UNUSED)
{
}
1935
/* Input optimisation summary of NODE.  Reads back, in exactly the
   same order, the records streamed by output_node_opt_summary:
   optional param adjustments, the replacement tree map, then the
   (currently empty) per-edge summaries.  */

static void
input_node_opt_summary (struct cgraph_node *node,
			class lto_input_block *ib_main,
			class data_in *data_in)
{
  int i;
  int count;
  struct cgraph_edge *e;

  /* TODO: Should this code be moved to ipa-param-manipulation?  */
  struct bitpack_d bp;
  bp = streamer_read_bitpack (ib_main);
  /* A single bit says whether param adjustments follow.  */
  bool have_adjustments = bp_unpack_value (&bp, 1);
  if (have_adjustments)
    {
      count = streamer_read_uhwi (ib_main);
      vec<ipa_adjusted_param, va_gc> *new_params = NULL;
      for (i = 0; i < count; i++)
	{
	  ipa_adjusted_param adj;
	  /* Zero the whole record first: some fields are only
	     streamed for particular op kinds (see below).  */
	  memset (&adj, 0, sizeof (adj));
	  bp = streamer_read_bitpack (ib_main);
	  adj.base_index = bp_unpack_value (&bp, IPA_PARAM_MAX_INDEX_BITS);
	  adj.prev_clone_index
	    = bp_unpack_value (&bp, IPA_PARAM_MAX_INDEX_BITS);
	  adj.op = (enum ipa_parm_op) bp_unpack_value (&bp, 2);
	  adj.param_prefix_index = bp_unpack_value (&bp, 2);
	  adj.prev_clone_adjustment = bp_unpack_value (&bp, 1);
	  adj.reverse = bp_unpack_value (&bp, 1);
	  adj.user_flag = bp_unpack_value (&bp, 1);
	  /* A type is streamed only for SPLIT and NEW params; SPLIT
	     additionally carries an alias pointer type and a unit
	     offset, mirroring the writer.  */
	  if (adj.op == IPA_PARAM_OP_SPLIT
	      || adj.op == IPA_PARAM_OP_NEW)
	    {
	      adj.type = stream_read_tree (ib_main, data_in);
	      if (adj.op == IPA_PARAM_OP_SPLIT)
		{
		  adj.alias_ptr_type = stream_read_tree (ib_main, data_in);
		  adj.unit_offset = streamer_read_uhwi (ib_main);
		}
	    }
	  vec_safe_push (new_params, adj);
	}
      int always_copy_start = streamer_read_hwi (ib_main);
      bp = streamer_read_bitpack (ib_main);
      bool skip_return = bp_unpack_value (&bp, 1);
      /* Rebuild the GC-allocated adjustments object on the node.  */
      node->clone.param_adjustments
	= (new (ggc_alloc <ipa_param_adjustments> ())
	   ipa_param_adjustments (new_params, always_copy_start, skip_return));
    }

  /* Read the parameter replacement map: (parm_num, new_tree) pairs.  */
  count = streamer_read_uhwi (ib_main);
  for (i = 0; i < count; i++)
    {
      struct ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();

      vec_safe_push (node->clone.tree_map, map);
      map->parm_num = streamer_read_uhwi (ib_main);
      map->new_tree = stream_read_tree (ib_main, data_in);
    }
  /* Edge summaries must be consumed in the same order the writer
     emitted them, even though they are currently empty.  */
  for (e = node->callees; e; e = e->next_callee)
    input_edge_opt_summary (e, ib_main);
  for (e = node->indirect_calls; e; e = e->next_callee)
    input_edge_opt_summary (e, ib_main);
}
2002
2003 /* Read section in file FILE_DATA of length LEN with data DATA. */
2004
2005 static void
2006 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
2007 const char *data, size_t len,
2008 vec<symtab_node *> nodes)
2009 {
2010 const struct lto_function_header *header =
2011 (const struct lto_function_header *) data;
2012 const int cfg_offset = sizeof (struct lto_function_header);
2013 const int main_offset = cfg_offset + header->cfg_size;
2014 const int string_offset = main_offset + header->main_size;
2015 class data_in *data_in;
2016 unsigned int i;
2017 unsigned int count;
2018
2019 lto_input_block ib_main ((const char *) data + main_offset,
2020 header->main_size, file_data->mode_table);
2021
2022 data_in =
2023 lto_data_in_create (file_data, (const char *) data + string_offset,
2024 header->string_size, vNULL);
2025 count = streamer_read_uhwi (&ib_main);
2026
2027 for (i = 0; i < count; i++)
2028 {
2029 int ref = streamer_read_uhwi (&ib_main);
2030 input_node_opt_summary (dyn_cast<cgraph_node *> (nodes[ref]),
2031 &ib_main, data_in);
2032 }
2033 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
2034 len);
2035 lto_data_in_delete (data_in);
2036 }
2037
2038 /* Input optimization summary of cgraph. */
2039
2040 static void
2041 input_cgraph_opt_summary (vec<symtab_node *> nodes)
2042 {
2043 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2044 struct lto_file_decl_data *file_data;
2045 unsigned int j = 0;
2046
2047 while ((file_data = file_data_vec[j++]))
2048 {
2049 size_t len;
2050 const char *data
2051 = lto_get_summary_section_data (file_data, LTO_section_cgraph_opt_sum,
2052 &len);
2053 if (data)
2054 input_cgraph_opt_section (file_data, data, len, nodes);
2055 }
2056 }