Fix ICE in speculative_call_info
[gcc.git] / gcc / lto-cgraph.c
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright (C) 2009-2020 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "predict.h"
31 #include "stringpool.h"
32 #include "tree-streamer.h"
33 #include "cgraph.h"
34 #include "tree-pass.h"
35 #include "profile.h"
36 #include "context.h"
37 #include "pass_manager.h"
38 #include "ipa-utils.h"
39 #include "omp-offload.h"
40 #include "stringpool.h"
41 #include "attribs.h"
42
43 /* True when asm nodes has been output. */
44 bool asm_nodes_output = false;
45
46 static void output_cgraph_opt_summary (void);
47 static void input_cgraph_opt_summary (vec<symtab_node *> nodes);
48
49 /* Number of LDPR values known to GCC. */
50 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
51
/* Cgraph streaming is organized as set of records whose type
   is indicated by a tag.  The reader dispatches on this tag, so the
   numeric values form part of the on-disk LTO format.  */
enum LTO_symtab_tags
{
  /* Must leave 0 for the stopper.  */

  /* Cgraph node without body available.  */
  LTO_symtab_unavail_node = 1,
  /* Cgraph node with function body.  */
  LTO_symtab_analyzed_node,
  /* Cgraph edges.  */
  LTO_symtab_edge,
  /* Indirect call edge (callee unknown at compile time).  */
  LTO_symtab_indirect_edge,
  /* Varpool (variable) node.  */
  LTO_symtab_variable,
  /* One past the last valid tag; used as the enum range bound when
     (de)streaming.  */
  LTO_symtab_last_tag
};
68
69 /* Create a new symtab encoder.
70 if FOR_INPUT, the encoder allocate only datastructures needed
71 to read the symtab. */
72
73 lto_symtab_encoder_t
74 lto_symtab_encoder_new (bool for_input)
75 {
76 lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
77
78 if (!for_input)
79 encoder->map = new hash_map<symtab_node *, size_t>;
80 encoder->nodes.create (0);
81 return encoder;
82 }
83
84
85 /* Delete ENCODER and its components. */
86
87 void
88 lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
89 {
90 encoder->nodes.release ();
91 if (encoder->map)
92 delete encoder->map;
93 free (encoder);
94 }
95
96
97 /* Return the existing reference number of NODE in the symtab encoder in
98 output block OB. Assign a new reference if this is the first time
99 NODE is encoded. */
100
101 int
102 lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
103 symtab_node *node)
104 {
105 int ref;
106
107 if (!encoder->map)
108 {
109 lto_encoder_entry entry = {node, false, false, false};
110
111 ref = encoder->nodes.length ();
112 encoder->nodes.safe_push (entry);
113 return ref;
114 }
115
116 size_t *slot = encoder->map->get (node);
117 if (!slot || !*slot)
118 {
119 lto_encoder_entry entry = {node, false, false, false};
120 ref = encoder->nodes.length ();
121 if (!slot)
122 encoder->map->put (node, ref + 1);
123 encoder->nodes.safe_push (entry);
124 }
125 else
126 ref = *slot - 1;
127
128 return ref;
129 }
130
/* Remove NODE from encoder.  Return true if NODE was present.  */

bool
lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
				symtab_node *node)
{
  int index;
  lto_encoder_entry last_node;

  size_t *slot = encoder->map->get (node);
  if (slot == NULL || !*slot)
    return false;

  /* The map stores the index biased by +1 (0 means absent).  */
  index = *slot - 1;
  gcc_checking_assert (encoder->nodes[index].node == node);

  /* Remove from vector.  We do this by swapping node with the last element
     of the vector.  */
  last_node = encoder->nodes.pop ();
  if (last_node.node != node)
    {
      /* The moved node must already be in the map, so put must return
	 true (it returns whether an existing entry was overwritten).  */
      gcc_assert (encoder->map->put (last_node.node, index + 1));

      /* Move the last element to the original spot of NODE.  */
      encoder->nodes[index] = last_node;
    }

  /* Remove element from hash table.  */
  encoder->map->remove (node);
  return true;
}
162
163
164 /* Return TRUE if we should encode the body of NODE (if any). */
165
166 bool
167 lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
168 struct cgraph_node *node)
169 {
170 int index = lto_symtab_encoder_lookup (encoder, node);
171 return encoder->nodes[index].body;
172 }
173
174 /* Specify that we encode the body of NODE in this partition. */
175
176 static void
177 lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
178 struct cgraph_node *node)
179 {
180 int index = lto_symtab_encoder_encode (encoder, node);
181 gcc_checking_assert (encoder->nodes[index].node == node);
182 encoder->nodes[index].body = true;
183 }
184
185 /* Return TRUE if we should encode initializer of NODE (if any). */
186
187 bool
188 lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
189 varpool_node *node)
190 {
191 int index = lto_symtab_encoder_lookup (encoder, node);
192 if (index == LCC_NOT_FOUND)
193 return false;
194 return encoder->nodes[index].initializer;
195 }
196
/* Specify that we should encode initializer of NODE (if any).
   NOTE(review): unlike lto_symtab_encoder_encode_initializer_p, the
   lookup result is not checked against LCC_NOT_FOUND — presumably
   callers guarantee NODE is already encoded; confirm before calling
   this on a node that might be absent.  */

static void
lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
					   varpool_node *node)
{
  int index = lto_symtab_encoder_lookup (encoder, node);
  encoder->nodes[index].initializer = true;
}
206
207 /* Return TRUE if NODE is in this partition. */
208
209 bool
210 lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
211 symtab_node *node)
212 {
213 int index = lto_symtab_encoder_lookup (encoder, node);
214 if (index == LCC_NOT_FOUND)
215 return false;
216 return encoder->nodes[index].in_partition;
217 }
218
219 /* Specify that NODE is in this partition. */
220
221 void
222 lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
223 symtab_node *node)
224 {
225 int index = lto_symtab_encoder_encode (encoder, node);
226 encoder->nodes[index].in_partition = true;
227 }
228
/* Output the cgraph EDGE to OB using ENCODER.  The field order here is
   part of the LTO stream format: the reader must consume exactly the
   same sequence (tag, caller ref, [callee ref], count, bitpack).  */

static void
lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
		 lto_symtab_encoder_t encoder)
{
  unsigned int uid;
  intptr_t ref;
  struct bitpack_d bp;

  if (edge->indirect_unknown_callee)
    streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
			 LTO_symtab_indirect_edge);
  else
    streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
			 LTO_symtab_edge);

  ref = lto_symtab_encoder_lookup (encoder, edge->caller);
  gcc_assert (ref != LCC_NOT_FOUND);
  streamer_write_hwi_stream (ob->main_stream, ref);

  /* Only direct edges have a known callee to reference.  */
  if (!edge->indirect_unknown_callee)
    {
      ref = lto_symtab_encoder_lookup (encoder, edge->callee);
      gcc_assert (ref != LCC_NOT_FOUND);
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  edge->count.stream_out (ob->main_stream);

  bp = bitpack_create (ob->main_stream);
  /* Statement UIDs are biased by +1 so that 0 means "no statement";
     edges already detached from gimple carry lto_stmt_uid instead.  */
  uid = !edge->call_stmt ? edge->lto_stmt_uid
	: gimple_uid (edge->call_stmt) + 1;
  bp_pack_enum (&bp, cgraph_inline_failed_t,
		CIF_N_REASONS, edge->inline_failed);
  /* Only thunk edges may legitimately lack a statement UID.  */
  gcc_checking_assert (uid || edge->caller->thunk.thunk_p);
  bp_pack_var_len_unsigned (&bp, uid);
  bp_pack_value (&bp, edge->speculative_id, 16);
  bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
  bp_pack_value (&bp, edge->speculative, 1);
  bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
  gcc_assert (!edge->call_stmt_cannot_inline_p
	      || edge->inline_failed != CIF_BODY_NOT_AVAILABLE);
  bp_pack_value (&bp, edge->can_throw_external, 1);
  bp_pack_value (&bp, edge->in_polymorphic_cdtor, 1);
  if (edge->indirect_unknown_callee)
    {
      /* For indirect edges only a compressed subset of the ECF flags
	 is streamed, one bit each.  */
      int flags = edge->indirect_info->ecf_flags;
      bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
      /* Flags that should not appear on indirect calls.  */
      gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
			     | ECF_MAY_BE_ALLOCA
			     | ECF_SIBCALL
			     | ECF_LEAF
			     | ECF_NOVOPS)));

      bp_pack_value (&bp, edge->indirect_info->num_speculative_call_targets,
		     16);
    }
  streamer_write_bitpack (&bp);
}
295
296 /* Return if NODE contain references from other partitions. */
297
298 bool
299 referenced_from_other_partition_p (symtab_node *node, lto_symtab_encoder_t encoder)
300 {
301 int i;
302 struct ipa_ref *ref = NULL;
303
304 for (i = 0; node->iterate_referring (i, ref); i++)
305 {
306 /* Ignore references from non-offloadable nodes while streaming NODE into
307 offload LTO section. */
308 if (!ref->referring->need_lto_streaming)
309 continue;
310
311 if (ref->referring->in_other_partition
312 || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
313 return true;
314 }
315 return false;
316 }
317
318 /* Return true when node is reachable from other partition. */
319
320 bool
321 reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
322 {
323 struct cgraph_edge *e;
324 if (!node->definition)
325 return false;
326 if (node->inlined_to)
327 return false;
328 for (e = node->callers; e; e = e->next_caller)
329 {
330 /* Ignore references from non-offloadable nodes while streaming NODE into
331 offload LTO section. */
332 if (!e->caller->need_lto_streaming)
333 continue;
334
335 if (e->caller->in_other_partition
336 || !lto_symtab_encoder_in_partition_p (encoder, e->caller))
337 return true;
338 }
339 return false;
340 }
341
342 /* Return if NODE contain references from other partitions. */
343
344 bool
345 referenced_from_this_partition_p (symtab_node *node,
346 lto_symtab_encoder_t encoder)
347 {
348 int i;
349 struct ipa_ref *ref = NULL;
350
351 for (i = 0; node->iterate_referring (i, ref); i++)
352 if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
353 return true;
354 return false;
355 }
356
357 /* Return true when node is reachable from other partition. */
358
359 bool
360 reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
361 {
362 struct cgraph_edge *e;
363 for (e = node->callers; e; e = e->next_caller)
364 if (lto_symtab_encoder_in_partition_p (encoder, e->caller))
365 return true;
366 return false;
367 }
368
/* Output the cgraph NODE to OB.  ENCODER is used to find the
   reference number of NODE->inlined_to and related nodes.  If NODE is
   not in the partition encoded by ENCODER, then NODE is a boundary of
   a cgraph_node_set and we pretend NODE just has a decl and no
   callees.  The exact field order written here is part of the LTO
   stream format and must be mirrored by the reader.  */

static void
lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
		 lto_symtab_encoder_t encoder)
{
  unsigned int tag;
  struct bitpack_d bp;
  bool boundary_p;
  intptr_t ref;
  bool in_other_partition = false;
  struct cgraph_node *clone_of, *ultimate_clone_of;
  ipa_opt_pass_d *pass;
  int i;
  const char *comdat;
  const char *section;
  tree group;

  boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);

  /* Analyzed nodes outside the partition are demoted to "unavailable"
     unless they are aliases or non-inlined thunks, which must keep
     their analyzed status to be usable from this partition.  */
  if (node->analyzed && (!boundary_p || node->alias
			 || (node->thunk.thunk_p && !node->inlined_to)))
    tag = LTO_symtab_analyzed_node;
  else
    tag = LTO_symtab_unavail_node;

  streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
		       tag);
  streamer_write_hwi_stream (ob->main_stream, node->order);

  /* In WPA mode, we only output part of the call-graph.  Also, we
     fake cgraph node attributes.  There are two cases that we care.

     Boundary nodes: There are nodes that are not part of SET but are
     called from within SET.  We artificially make them look like
     externally visible nodes with no function body.

     Cherry-picked nodes: These are nodes we pulled from other
     translation units into SET during IPA-inlining.  We make them as
     local static nodes to prevent clashes with other local statics.  */
  if (boundary_p && node->analyzed
      && node->get_partitioning_class () == SYMBOL_PARTITION)
    {
      /* Inline clones cannot be part of boundary.
	 gcc_assert (!node->inlined_to);

	 FIXME: At the moment they can be, when partition contains an inline
	 clone that is clone of inline clone from outside partition.  We can
	 reshape the clone tree and make other tree to be the root, but it
	 needs a bit extra work and will be promptly done by cgraph_remove_node
	 after reading back.  */
      in_other_partition = 1;
    }

  /* Find the nearest ancestor in the clone tree that is present in
     ENCODER; siblings are tried before walking further up.  */
  clone_of = node->clone_of;
  while (clone_of
	 && (ref = lto_symtab_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
    if (clone_of->prev_sibling_clone)
      clone_of = clone_of->prev_sibling_clone;
    else
      clone_of = clone_of->clone_of;

  /* See if body of the master function is output.  If not, we are seeing only
     a declaration and we do not need to pass down clone tree.  */
  ultimate_clone_of = clone_of;
  while (ultimate_clone_of && ultimate_clone_of->clone_of)
    ultimate_clone_of = ultimate_clone_of->clone_of;

  if (clone_of && !lto_symtab_encoder_encode_body_p (encoder, ultimate_clone_of))
    clone_of = NULL;

  if (tag == LTO_symtab_analyzed_node)
    gcc_assert (clone_of || !node->clone_of);
  if (!clone_of)
    streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
  else
    streamer_write_hwi_stream (ob->main_stream, ref);


  lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
  node->count.stream_out (ob->main_stream);
  streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);

  /* Stream the pending IPA transforms as a length-prefixed list of
     static pass numbers.  */
  streamer_write_hwi_stream (ob->main_stream,
			     node->ipa_transforms_to_apply.length ());
  FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
    streamer_write_hwi_stream (ob->main_stream, pass->static_pass_number);

  if (tag == LTO_symtab_analyzed_node)
    {
      if (node->inlined_to)
	{
	  ref = lto_symtab_encoder_lookup (encoder, node->inlined_to);
	  gcc_assert (ref != LCC_NOT_FOUND);
	}
      else
	ref = LCC_NOT_FOUND;

      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  /* Comdat group is streamed as a NUL-terminated string; "" means no
     group.  */
  group = node->get_comdat_group ();
  if (group)
    comdat = IDENTIFIER_POINTER (group);
  else
    comdat = "";
  streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);

  if (group)
    {
      if (node->same_comdat_group)
	{
	  ref = LCC_NOT_FOUND;
	  /* Walk the circular same_comdat_group ring until we hit a
	     member that is present in ENCODER (or come back to NODE).  */
	  for (struct symtab_node *n = node->same_comdat_group;
	       ref == LCC_NOT_FOUND && n != node; n = n->same_comdat_group)
	    ref = lto_symtab_encoder_lookup (encoder, n);
	}
      else
	ref = LCC_NOT_FOUND;
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  /* SECTION is computed here but only written after the bitpack below,
     to keep the stream layout.  */
  section = node->get_section ();
  if (!section)
    section = "";

  streamer_write_hwi_stream (ob->main_stream, node->tp_first_run);

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, node->local, 1);
  bp_pack_value (&bp, node->externally_visible, 1);
  bp_pack_value (&bp, node->no_reorder, 1);
  bp_pack_value (&bp, node->definition, 1);
  bp_pack_value (&bp, node->versionable, 1);
  bp_pack_value (&bp, node->can_change_signature, 1);
  bp_pack_value (&bp, node->redefined_extern_inline, 1);
  bp_pack_value (&bp, node->force_output, 1);
  bp_pack_value (&bp, node->forced_by_abi, 1);
  bp_pack_value (&bp, node->unique_name, 1);
  bp_pack_value (&bp, node->body_removed, 1);
  bp_pack_value (&bp, node->implicit_section, 1);
  bp_pack_value (&bp, node->address_taken, 1);
  /* used_from_other_partition: only meaningful for analyzed,
     partitionable symbols.  */
  bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
		 && node->get_partitioning_class () == SYMBOL_PARTITION
		 && (reachable_from_other_partition_p (node, encoder)
		     || referenced_from_other_partition_p (node, encoder)), 1);
  bp_pack_value (&bp, node->lowered, 1);
  bp_pack_value (&bp, in_other_partition, 1);
  bp_pack_value (&bp, node->alias, 1);
  bp_pack_value (&bp, node->transparent_alias, 1);
  bp_pack_value (&bp, node->weakref, 1);
  bp_pack_value (&bp, node->symver, 1);
  bp_pack_value (&bp, node->frequency, 2);
  bp_pack_value (&bp, node->only_called_at_startup, 1);
  bp_pack_value (&bp, node->only_called_at_exit, 1);
  bp_pack_value (&bp, node->tm_clone, 1);
  bp_pack_value (&bp, node->calls_comdat_local, 1);
  bp_pack_value (&bp, node->icf_merged, 1);
  bp_pack_value (&bp, node->nonfreeing_fn, 1);
  bp_pack_value (&bp, node->merged_comdat, 1);
  bp_pack_value (&bp, node->merged_extern_inline, 1);
  bp_pack_value (&bp, node->thunk.thunk_p, 1);
  bp_pack_value (&bp, node->parallelized_function, 1);
  bp_pack_enum (&bp, ld_plugin_symbol_resolution,
		LDPR_NUM_KNOWN,
		/* When doing incremental link, we will get new resolution
		   info next time we process the file.  */
		flag_incremental_link ? LDPR_UNKNOWN : node->resolution);
  bp_pack_value (&bp, node->split_part, 1);
  streamer_write_bitpack (&bp);
  streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);

  /* Stream thunk info always because we use it in
     ipa_polymorphic_call_context::ipa_polymorphic_call_context
     to properly interpret THIS pointers for thunks that has been converted
     to Gimple.  */
  if (node->definition)
    {
      /* Pack the three thunk booleans into one word: bit 0 is always
	 set (marker), bit 1 = this_adjusting, bit 2 = virtual_offset_p.  */
      streamer_write_uhwi_stream
	 (ob->main_stream,
	  1 + (node->thunk.this_adjusting != 0) * 2
	  + (node->thunk.virtual_offset_p != 0) * 4);
      streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
      streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
      streamer_write_uhwi_stream (ob->main_stream, node->thunk.indirect_offset);
    }
  streamer_write_hwi_stream (ob->main_stream, node->profile_id);
  streamer_write_hwi_stream (ob->main_stream, node->unit_id);
  /* Constructor/destructor priorities are streamed conditionally; the
     reader keys off the same DECL flags.  */
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    streamer_write_hwi_stream (ob->main_stream, node->get_init_priority ());
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    streamer_write_hwi_stream (ob->main_stream, node->get_fini_priority ());
}
568
/* Output the varpool NODE to OB.
   If NODE is not in the partition, then NODE is a boundary.  The field
   order is part of the LTO stream format.  */

static void
lto_output_varpool_node (struct lto_simple_output_block *ob, varpool_node *node,
			 lto_symtab_encoder_t encoder)
{
  bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
  bool encode_initializer_p
    = (node->definition
       && lto_symtab_encoder_encode_initializer_p (encoder, node));
  struct bitpack_d bp;
  int ref;
  const char *comdat;
  const char *section;
  tree group;

  gcc_assert (!encode_initializer_p || node->definition);
  gcc_assert (boundary_p || encode_initializer_p);

  streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
		       LTO_symtab_variable);
  streamer_write_hwi_stream (ob->main_stream, node->order);
  lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, node->externally_visible, 1);
  bp_pack_value (&bp, node->no_reorder, 1);
  bp_pack_value (&bp, node->force_output, 1);
  bp_pack_value (&bp, node->forced_by_abi, 1);
  bp_pack_value (&bp, node->unique_name, 1);
  /* body_removed: also set when we do not stream the initializer of a
     defined non-alias, so the reader treats it as bodiless.  */
  bp_pack_value (&bp,
		 node->body_removed
		 || (!encode_initializer_p && !node->alias && node->definition),
		 1);
  bp_pack_value (&bp, node->implicit_section, 1);
  bp_pack_value (&bp, node->writeonly, 1);
  /* definition: only claimed when the initializer (or alias) travels
     with this partition.  */
  bp_pack_value (&bp, node->definition && (encode_initializer_p || node->alias),
		 1);
  bp_pack_value (&bp, node->alias, 1);
  bp_pack_value (&bp, node->transparent_alias, 1);
  bp_pack_value (&bp, node->weakref, 1);
  bp_pack_value (&bp, node->symver, 1);
  bp_pack_value (&bp, node->analyzed && (!boundary_p || node->alias), 1);
  gcc_assert (node->definition || !node->analyzed);
  /* Constant pool initializers can be de-unified into individual ltrans units.
     FIXME: Alternatively at -Os we may want to avoid generating for them the local
     labels and share them across LTRANS partitions.  */
  if (node->get_partitioning_class () != SYMBOL_PARTITION)
    {
      bp_pack_value (&bp, 0, 1); /* used_from_other_partition.  */
      bp_pack_value (&bp, 0, 1); /* in_other_partition.  */
    }
  else
    {
      bp_pack_value (&bp, node->definition
		     && referenced_from_other_partition_p (node, encoder), 1);
      bp_pack_value (&bp, node->analyzed
		     && boundary_p && !DECL_EXTERNAL (node->decl), 1);
      /* in_other_partition.  */
    }
  bp_pack_value (&bp, node->tls_model, 3);
  bp_pack_value (&bp, node->used_by_single_function, 1);
  bp_pack_value (&bp, node->dynamically_initialized, 1);
  streamer_write_bitpack (&bp);

  /* Comdat group is streamed as a NUL-terminated string; "" means no
     group.  */
  group = node->get_comdat_group ();
  if (group)
    comdat = IDENTIFIER_POINTER (group);
  else
    comdat = "";
  streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);

  if (group)
    {
      if (node->same_comdat_group)
	{
	  ref = LCC_NOT_FOUND;
	  /* Walk the circular same_comdat_group ring until a member
	     present in ENCODER is found (or we come back to NODE).  */
	  for (struct symtab_node *n = node->same_comdat_group;
	       ref == LCC_NOT_FOUND && n != node; n = n->same_comdat_group)
	    ref = lto_symtab_encoder_lookup (encoder, n);
	}
      else
	ref = LCC_NOT_FOUND;
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  section = node->get_section ();
  if (!section)
    section = "";
  streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);

  streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
		       LDPR_NUM_KNOWN, node->resolution);
}
663
/* Output the IPA reference REF to OB using ENCODER.  (The original
   header comment was a stale copy from lto_output_varpool_node.)  */

static void
lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
		lto_symtab_encoder_t encoder)
{
  struct bitpack_d bp;
  int nref;
  /* Statement UIDs are biased by +1 so 0 can mean "no statement".  */
  int uid = !ref->stmt ? ref->lto_stmt_uid : gimple_uid (ref->stmt) + 1;
  struct cgraph_node *node;

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ref->use, 3);
  bp_pack_value (&bp, ref->speculative, 1);
  streamer_write_bitpack (&bp);
  nref = lto_symtab_encoder_lookup (encoder, ref->referred);
  gcc_assert (nref != LCC_NOT_FOUND);
  streamer_write_hwi_stream (ob->main_stream, nref);

  node = dyn_cast <cgraph_node *> (ref->referring);
  if (node)
    {
      if (ref->stmt)
	uid = gimple_uid (ref->stmt) + 1;
      streamer_write_hwi_stream (ob->main_stream, uid);
      /* The UID and a second bitpack (speculative_id) are emitted only
	 when the referring symbol is a cgraph node; the reader must
	 mirror this conditional.  */
      bp_pack_value (&bp, ref->speculative_id, 16);
      streamer_write_bitpack (&bp);
    }
}
694
695 /* Stream out profile_summary to OB. */
696
697 static void
698 output_profile_summary (struct lto_simple_output_block *ob)
699 {
700 if (profile_info)
701 {
702 /* We do not output num and run_max, they are not used by
703 GCC profile feedback and they are difficult to merge from multiple
704 units. */
705 unsigned runs = (profile_info->runs);
706 streamer_write_uhwi_stream (ob->main_stream, runs);
707
708 /* IPA-profile computes hot bb threshold based on cumulated
709 whole program profile. We need to stream it down to ltrans. */
710 if (flag_wpa)
711 streamer_write_gcov_count_stream (ob->main_stream,
712 get_hot_bb_threshold ());
713 }
714 else
715 streamer_write_uhwi_stream (ob->main_stream, 0);
716 }
717
718 /* Output all callees or indirect outgoing edges. EDGE must be the first such
719 edge. */
720
721 static void
722 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
723 struct lto_simple_output_block *ob,
724 lto_symtab_encoder_t encoder)
725 {
726 if (!edge)
727 return;
728
729 /* Output edges in backward direction, so the reconstructed callgraph match
730 and it is easy to associate call sites in the IPA pass summaries. */
731 while (edge->next_callee)
732 edge = edge->next_callee;
733 for (; edge; edge = edge->prev_callee)
734 lto_output_edge (ob, edge, encoder);
735 }
736
/* Output the IPA reference lists of all nodes in ENCODER.  Each record
   is (count, node-ref, refs...); a trailing 0 terminates the section.  */

static void
output_refs (lto_symtab_encoder_t encoder)
{
  struct lto_simple_output_block *ob;
  int count;
  struct ipa_ref *ref;

  ob = lto_create_simple_output_block (LTO_section_refs);

  for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);

      /* IPA_REF_ALIAS references are always preserved
	 in the boundary.  Alias node can't have other references and
	 can be always handled as if it's not in the boundary.  */
      if (!node->alias && !lto_symtab_encoder_in_partition_p (encoder, node))
	continue;

      count = node->ref_list.nreferences ();
      if (count)
	{
	  streamer_write_gcov_count_stream (ob->main_stream, count);
	  streamer_write_uhwi_stream (ob->main_stream,
				      lto_symtab_encoder_lookup (encoder, node));
	  /* Note: this inner I deliberately shadows the outer loop
	     counter; it indexes NODE's reference list.  */
	  for (int i = 0; node->iterate_reference (i, ref); i++)
	    lto_output_ref (ob, ref, encoder);
	}
    }

  /* Terminator for the refs section.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  lto_destroy_simple_output_block (ob);
}
773
/* Add NODE into ENCODER as well as the nodes it is cloned from.
   The recursion follows clone_of links before encoding NODE itself, so
   each clone's origin is encoded before the clone.  When INCLUDE_BODY
   is set, the body flag is applied to the ultimate origin of the clone
   chain (the only node whose body is actually streamed).  */

static void
add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
	     bool include_body)
{
  if (node->clone_of)
    add_node_to (encoder, node->clone_of, include_body);
  else if (include_body)
    lto_set_symtab_encoder_encode_body (encoder, node);
  lto_symtab_encoder_encode (encoder, node);
}
787
788 /* Add all references in NODE to encoders. */
789
790 static void
791 create_references (lto_symtab_encoder_t encoder, symtab_node *node)
792 {
793 int i;
794 struct ipa_ref *ref = NULL;
795 for (i = 0; node->iterate_reference (i, ref); i++)
796 if (is_a <cgraph_node *> (ref->referred))
797 add_node_to (encoder, dyn_cast <cgraph_node *> (ref->referred), false);
798 else
799 lto_symtab_encoder_encode (encoder, ref->referred);
800 }
801
802 /* Select what needs to be streamed out. In regular lto mode stream everything.
803 In offload lto mode stream only nodes marked as offloadable. */
804 void
805 select_what_to_stream (void)
806 {
807 struct symtab_node *snode;
808 FOR_EACH_SYMBOL (snode)
809 snode->need_lto_streaming = !lto_stream_offload_p || snode->offloadable;
810 }
811
/* Find all symbols we want to stream into given partition and insert them
   to encoders.

   The function actually replaces IN_ENCODER by new one.  The reason is that
   streaming code needs clone's origin to be streamed before clone.  This
   means that we need to insert the nodes in specific order.  This order is
   ignored by the partitioning logic earlier.  */

lto_symtab_encoder_t
compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
{
  struct cgraph_edge *edge;
  int i;
  lto_symtab_encoder_t encoder;
  lto_symtab_encoder_iterator lsei;
  /* Caches polymorphic-call target sets already added, keyed by the
     cache token from possible_polymorphic_call_targets.  */
  hash_set<void *> reachable_call_targets;

  encoder = lto_symtab_encoder_new (false);

  /* Go over all entries in the IN_ENCODER and duplicate them to
     ENCODER.  At the same time insert masters of clones so
     every master appears before clone.  */
  for (lsei = lsei_start_function_in_partition (in_encoder);
       !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
    {
      struct cgraph_node *node = lsei_cgraph_node (lsei);
      if (!node->need_lto_streaming)
	continue;
      add_node_to (encoder, node, true);
      lto_set_symtab_encoder_in_partition (encoder, node);
      create_references (encoder, node);
    }
  for (lsei = lsei_start_variable_in_partition (in_encoder);
       !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
    {
      varpool_node *vnode = lsei_varpool_node (lsei);

      if (!vnode->need_lto_streaming)
	continue;
      lto_set_symtab_encoder_in_partition (encoder, vnode);
      lto_set_symtab_encoder_encode_initializer (encoder, vnode);
      create_references (encoder, vnode);
    }
  /* Pickle in also the initializer of all referenced readonly variables
     to help folding.  Constant pool variables are not shared, so we must
     pickle those too.  */
  for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
	{
	  if (!lto_symtab_encoder_encode_initializer_p (encoder,
							vnode)
	      && (((vnode->ctor_useable_for_folding_p ()
		    && (!DECL_VIRTUAL_P (vnode->decl)
			|| !flag_wpa
			|| flag_ltrans_devirtualize)))))
	    {
	      lto_set_symtab_encoder_encode_initializer (encoder, vnode);
	      create_references (encoder, vnode);
	    }
	}
    }

  /* Go over all the nodes again to include callees that are not in
     SET.  */
  for (lsei = lsei_start_function_in_partition (encoder);
       !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
    {
      struct cgraph_node *node = lsei_cgraph_node (lsei);
      for (edge = node->callees; edge; edge = edge->next_callee)
	{
	  struct cgraph_node *callee = edge->callee;
	  if (!lto_symtab_encoder_in_partition_p (encoder, callee))
	    {
	      /* We should have moved all the inlines.  */
	      gcc_assert (!callee->inlined_to);
	      add_node_to (encoder, callee, false);
	    }
	}
      /* Add all possible targets for late devirtualization.  */
      if (flag_ltrans_devirtualize || !flag_wpa)
	for (edge = node->indirect_calls; edge; edge = edge->next_callee)
	  if (edge->indirect_info->polymorphic)
	    {
	      unsigned int i;
	      void *cache_token;
	      bool final;
	      vec <cgraph_node *>targets
		= possible_polymorphic_call_targets
		    (edge, &final, &cache_token);
	      /* Each distinct target set is processed only once.  */
	      if (!reachable_call_targets.add (cache_token))
		{
		  for (i = 0; i < targets.length (); i++)
		    {
		      struct cgraph_node *callee = targets[i];

		      /* Adding an external declarations into the unit serves
			 no purpose and just increases its boundary.  */
		      if (callee->definition
			  && !lto_symtab_encoder_in_partition_p
			       (encoder, callee))
			{
			  gcc_assert (!callee->inlined_to);
			  add_node_to (encoder, callee, false);
			}
		    }
		}
	    }
    }
  /* Be sure to also insert alias target and thunk callees.  These needs
     to stay to aid local calling conventions.  */
  for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (node);

      if (node->alias && node->analyzed)
	create_references (encoder, node);
      if (cnode
	  && cnode->thunk.thunk_p && !cnode->inlined_to)
	add_node_to (encoder, cnode->callees->callee, false);
      /* Follow chains of transparent aliases down to the real target.  */
      while (node->transparent_alias && node->analyzed)
	{
	  node = node->get_alias_target ();
	  if (is_a <cgraph_node *> (node))
	    add_node_to (encoder, dyn_cast <cgraph_node *> (node),
			 false);
	  else
	    lto_symtab_encoder_encode (encoder, node);
	}
    }
  lto_symtab_encoder_delete (in_encoder);
  return encoder;
}
947
/* Output the partition's part of the symbol table: profile summary,
   nodes, edges, toplevel asms and reference lists.  */

void
output_symtab (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob;
  int i, n_nodes;
  lto_symtab_encoder_t encoder;

  if (flag_wpa)
    output_cgraph_opt_summary ();

  ob = lto_create_simple_output_block (LTO_section_symtab_nodes);

  output_profile_summary (ob);

  /* An encoder for cgraph nodes should have been created by
     ipa_write_summaries_1.  */
  gcc_assert (ob->decl_state->symtab_node_encoder);
  encoder = ob->decl_state->symtab_node_encoder;

  /* Write out the nodes.  We must first output a node and then its clones,
     otherwise at a time reading back the node there would be nothing to clone
     from.  */
  n_nodes = lto_symtab_encoder_size (encoder);
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	lto_output_node (ob, cnode, encoder);
      else
	lto_output_varpool_node (ob, dyn_cast<varpool_node *> (node), encoder);
    }

  /* Go over the nodes in SET again to write edges.  Edges of boundary
     thunks are also streamed so the thunk bodies remain callable.  */
  for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder, i));
      if (node
	  && ((node->thunk.thunk_p && !node->inlined_to)
	      || lto_symtab_encoder_in_partition_p (encoder, node)))
	{
	  output_outgoing_cgraph_edges (node->callees, ob, encoder);
	  output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
	}
    }

  /* Terminator for the nodes/edges section.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  lto_destroy_simple_output_block (ob);

  /* Emit toplevel asms.
     When doing WPA we must output every asm just once.  Since we do not partition asm
     nodes at all, output them to first output.  This is kind of hack, but should work
     well.  */
  if (!asm_nodes_output)
    {
      asm_nodes_output = true;
      lto_output_toplevel_asms ();
    }

  output_refs (encoder);
}
1012
1013 /* Return identifier encoded in IB as a plain string. */
1014
1015 static tree
1016 read_identifier (class lto_input_block *ib)
1017 {
1018 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1019 tree id;
1020
1021 if (ib->data[ib->p + len])
1022 lto_section_overrun (ib);
1023 if (!len)
1024 {
1025 ib->p++;
1026 return NULL;
1027 }
1028 id = get_identifier (ib->data + ib->p);
1029 ib->p += len + 1;
1030 return id;
1031 }
1032
1033 /* Return string encoded in IB, NULL if string is empty. */
1034
1035 static const char *
1036 read_string (class lto_input_block *ib)
1037 {
1038 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1039 const char *str;
1040
1041 if (ib->data[ib->p + len])
1042 lto_section_overrun (ib);
1043 if (!len)
1044 {
1045 ib->p++;
1046 return NULL;
1047 }
1048 str = ib->data + ib->p;
1049 ib->p += len + 1;
1050 return str;
1051 }
1052
/* Output function/variable tables that will allow libgomp to look up offload
   target code.
   OFFLOAD_FUNCS is filled in expand_omp_target, OFFLOAD_VARS is filled in
   varpool_node::get_create.  In WHOPR (partitioned) mode during the WPA stage
   both OFFLOAD_FUNCS and OFFLOAD_VARS are filled by input_offload_tables.  */

void
output_offload_tables (void)
{
  /* Nothing to stream when no offloaded entities were recorded.  */
  if (vec_safe_is_empty (offload_funcs) && vec_safe_is_empty (offload_vars))
    return;

  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_offload_table);

  /* Functions are tagged LTO_symtab_unavail_node and variables
     LTO_symtab_variable; input_offload_tables keys on these tags.  */
  for (unsigned i = 0; i < vec_safe_length (offload_funcs); i++)
    {
      streamer_write_enum (ob->main_stream, LTO_symtab_tags,
			   LTO_symtab_last_tag, LTO_symtab_unavail_node);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
				(*offload_funcs)[i]);
    }

  for (unsigned i = 0; i < vec_safe_length (offload_vars); i++)
    {
      streamer_write_enum (ob->main_stream, LTO_symtab_tags,
			   LTO_symtab_last_tag, LTO_symtab_variable);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream,
				 (*offload_vars)[i]);
    }

  /* A zero tag terminates the table.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);
  lto_destroy_simple_output_block (ob);

  /* In WHOPR mode during the WPA stage the joint offload tables need to be
     streamed to one partition only.  That's why we free offload_funcs and
     offload_vars after the first call of output_offload_tables.  */
  if (flag_wpa)
    {
      vec_free (offload_funcs);
      vec_free (offload_vars);
    }
}
1096
/* Verify the partitioning of NODE.  On the accel (offload) compiler,
   diagnose symbols referenced from offloaded code that were not marked
   for offloading; on the host, assert that no cross-partition state is
   present outside of LTRANS.  */

static inline void
verify_node_partition (symtab_node *node)
{
  /* During LTRANS cross-partition references are expected.  */
  if (flag_ltrans)
    return;

#ifdef ACCEL_COMPILER
  if (node->in_other_partition)
    {
      if (TREE_CODE (node->decl) == FUNCTION_DECL)
	error_at (DECL_SOURCE_LOCATION (node->decl),
		  "function %qs has been referenced in offloaded code but"
		  " hasn%'t been marked to be included in the offloaded code",
		  node->name ());
      else if (VAR_P (node->decl))
	error_at (DECL_SOURCE_LOCATION (node->decl),
		  "variable %qs has been referenced in offloaded code but"
		  " hasn%'t been marked to be included in the offloaded code",
		  node->name ());
      else
	/* Only functions and variables can appear in offload tables.  */
	gcc_unreachable ();
    }
#else
  gcc_assert (!node->in_other_partition
	      && !node->used_from_other_partition);
#endif
}
1126
/* Overwrite the information in NODE based on FILE_DATA, TAG, FLAGS,
   STACK_SIZE, SELF_TIME and SELF_SIZE.  This is called either to initialize
   NODE or to replace the values in it, for instance because the first
   time we saw it, the function body was not available but now it
   is.  BP is a bitpack with all the bitflags for NODE read from the
   stream.

   The sequence of bp_unpack_value calls below must exactly mirror the
   bp_pack_value order on the output side; do not reorder them.  */

static void
input_overwrite_node (struct lto_file_decl_data *file_data,
		      struct cgraph_node *node,
		      enum LTO_symtab_tags tag,
		      struct bitpack_d *bp)
{
  /* Stash the tag in AUX so input_cgraph_1 can tell which nodes came
     from this stream.  */
  node->aux = (void *) tag;
  node->lto_file_data = file_data;

  node->local = bp_unpack_value (bp, 1);
  node->externally_visible = bp_unpack_value (bp, 1);
  node->no_reorder = bp_unpack_value (bp, 1);
  node->definition = bp_unpack_value (bp, 1);
  node->versionable = bp_unpack_value (bp, 1);
  node->can_change_signature = bp_unpack_value (bp, 1);
  node->redefined_extern_inline = bp_unpack_value (bp, 1);
  node->force_output = bp_unpack_value (bp, 1);
  node->forced_by_abi = bp_unpack_value (bp, 1);
  node->unique_name = bp_unpack_value (bp, 1);
  node->body_removed = bp_unpack_value (bp, 1);
  node->implicit_section = bp_unpack_value (bp, 1);
  node->address_taken = bp_unpack_value (bp, 1);
  node->used_from_other_partition = bp_unpack_value (bp, 1);
  node->lowered = bp_unpack_value (bp, 1);
  /* Analyzed nodes are exactly those streamed with the analyzed tag;
     this flag is derived from TAG, not read from the bitpack.  */
  node->analyzed = tag == LTO_symtab_analyzed_node;
  node->in_other_partition = bp_unpack_value (bp, 1);
  if (node->in_other_partition
      /* Avoid updating decl when we are seeing just inline clone.
	 When inlining function that has functions already inlined into it,
	 we produce clones of inline clones.

	 WPA partitioning might put each clone into different unit and
	 we might end up streaming inline clone from other partition
	 to support clone we are interested in. */
      && (!node->clone_of
	  || node->clone_of->decl != node->decl))
    {
      DECL_EXTERNAL (node->decl) = 1;
      TREE_STATIC (node->decl) = 0;
    }
  node->alias = bp_unpack_value (bp, 1);
  node->transparent_alias = bp_unpack_value (bp, 1);
  node->weakref = bp_unpack_value (bp, 1);
  node->symver = bp_unpack_value (bp, 1);
  node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
  node->only_called_at_startup = bp_unpack_value (bp, 1);
  node->only_called_at_exit = bp_unpack_value (bp, 1);
  node->tm_clone = bp_unpack_value (bp, 1);
  node->calls_comdat_local = bp_unpack_value (bp, 1);
  node->icf_merged = bp_unpack_value (bp, 1);
  node->nonfreeing_fn = bp_unpack_value (bp, 1);
  node->merged_comdat = bp_unpack_value (bp, 1);
  node->merged_extern_inline = bp_unpack_value (bp, 1);
  node->thunk.thunk_p = bp_unpack_value (bp, 1);
  node->parallelized_function = bp_unpack_value (bp, 1);
  node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
				     LDPR_NUM_KNOWN);
  node->split_part = bp_unpack_value (bp, 1);
  verify_node_partition (node);
}
1194
1195 /* Return string alias is alias of. */
1196
1197 static tree
1198 get_alias_symbol (tree decl)
1199 {
1200 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1201 return get_identifier (TREE_STRING_POINTER
1202 (TREE_VALUE (TREE_VALUE (alias))));
1203 }
1204
/* Read a cgraph node from input_block IB.  TAG is the node's tag just read.
   NODES holds the nodes read so far, used to resolve clone references.
   Return the node read or overwritten.  */

static struct cgraph_node *
input_node (struct lto_file_decl_data *file_data,
	    class lto_input_block *ib,
	    enum LTO_symtab_tags tag,
	    vec<symtab_node *> nodes)
{
  gcc::pass_manager *passes = g->get_passes ();
  tree fn_decl;
  struct cgraph_node *node;
  struct bitpack_d bp;
  unsigned decl_index;
  int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
  int clone_ref;
  int order;
  int i, count;
  tree group;
  const char *section;
  order = streamer_read_hwi (ib) + file_data->order_base;
  clone_ref = streamer_read_hwi (ib);

  decl_index = streamer_read_uhwi (ib);
  fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);

  if (clone_ref != LCC_NOT_FOUND)
    {
      /* This node is a clone: materialize it from the already-read
	 original it refers to.  */
      node = dyn_cast<cgraph_node *> (nodes[clone_ref])->create_clone (fn_decl,
	profile_count::uninitialized (), false,
	vNULL, false, NULL, NULL);
    }
  else
    {
      /* Declaration of functions can be already merged with a declaration
	 from other input file.  We keep cgraph unmerged until after streaming
	 of ipa passes is done.  Always forcingly create a fresh node.  */
      node = symtab->create_empty ();
      node->decl = fn_decl;
      if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (fn_decl)))
	node->ifunc_resolver = 1;
      node->register_symbol ();
    }

  /* Orders were rebased by FILE_DATA->order_base above; keep the global
     counter ahead of everything we read.  */
  node->order = order;
  if (order >= symtab->order)
    symtab->order = order + 1;

  node->count = profile_count::stream_in (ib);
  node->count_materialization_scale = streamer_read_hwi (ib);

  /* Read the list of IPA transform passes still to be applied to this
     node's body at materialization time.  */
  count = streamer_read_hwi (ib);
  node->ipa_transforms_to_apply = vNULL;
  for (i = 0; i < count; i++)
    {
      opt_pass *pass;
      int pid = streamer_read_hwi (ib);

      gcc_assert (pid < passes->passes_by_id_size);
      pass = passes->passes_by_id[pid];
      node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass_d *) pass);
    }

  /* The inlined_to reference is only streamed for analyzed nodes.  */
  if (tag == LTO_symtab_analyzed_node)
    ref = streamer_read_hwi (ib);

  group = read_identifier (ib);
  if (group)
    ref2 = streamer_read_hwi (ib);

  /* Make sure that we have not read this node before.  Nodes that
     have already been read will have their tag stored in the 'aux'
     field.  Since built-in functions can be referenced in multiple
     functions, they are expected to be read more than once.  */
  if (node->aux && !fndecl_built_in_p (node->decl))
    internal_error ("bytecode stream: found multiple instances of cgraph "
		    "node with uid %d", node->get_uid ());

  node->tp_first_run = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);

  input_overwrite_node (file_data, node, tag, &bp);

  /* Store a reference for now, and fix up later to be a pointer.  */
  node->inlined_to = (cgraph_node *) (intptr_t) ref;

  if (group)
    {
      node->set_comdat_group (group);
      /* Store a reference for now, and fix up later to be a pointer.  */
      node->same_comdat_group = (symtab_node *) (intptr_t) ref2;
    }
  else
    node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
  section = read_string (ib);
  if (section)
    node->set_section_for_node (section);

  if (node->definition)
    {
      /* Thunk data is streamed for every definition; TYPE packs the
	 this_adjusting (bit 1) and virtual_offset_p (bit 2) flags.  */
      int type = streamer_read_uhwi (ib);
      HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
      HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);
      HOST_WIDE_INT indirect_offset = streamer_read_uhwi (ib);

      node->thunk.fixed_offset = fixed_offset;
      node->thunk.virtual_value = virtual_value;
      node->thunk.indirect_offset = indirect_offset;
      node->thunk.this_adjusting = (type & 2);
      node->thunk.virtual_offset_p = (type & 4);
    }
  if (node->alias && !node->analyzed && node->weakref)
    node->alias_target = get_alias_symbol (node->decl);
  node->profile_id = streamer_read_hwi (ib);
  node->unit_id = streamer_read_hwi (ib) + file_data->unit_base;
  if (symtab->max_unit < node->unit_id)
    symtab->max_unit = node->unit_id;
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    node->set_init_priority (streamer_read_hwi (ib));
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    node->set_fini_priority (streamer_read_hwi (ib));

  return node;
}
1330
1331 /* Read a node from input_block IB. TAG is the node's tag just read.
1332 Return the node read or overwriten. */
1333
1334 static varpool_node *
1335 input_varpool_node (struct lto_file_decl_data *file_data,
1336 class lto_input_block *ib)
1337 {
1338 int decl_index;
1339 tree var_decl;
1340 varpool_node *node;
1341 struct bitpack_d bp;
1342 int ref = LCC_NOT_FOUND;
1343 int order;
1344 tree group;
1345 const char *section;
1346
1347 order = streamer_read_hwi (ib) + file_data->order_base;
1348 decl_index = streamer_read_uhwi (ib);
1349 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1350
1351 /* Declaration of functions can be already merged with a declaration
1352 from other input file. We keep cgraph unmerged until after streaming
1353 of ipa passes is done. Alays forcingly create a fresh node. */
1354 node = varpool_node::create_empty ();
1355 node->decl = var_decl;
1356 node->register_symbol ();
1357
1358 node->order = order;
1359 if (order >= symtab->order)
1360 symtab->order = order + 1;
1361 node->lto_file_data = file_data;
1362
1363 bp = streamer_read_bitpack (ib);
1364 node->externally_visible = bp_unpack_value (&bp, 1);
1365 node->no_reorder = bp_unpack_value (&bp, 1);
1366 node->force_output = bp_unpack_value (&bp, 1);
1367 node->forced_by_abi = bp_unpack_value (&bp, 1);
1368 node->unique_name = bp_unpack_value (&bp, 1);
1369 node->body_removed = bp_unpack_value (&bp, 1);
1370 node->implicit_section = bp_unpack_value (&bp, 1);
1371 node->writeonly = bp_unpack_value (&bp, 1);
1372 node->definition = bp_unpack_value (&bp, 1);
1373 node->alias = bp_unpack_value (&bp, 1);
1374 node->transparent_alias = bp_unpack_value (&bp, 1);
1375 node->weakref = bp_unpack_value (&bp, 1);
1376 node->symver = bp_unpack_value (&bp, 1);
1377 node->analyzed = bp_unpack_value (&bp, 1);
1378 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1379 node->in_other_partition = bp_unpack_value (&bp, 1);
1380 if (node->in_other_partition)
1381 {
1382 DECL_EXTERNAL (node->decl) = 1;
1383 TREE_STATIC (node->decl) = 0;
1384 }
1385 if (node->alias && !node->analyzed && node->weakref)
1386 node->alias_target = get_alias_symbol (node->decl);
1387 node->tls_model = (enum tls_model)bp_unpack_value (&bp, 3);
1388 node->used_by_single_function = (enum tls_model)bp_unpack_value (&bp, 1);
1389 node->dynamically_initialized = bp_unpack_value (&bp, 1);
1390 group = read_identifier (ib);
1391 if (group)
1392 {
1393 node->set_comdat_group (group);
1394 ref = streamer_read_hwi (ib);
1395 /* Store a reference for now, and fix up later to be a pointer. */
1396 node->same_comdat_group = (symtab_node *) (intptr_t) ref;
1397 }
1398 else
1399 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1400 section = read_string (ib);
1401 if (section)
1402 node->set_section_for_node (section);
1403 node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1404 LDPR_NUM_KNOWN);
1405 verify_node_partition (node);
1406 return node;
1407 }
1408
1409 /* Read a node from input_block IB. TAG is the node's tag just read.
1410 Return the node read or overwriten. */
1411
1412 static void
1413 input_ref (class lto_input_block *ib,
1414 symtab_node *referring_node,
1415 vec<symtab_node *> nodes)
1416 {
1417 symtab_node *node = NULL;
1418 struct bitpack_d bp;
1419 enum ipa_ref_use use;
1420 bool speculative;
1421 struct ipa_ref *ref;
1422
1423 bp = streamer_read_bitpack (ib);
1424 use = (enum ipa_ref_use) bp_unpack_value (&bp, 3);
1425 speculative = (enum ipa_ref_use) bp_unpack_value (&bp, 1);
1426 node = nodes[streamer_read_hwi (ib)];
1427 ref = referring_node->create_reference (node, use);
1428 ref->speculative = speculative;
1429 if (is_a <cgraph_node *> (referring_node))
1430 {
1431 ref->lto_stmt_uid = streamer_read_hwi (ib);
1432 bp = streamer_read_bitpack (ib);
1433 ref->speculative_id = bp_unpack_value (&bp, 16);
1434 }
1435 }
1436
/* Read an edge from IB.  NODES points to a vector of previously read nodes for
   decoding caller and callee of the edge to be read.  If INDIRECT is true, the
   edge being read is indirect (in the sense that it has
   indirect_unknown_callee set).  */

static void
input_edge (class lto_input_block *ib, vec<symtab_node *> nodes,
	    bool indirect)
{
  struct cgraph_node *caller, *callee;
  struct cgraph_edge *edge;
  unsigned int stmt_id, speculative_id;
  profile_count count;
  cgraph_inline_failed_t inline_failed;
  struct bitpack_d bp;
  int ecf_flags = 0;

  caller = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
  if (caller == NULL || caller->decl == NULL_TREE)
    internal_error ("bytecode stream: no caller found while reading edge");

  /* A callee index is only present for direct edges.  */
  if (!indirect)
    {
      callee = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
      if (callee == NULL || callee->decl == NULL_TREE)
	internal_error ("bytecode stream: no callee found while reading edge");
    }
  else
    callee = NULL;

  count = profile_count::stream_in (ib);

  bp = streamer_read_bitpack (ib);
  inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_t, CIF_N_REASONS);
  stmt_id = bp_unpack_var_len_unsigned (&bp);
  speculative_id = bp_unpack_value (&bp, 16);

  if (indirect)
    edge = caller->create_indirect_edge (NULL, 0, count);
  else
    edge = caller->create_edge (callee, NULL, count);

  edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
  edge->speculative = bp_unpack_value (&bp, 1);
  edge->lto_stmt_uid = stmt_id;
  edge->speculative_id = speculative_id;
  edge->inline_failed = inline_failed;
  edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
  edge->can_throw_external = bp_unpack_value (&bp, 1);
  edge->in_polymorphic_cdtor = bp_unpack_value (&bp, 1);
  /* ECF flags and the speculative call target count are streamed only
     for indirect edges; the unpack order mirrors the output side.  */
  if (indirect)
    {
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_CONST;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_PURE;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NORETURN;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_MALLOC;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NOTHROW;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_RETURNS_TWICE;
      edge->indirect_info->ecf_flags = ecf_flags;

      edge->indirect_info->num_speculative_call_targets
	= bp_unpack_value (&bp, 16);
    }
}
1507
1508
/* Read a cgraph from IB using the info in FILE_DATA.  Return the vector
   of all symbol table nodes read, in stream order, so indexes encoded
   in other sections can be resolved against it.  */

static vec<symtab_node *>
input_cgraph_1 (struct lto_file_decl_data *file_data,
		class lto_input_block *ib)
{
  enum LTO_symtab_tags tag;
  vec<symtab_node *> nodes = vNULL;
  symtab_node *node;
  unsigned i;

  tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
  /* Rebase orders and unit ids so symbols from different input files do
     not clash.  */
  file_data->order_base = symtab->order;
  file_data->unit_base = symtab->max_unit + 1;
  while (tag)
    {
      if (tag == LTO_symtab_edge)
	input_edge (ib, nodes, false);
      else if (tag == LTO_symtab_indirect_edge)
	input_edge (ib, nodes, true);
      else if (tag == LTO_symtab_variable)
	{
	  node = input_varpool_node (file_data, ib);
	  nodes.safe_push (node);
	  lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
	}
      else
	{
	  node = input_node (file_data, ib, tag, nodes);
	  if (node == NULL || node->decl == NULL_TREE)
	    internal_error ("bytecode stream: found empty cgraph node");
	  nodes.safe_push (node);
	  lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
	}

      tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
    }

  lto_input_toplevel_asms (file_data, file_data->order_base);

  /* AUX pointers should be all non-zero for function nodes read from the stream.  */
  if (flag_checking)
    {
      FOR_EACH_VEC_ELT (nodes, i, node)
	gcc_assert (node->aux || !is_a <cgraph_node *> (node));
    }
  /* Resolve the integer references stashed in inlined_to and
     same_comdat_group into real pointers now that all nodes exist.  */
  FOR_EACH_VEC_ELT (nodes, i, node)
    {
      int ref;
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  ref = (int) (intptr_t) cnode->inlined_to;

	  /* We share declaration of builtins, so we may read same node twice.  */
	  if (!node->aux)
	    continue;
	  node->aux = NULL;

	  /* Fixup inlined_to from reference to pointer.  */
	  if (ref != LCC_NOT_FOUND)
	    dyn_cast<cgraph_node *> (node)->inlined_to
	      = dyn_cast<cgraph_node *> (nodes[ref]);
	  else
	    cnode->inlined_to = NULL;
	}

      ref = (int) (intptr_t) node->same_comdat_group;

      /* Fixup same_comdat_group from reference to pointer.  */
      if (ref != LCC_NOT_FOUND)
	node->same_comdat_group = nodes[ref];
      else
	node->same_comdat_group = NULL;
    }
  /* Mark function nodes in AUX so later consumers (e.g. the opt summary
     reader) can distinguish them cheaply.  */
  FOR_EACH_VEC_ELT (nodes, i, node)
    node->aux = is_a <cgraph_node *> (node) ? (void *)1 : NULL;
  return nodes;
}
1587
1588 /* Input ipa_refs. */
1589
1590 static void
1591 input_refs (class lto_input_block *ib,
1592 vec<symtab_node *> nodes)
1593 {
1594 int count;
1595 int idx;
1596 while (true)
1597 {
1598 symtab_node *node;
1599 count = streamer_read_uhwi (ib);
1600 if (!count)
1601 break;
1602 idx = streamer_read_uhwi (ib);
1603 node = nodes[idx];
1604 while (count)
1605 {
1606 input_ref (ib, node, nodes);
1607 count--;
1608 }
1609 }
1610 }
1611
1612 /* Input profile_info from IB. */
1613 static void
1614 input_profile_summary (class lto_input_block *ib,
1615 struct lto_file_decl_data *file_data)
1616 {
1617 unsigned int runs = streamer_read_uhwi (ib);
1618 if (runs)
1619 {
1620 file_data->profile_info.runs = runs;
1621
1622 /* IPA-profile computes hot bb threshold based on cumulated
1623 whole program profile. We need to stream it down to ltrans. */
1624 if (flag_ltrans)
1625 set_hot_bb_threshold (streamer_read_gcov_count (ib));
1626 }
1627
1628 }
1629
1630 /* Rescale profile summaries to the same number of runs in the whole unit. */
1631
1632 static void
1633 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1634 {
1635 struct lto_file_decl_data *file_data;
1636 unsigned int j;
1637 gcov_unsigned_t max_runs = 0;
1638 struct cgraph_node *node;
1639 struct cgraph_edge *edge;
1640
1641 /* Find unit with maximal number of runs. If we ever get serious about
1642 roundoff errors, we might also consider computing smallest common
1643 multiply. */
1644 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1645 if (max_runs < file_data->profile_info.runs)
1646 max_runs = file_data->profile_info.runs;
1647
1648 if (!max_runs)
1649 return;
1650
1651 /* Simple overflow check. We probably don't need to support that many train
1652 runs. Such a large value probably imply data corruption anyway. */
1653 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1654 {
1655 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1656 INT_MAX / REG_BR_PROB_BASE);
1657 return;
1658 }
1659
1660 profile_info = XCNEW (gcov_summary);
1661 profile_info->runs = max_runs;
1662
1663 /* If merging already happent at WPA time, we are done. */
1664 if (flag_ltrans)
1665 return;
1666
1667 /* Now compute count_materialization_scale of each node.
1668 During LTRANS we already have values of count_materialization_scale
1669 computed, so just update them. */
1670 FOR_EACH_FUNCTION (node)
1671 if (node->lto_file_data
1672 && node->lto_file_data->profile_info.runs)
1673 {
1674 int scale;
1675
1676 scale = RDIV (node->count_materialization_scale * max_runs,
1677 node->lto_file_data->profile_info.runs);
1678 node->count_materialization_scale = scale;
1679 if (scale < 0)
1680 fatal_error (input_location, "Profile information in %s corrupted",
1681 file_data->file_name);
1682
1683 if (scale == REG_BR_PROB_BASE)
1684 continue;
1685 for (edge = node->callees; edge; edge = edge->next_callee)
1686 if (edge->count.ipa ().nonzero_p ())
1687 edge->count = edge->count.apply_scale (scale, REG_BR_PROB_BASE);
1688 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
1689 if (edge->count.ipa ().nonzero_p ())
1690 edge->count = edge->count.apply_scale (scale, REG_BR_PROB_BASE);
1691 if (node->count.ipa ().nonzero_p ())
1692 node->count = node->count.apply_scale (scale, REG_BR_PROB_BASE);
1693 }
1694 }
1695
/* Input and merge the symtab from each of the .o files passed to
   lto1.  */

void
input_symtab (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;
  struct cgraph_node *node;

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      class lto_input_block *ib;
      vec<symtab_node *> nodes;

      /* Read the symbol table proper: profile summary, nodes and edges.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
					  &data, &len);
      if (!ib)
	fatal_error (input_location,
		     "cannot find LTO cgraph in %s", file_data->file_name);
      input_profile_summary (ib, file_data);
      file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
      nodes = input_cgraph_1 (file_data, ib);
      lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
				      ib, data, len);

      /* Read the ipa reference lists for the nodes just created.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_refs,
					  &data, &len);
      if (!ib)
	fatal_error (input_location, "cannot find LTO section refs in %s",
		     file_data->file_name);
      input_refs (ib, nodes);
      lto_destroy_simple_input_block (file_data, LTO_section_refs,
				      ib, data, len);
      /* Optimization summaries were produced at WPA time and are only
	 consumed during LTRANS.  */
      if (flag_ltrans)
	input_cgraph_opt_summary (nodes);
      nodes.release ();
    }

  merge_profile_summaries (file_data_vec);

  /* Clear out the aux field that was used to store enough state to
     tell which nodes should be overwritten.  */
  FOR_EACH_FUNCTION (node)
    {
      /* Some nodes may have been created by cgraph_node.  This
	 happens when the callgraph contains nested functions.  If the
	 node for the parent function was never emitted to the gimple
	 file, cgraph_node will create a node for it when setting the
	 context of the nested function.  */
      if (node->lto_file_data)
	node->aux = NULL;
    }
}
1753
/* Input function/variable tables that will allow libgomp to look up offload
   target code, and store them into OFFLOAD_FUNCS and OFFLOAD_VARS.
   When DO_FORCE_OUTPUT is set, mark the entries so IPA cannot remove
   them as unreachable/unused.  */

void
input_offload_tables (bool do_force_output)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      class lto_input_block *ib
	= lto_create_simple_input_block (file_data, LTO_section_offload_table,
					 &data, &len);
      /* Files without an offload table are simply skipped.  */
      if (!ib)
	continue;

      /* The tags mirror those written by output_offload_tables; a zero
	 tag terminates the table.  */
      enum LTO_symtab_tags tag
	= streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
      while (tag)
	{
	  if (tag == LTO_symtab_unavail_node)
	    {
	      int decl_index = streamer_read_uhwi (ib);
	      tree fn_decl
		= lto_file_decl_data_get_fn_decl (file_data, decl_index);
	      vec_safe_push (offload_funcs, fn_decl);

	      /* Prevent IPA from removing fn_decl as unreachable, since there
		 may be no refs from the parent function to child_fn in offload
		 LTO mode.  */
	      if (do_force_output)
		cgraph_node::get (fn_decl)->mark_force_output ();
	    }
	  else if (tag == LTO_symtab_variable)
	    {
	      int decl_index = streamer_read_uhwi (ib);
	      tree var_decl
		= lto_file_decl_data_get_var_decl (file_data, decl_index);
	      vec_safe_push (offload_vars, var_decl);

	      /* Prevent IPA from removing var_decl as unused, since there
		 may be no refs to var_decl in offload LTO mode.  */
	      if (do_force_output)
		varpool_node::get (var_decl)->force_output = 1;
	    }
	  else
	    fatal_error (input_location,
			 "invalid offload table in %s", file_data->file_name);

	  tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
	}

      lto_destroy_simple_input_block (file_data, LTO_section_offload_table,
				      ib, data, len);
    }
}
1814
1815 /* True when we need optimization summary for NODE. */
1816
1817 static int
1818 output_cgraph_opt_summary_p (struct cgraph_node *node)
1819 {
1820 return ((node->clone_of || node->former_clone_of)
1821 && (node->clone.tree_map
1822 || node->clone.param_adjustments));
1823 }
1824
/* Output optimization summary for EDGE to OB.
   Nothing is currently streamed per edge; this placeholder keeps the
   writer symmetric with input_edge_opt_summary.  */
static void
output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
			 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
{
}
1831
/* Output optimization summary for NODE to OB: the parameter adjustments
   and tree replacement map that describe how NODE's clone differs from
   its original, followed (for nodes in this partition) by per-edge
   summaries.  */

static void
output_node_opt_summary (struct output_block *ob,
			 struct cgraph_node *node,
			 lto_symtab_encoder_t encoder)
{
  struct ipa_replace_map *map;
  int i;
  struct cgraph_edge *e;

  /* TODO: Should this code be moved to ipa-param-manipulation?  */
  struct bitpack_d bp;
  bp = bitpack_create (ob->main_stream);
  /* A single presence bit tells the reader whether adjustments follow.  */
  bp_pack_value (&bp, (node->clone.param_adjustments != NULL), 1);
  streamer_write_bitpack (&bp);
  if (ipa_param_adjustments *adjustments = node->clone.param_adjustments)
    {
      streamer_write_uhwi (ob, vec_safe_length (adjustments->m_adj_params));
      ipa_adjusted_param *adj;
      FOR_EACH_VEC_SAFE_ELT (adjustments->m_adj_params, i, adj)
	{
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, adj->base_index, IPA_PARAM_MAX_INDEX_BITS);
	  bp_pack_value (&bp, adj->prev_clone_index, IPA_PARAM_MAX_INDEX_BITS);
	  bp_pack_value (&bp, adj->op, 2);
	  bp_pack_value (&bp, adj->param_prefix_index, 2);
	  bp_pack_value (&bp, adj->prev_clone_adjustment, 1);
	  bp_pack_value (&bp, adj->reverse, 1);
	  bp_pack_value (&bp, adj->user_flag, 1);
	  streamer_write_bitpack (&bp);
	  /* Types are only meaningful for split or newly synthesized
	     parameters.  */
	  if (adj->op == IPA_PARAM_OP_SPLIT
	      || adj->op == IPA_PARAM_OP_NEW)
	    {
	      stream_write_tree (ob, adj->type, true);
	      if (adj->op == IPA_PARAM_OP_SPLIT)
		{
		  stream_write_tree (ob, adj->alias_ptr_type, true);
		  streamer_write_uhwi (ob, adj->unit_offset);
		}
	    }
	}
      streamer_write_hwi (ob, adjustments->m_always_copy_start);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, node->clone.param_adjustments->m_skip_return, 1);
      streamer_write_bitpack (&bp);
    }

  /* The tree replacement map: parameter number plus replacement tree.  */
  streamer_write_uhwi (ob, vec_safe_length (node->clone.tree_map));
  FOR_EACH_VEC_SAFE_ELT (node->clone.tree_map, i, map)
    {
      streamer_write_uhwi (ob, map->parm_num);
      gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
      stream_write_tree (ob, map->new_tree, true);
    }

  /* Edge summaries are only streamed for nodes inside this partition.  */
  if (lto_symtab_encoder_in_partition_p (encoder, node))
    {
      for (e = node->callees; e; e = e->next_callee)
	output_edge_opt_summary (ob, e);
      for (e = node->indirect_calls; e; e = e->next_callee)
	output_edge_opt_summary (ob, e);
    }
}
1896
/* Output optimization summaries stored in callgraph.
   At the moment it is the clone info structure.  The section layout is
   a record count followed by, per record, the node's encoder index and
   its summary.  */

static void
output_cgraph_opt_summary (void)
{
  int i, n_nodes;
  lto_symtab_encoder_t encoder;
  struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
  unsigned count = 0;

  ob->symbol = NULL;
  encoder = ob->decl_state->symtab_node_encoder;
  n_nodes = lto_symtab_encoder_size (encoder);
  /* First pass: count nodes that need a summary so the reader knows how
     many records to expect.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
      if (cnode && output_cgraph_opt_summary_p (cnode))
	count++;
    }
  streamer_write_uhwi (ob, count);
  /* Second pass: stream the actual summaries.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
      if (cnode && output_cgraph_opt_summary_p (cnode))
	{
	  streamer_write_uhwi (ob, i);
	  output_node_opt_summary (ob, cnode, encoder);
	}
    }
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
1932
/* Input optimization summary of EDGE from IB_MAIN.  No per-edge data is
   currently streamed, so this is an intentionally empty placeholder that
   keeps the reader's edge walk symmetric with the writer's
   (see output_edge_opt_summary).  */

static void
input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
			class lto_input_block *ib_main ATTRIBUTE_UNUSED)
{
}
1940
/* Input optimization summary of NODE from IB_MAIN, using DATA_IN for tree
   streaming.  This is the exact mirror of output_node_opt_summary: the
   sequence and widths of the reads below must match the writes there, or
   the stream gets out of sync.  */

static void
input_node_opt_summary (struct cgraph_node *node,
			class lto_input_block *ib_main,
			class data_in *data_in)
{
  int i;
  int count;
  struct cgraph_edge *e;

  /* TODO: Should this code be moved to ipa-param-manipulation? */
  struct bitpack_d bp;
  bp = streamer_read_bitpack (ib_main);
  /* A single flag tells whether a param_adjustments record follows.  */
  bool have_adjustments = bp_unpack_value (&bp, 1);
  if (have_adjustments)
    {
      count = streamer_read_uhwi (ib_main);
      vec<ipa_adjusted_param, va_gc> *new_params = NULL;
      for (i = 0; i < count; i++)
	{
	  ipa_adjusted_param adj;
	  /* Clear all fields first; only some are streamed, depending
	     on adj.op below.  */
	  memset (&adj, 0, sizeof (adj));
	  bp = streamer_read_bitpack (ib_main);
	  adj.base_index = bp_unpack_value (&bp, IPA_PARAM_MAX_INDEX_BITS);
	  adj.prev_clone_index
	    = bp_unpack_value (&bp, IPA_PARAM_MAX_INDEX_BITS);
	  adj.op = (enum ipa_parm_op) bp_unpack_value (&bp, 2);
	  adj.param_prefix_index = bp_unpack_value (&bp, 2);
	  adj.prev_clone_adjustment = bp_unpack_value (&bp, 1);
	  adj.reverse = bp_unpack_value (&bp, 1);
	  adj.user_flag = bp_unpack_value (&bp, 1);
	  /* Trees are only present for newly created or split parameters;
	     split parameters additionally carry the alias pointer type and
	     the offset within the original parameter.  */
	  if (adj.op == IPA_PARAM_OP_SPLIT
	      || adj.op == IPA_PARAM_OP_NEW)
	    {
	      adj.type = stream_read_tree (ib_main, data_in);
	      if (adj.op == IPA_PARAM_OP_SPLIT)
		{
		  adj.alias_ptr_type = stream_read_tree (ib_main, data_in);
		  adj.unit_offset = streamer_read_uhwi (ib_main);
		}
	    }
	  vec_safe_push (new_params, adj);
	}
      int always_copy_start = streamer_read_hwi (ib_main);
      bp = streamer_read_bitpack (ib_main);
      bool skip_return = bp_unpack_value (&bp, 1);
      /* The adjustments object lives in GC memory, hence the placement
	 new into ggc_alloc'ed storage.  */
      node->clone.param_adjustments
	= (new (ggc_alloc <ipa_param_adjustments> ())
	   ipa_param_adjustments (new_params, always_copy_start, skip_return));
    }

  /* Read the parameter replacement map (constant values substituted for
     parameters of this clone).  */
  count = streamer_read_uhwi (ib_main);
  for (i = 0; i < count; i++)
    {
      struct ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();

      vec_safe_push (node->clone.tree_map, map);
      map->parm_num = streamer_read_uhwi (ib_main);
      map->new_tree = stream_read_tree (ib_main, data_in);
    }
  /* Walk edges in the same order as the writer; currently no per-edge
     data is streamed.  */
  for (e = node->callees; e; e = e->next_callee)
    input_edge_opt_summary (e, ib_main);
  for (e = node->indirect_calls; e; e = e->next_callee)
    input_edge_opt_summary (e, ib_main);
}
2007
2008 /* Read section in file FILE_DATA of length LEN with data DATA. */
2009
2010 static void
2011 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
2012 const char *data, size_t len,
2013 vec<symtab_node *> nodes)
2014 {
2015 const struct lto_function_header *header =
2016 (const struct lto_function_header *) data;
2017 const int cfg_offset = sizeof (struct lto_function_header);
2018 const int main_offset = cfg_offset + header->cfg_size;
2019 const int string_offset = main_offset + header->main_size;
2020 class data_in *data_in;
2021 unsigned int i;
2022 unsigned int count;
2023
2024 lto_input_block ib_main ((const char *) data + main_offset,
2025 header->main_size, file_data->mode_table);
2026
2027 data_in =
2028 lto_data_in_create (file_data, (const char *) data + string_offset,
2029 header->string_size, vNULL);
2030 count = streamer_read_uhwi (&ib_main);
2031
2032 for (i = 0; i < count; i++)
2033 {
2034 int ref = streamer_read_uhwi (&ib_main);
2035 input_node_opt_summary (dyn_cast<cgraph_node *> (nodes[ref]),
2036 &ib_main, data_in);
2037 }
2038 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
2039 len);
2040 lto_data_in_delete (data_in);
2041 }
2042
2043 /* Input optimization summary of cgraph. */
2044
2045 static void
2046 input_cgraph_opt_summary (vec<symtab_node *> nodes)
2047 {
2048 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2049 struct lto_file_decl_data *file_data;
2050 unsigned int j = 0;
2051
2052 while ((file_data = file_data_vec[j++]))
2053 {
2054 size_t len;
2055 const char *data
2056 = lto_get_summary_section_data (file_data, LTO_section_cgraph_opt_sum,
2057 &len);
2058 if (data)
2059 input_cgraph_opt_section (file_data, data, len, nodes);
2060 }
2061 }