gcc/lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2020 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "gimple-streamer.h"
34 #include "alias.h"
35 #include "stor-layout.h"
36 #include "gimple-iterator.h"
37 #include "except.h"
38 #include "lto-symtab.h"
39 #include "cgraph.h"
40 #include "cfgloop.h"
41 #include "builtins.h"
42 #include "gomp-constants.h"
43 #include "debug.h"
44 #include "omp-offload.h"
45 #include "print-tree.h"
46 #include "tree-dfa.h"
47 #include "file-prefix-map.h" /* remap_debug_filename() */
48 #include "output.h"
49
50
51 static void lto_write_tree (struct output_block*, tree, bool);
52
53 /* Clear the line info stored in OB. */
54
55 static void
56 clear_line_info (struct output_block *ob)
57 {
58 ob->current_file = NULL;
59 ob->current_line = 0;
60 ob->current_col = 0;
61 ob->current_sysp = false;
62 }
63
64
65 /* Create the output block and return it. SECTION_TYPE is
66 LTO_section_function_body or LTO_static_initializer. */
67
68 struct output_block *
69 create_output_block (enum lto_section_type section_type)
70 {
71 struct output_block *ob = XCNEW (struct output_block);
72 if (streamer_dump_file)
73 fprintf (streamer_dump_file, "Creating output block for %s\n",
74 lto_section_name [section_type]);
75
76 ob->section_type = section_type;
77 ob->decl_state = lto_get_out_decl_state ();
78 ob->main_stream = XCNEW (struct lto_output_stream);
79 ob->string_stream = XCNEW (struct lto_output_stream);
80 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
81
82 if (section_type == LTO_section_function_body)
83 ob->cfg_stream = XCNEW (struct lto_output_stream);
84
85 clear_line_info (ob);
86
87 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
88 gcc_obstack_init (&ob->obstack);
89
90 return ob;
91 }
92
93
94 /* Destroy the output block OB. */
95
96 void
97 destroy_output_block (struct output_block *ob)
98 {
99 enum lto_section_type section_type = ob->section_type;
100
101 delete ob->string_hash_table;
102 ob->string_hash_table = NULL;
103
104 free (ob->main_stream);
105 free (ob->string_stream);
106 if (section_type == LTO_section_function_body)
107 free (ob->cfg_stream);
108
109 streamer_tree_cache_delete (ob->writer_cache);
110 obstack_free (&ob->obstack, NULL);
111
112 free (ob);
113 }
114
115
116 /* Look up NODE in the type table and write the index for it to OB. */
117
118 static void
119 output_type_ref (struct output_block *ob, tree node)
120 {
121 streamer_write_record_start (ob, LTO_type_ref);
122 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
123 }
124
125 /* Wrapper around variably_modified_type_p avoiding type modification
126 during WPA streaming. */
127
128 static bool
129 lto_variably_modified_type_p (tree type)
130 {
131 return (in_lto_p
132 ? TYPE_LANG_FLAG_0 (TYPE_MAIN_VARIANT (type))
133 : variably_modified_type_p (type, NULL_TREE));
134 }
135
136
137 /* Return true if tree node T is written to various tables. For these
138 nodes, we sometimes want to write their physical representation
139 (via lto_output_tree), and sometimes we need to emit an index
140 reference into a table (via lto_output_tree_ref). */
141
142 static bool
143 tree_is_indexable (tree t)
144 {
145 /* Parameters and return values of functions of variably modified types
146 must go to global stream, because they may be used in the type
147 definition. */
148 if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
149 && DECL_CONTEXT (t))
150 return lto_variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)));
151 /* IMPORTED_DECL is put into BLOCK and thus it never can be shared.
152 We should no longer need to stream it. */
153 else if (TREE_CODE (t) == IMPORTED_DECL)
154 gcc_unreachable ();
155 else if (TREE_CODE (t) == LABEL_DECL)
156 return FORCED_LABEL (t) || DECL_NONLOCAL (t);
157 else if (((VAR_P (t) && !TREE_STATIC (t))
158 || TREE_CODE (t) == TYPE_DECL
159 || TREE_CODE (t) == CONST_DECL
160 || TREE_CODE (t) == NAMELIST_DECL)
161 && decl_function_context (t))
162 return false;
163 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
164 return false;
165 /* Variably modified types need to be streamed alongside function
166 bodies because they can refer to local entities. Together with
167 them we have to localize their members as well.
168 ??? In theory that includes non-FIELD_DECLs as well. */
169 else if (TYPE_P (t)
170 && lto_variably_modified_type_p (t))
171 return false;
172 else if (TREE_CODE (t) == FIELD_DECL
173 && lto_variably_modified_type_p (DECL_CONTEXT (t)))
174 return false;
175 else
176 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
177 }
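/* Illustrative note (added annotation, not part of the original sources):
   a file-scope or static VAR_DECL falls through to the final branch above
   and is indexable, so it is streamed once into the global decl stream and
   referenced by index from function bodies; an automatic local VAR_DECL has
   a function context and returns false, so it is streamed as part of its
   function's body instead.  */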
178
179
180 /* Output info about new location into bitpack BP.
181 After outputting bitpack, lto_output_location_data has
182 to be done to output actual data. */
183
184 void
185 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
186 location_t loc)
187 {
188 expanded_location xloc;
189
190 loc = LOCATION_LOCUS (loc);
191 bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT,
192 loc < RESERVED_LOCATION_COUNT
193 ? loc : RESERVED_LOCATION_COUNT);
194 if (loc < RESERVED_LOCATION_COUNT)
195 return;
196
197 xloc = expand_location (loc);
198
199 bp_pack_value (bp, ob->current_file != xloc.file, 1);
200 bp_pack_value (bp, ob->current_line != xloc.line, 1);
201 bp_pack_value (bp, ob->current_col != xloc.column, 1);
202
203 if (ob->current_file != xloc.file)
204 {
205 bp_pack_string (ob, bp, remap_debug_filename (xloc.file), true);
206 bp_pack_value (bp, xloc.sysp, 1);
207 }
208 ob->current_file = xloc.file;
209 ob->current_sysp = xloc.sysp;
210
211 if (ob->current_line != xloc.line)
212 bp_pack_var_len_unsigned (bp, xloc.line);
213 ob->current_line = xloc.line;
214
215 if (ob->current_col != xloc.column)
216 bp_pack_var_len_unsigned (bp, xloc.column);
217 ob->current_col = xloc.column;
218 }
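/* Illustrative sketch of the resulting encoding (added annotation,
   assuming RESERVED_LOCATION_COUNT == 2):

     stmt at foo.c:10:3  -> [2] [file:1][line:1][col:1] "foo.c" sysp 10 3
     stmt at foo.c:10:7  -> [2] [0][0][1] 7
     stmt at foo.c:11:3  -> [2] [0][1][1] 11 3
     UNKNOWN_LOCATION    -> [0]

   Only the components that changed relative to the previously streamed
   location are emitted, so runs of nearby statements stay cheap.  */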
219
220
221 /* If EXPR is an indexable tree node, output a reference to it to
222 output block OB. Otherwise, output the physical representation of
223 EXPR to OB. */
224
225 static void
226 lto_output_tree_ref (struct output_block *ob, tree expr)
227 {
228 enum tree_code code;
229
230 if (TYPE_P (expr))
231 {
232 output_type_ref (ob, expr);
233 return;
234 }
235
236 code = TREE_CODE (expr);
237 switch (code)
238 {
239 case SSA_NAME:
240 streamer_write_record_start (ob, LTO_ssa_name_ref);
241 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
242 break;
243
244 case FIELD_DECL:
245 streamer_write_record_start (ob, LTO_field_decl_ref);
246 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
247 break;
248
249 case FUNCTION_DECL:
250 streamer_write_record_start (ob, LTO_function_decl_ref);
251 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
252 break;
253
254 case VAR_DECL:
255 case DEBUG_EXPR_DECL:
256 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
257 /* FALLTHRU */
258 case PARM_DECL:
259 streamer_write_record_start (ob, LTO_global_decl_ref);
260 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
261 break;
262
263 case CONST_DECL:
264 streamer_write_record_start (ob, LTO_const_decl_ref);
265 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
266 break;
267
268 case IMPORTED_DECL:
269 gcc_assert (decl_function_context (expr) == NULL);
270 streamer_write_record_start (ob, LTO_imported_decl_ref);
271 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
272 break;
273
274 case TYPE_DECL:
275 streamer_write_record_start (ob, LTO_type_decl_ref);
276 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
277 break;
278
279 case NAMELIST_DECL:
280 streamer_write_record_start (ob, LTO_namelist_decl_ref);
281 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
282 break;
283
284 case NAMESPACE_DECL:
285 streamer_write_record_start (ob, LTO_namespace_decl_ref);
286 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
287 break;
288
289 case LABEL_DECL:
290 streamer_write_record_start (ob, LTO_label_decl_ref);
291 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
292 break;
293
294 case RESULT_DECL:
295 streamer_write_record_start (ob, LTO_result_decl_ref);
296 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
297 break;
298
299 case TRANSLATION_UNIT_DECL:
300 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
301 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
302 break;
303
304 default:
305 /* No other node is indexable, so it should have been handled by
306 lto_output_tree. */
307 gcc_unreachable ();
308 }
309 }
310
311
312 /* Return true if EXPR is a tree node that can be written to disk. */
313
314 static inline bool
315 lto_is_streamable (tree expr)
316 {
317 enum tree_code code = TREE_CODE (expr);
318
319 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
320 name version in lto_output_tree_ref (see output_ssa_names). */
321 return !is_lang_specific (expr)
322 && code != SSA_NAME
323 && code != LANG_TYPE
324 && code != MODIFY_EXPR
325 && code != INIT_EXPR
326 && code != TARGET_EXPR
327 && code != BIND_EXPR
328 && code != WITH_CLEANUP_EXPR
329 && code != STATEMENT_LIST
330 && (code == CASE_LABEL_EXPR
331 || code == DECL_EXPR
332 || TREE_CODE_CLASS (code) != tcc_statement);
333 }
334
335 /* Very rough estimate of streaming size of the initializer. If we ignored
336 presence of strings, we could simply count the number of non-indexable
337 tree nodes and the number of references to indexable nodes. Strings however
338 may be very large and we do not want to dump them into the global stream.
339
340 Subtract the estimated size from the budget in DATA; stop once it goes negative. */
341
342 static tree
343 subtract_estimated_size (tree *tp, int *ws, void *data)
344 {
345 long *sum = (long *)data;
346 if (tree_is_indexable (*tp))
347 {
348 /* Indexable tree is one reference to global stream.
349 Guess it may be about 4 bytes. */
350 *sum -= 4;
351 *ws = 0;
352 }
353 /* String table entry + base of tree node needs to be streamed. */
354 if (TREE_CODE (*tp) == STRING_CST)
355 *sum -= TREE_STRING_LENGTH (*tp) + 8;
356 else
357 {
358 /* Identifiers are also variable length but should not appear
359 naked in a constructor. */
360 gcc_checking_assert (TREE_CODE (*tp) != IDENTIFIER_NODE);
361 /* We do not really make an attempt to work out the size of a pickled
362 tree, as it is very variable. Make it bigger than the reference. */
363 *sum -= 16;
364 }
365 if (*sum < 0)
366 return *tp;
367 return NULL_TREE;
368 }
369
370
371 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
372
373 static tree
374 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
375 {
376 gcc_checking_assert (DECL_P (expr)
377 && TREE_CODE (expr) != FUNCTION_DECL
378 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
379
380 /* Handle DECL_INITIAL for symbols. */
381 tree initial = DECL_INITIAL (expr);
382 if (VAR_P (expr)
383 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
384 && !DECL_IN_CONSTANT_POOL (expr)
385 && initial)
386 {
387 varpool_node *vnode;
388 /* Extra section needs about 30 bytes; do not produce it for simple
389 scalar values. */
390 if (!(vnode = varpool_node::get (expr))
391 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
392 initial = error_mark_node;
393 if (initial != error_mark_node)
394 {
395 long max_size = 30;
396 if (walk_tree (&initial, subtract_estimated_size, (void *)&max_size,
397 NULL))
398 initial = error_mark_node;
399 }
400 }
401
402 return initial;
403 }
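/* Worked example of the estimate above (added annotation): with the
   ~30 byte budget, an initializer that is a single INTEGER_CST costs
   about 16, the budget stays positive and the value is streamed as
   DECL_INITIAL in the global stream, avoiding the extra section.  A
   100-character STRING_CST costs roughly 100 + 8, the budget goes
   negative at once, walk_tree returns the node and DECL_INITIAL is
   replaced by error_mark_node, so the initializer is emitted through
   the separate initializer section instead.  References to indexable
   trees are counted as 4 bytes each.  */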
404
405
406 /* Write a physical representation of tree node EXPR to output block
407 OB. If REF_P is true, the leaves of EXPR are emitted as references
408 via lto_output_tree_ref. */
410
411 static void
412 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
413 {
414 /* Pack all the non-pointer fields in EXPR into a bitpack and write
415 the resulting bitpack. */
416 streamer_write_tree_bitfields (ob, expr);
417
418 /* Write all the pointer fields in EXPR. */
419 streamer_write_tree_body (ob, expr, ref_p);
420
421 /* Write any LTO-specific data to OB. */
422 if (DECL_P (expr)
423 && TREE_CODE (expr) != FUNCTION_DECL
424 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
425 {
426 /* Handle DECL_INITIAL for symbols. */
427 tree initial = get_symbol_initial_value
428 (ob->decl_state->symtab_node_encoder, expr);
429 stream_write_tree (ob, initial, ref_p);
430 }
431
432 /* Stream references to early generated DIEs. Keep in sync with the
433 trees handled in dwarf2out_die_ref_for_decl. */
434 if ((DECL_P (expr)
435 && TREE_CODE (expr) != FIELD_DECL
436 && TREE_CODE (expr) != DEBUG_EXPR_DECL
437 && TREE_CODE (expr) != TYPE_DECL)
438 || TREE_CODE (expr) == BLOCK)
439 {
440 const char *sym;
441 unsigned HOST_WIDE_INT off;
442 if (debug_info_level > DINFO_LEVEL_NONE
443 && debug_hooks->die_ref_for_decl (expr, &sym, &off))
444 {
445 streamer_write_string (ob, ob->main_stream, sym, true);
446 streamer_write_uhwi (ob, off);
447 }
448 else
449 streamer_write_string (ob, ob->main_stream, NULL, true);
450 }
451 }
452
453 /* Write a physical representation of tree node EXPR to output block
454 OB. If REF_P is true, the leaves of EXPR are emitted as references
455 via lto_output_tree_ref. */
457
458 static void
459 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
460 {
461 if (!lto_is_streamable (expr))
462 internal_error ("tree code %qs is not supported in LTO streams",
463 get_tree_code_name (TREE_CODE (expr)));
464
465 /* Write the header, containing everything needed to materialize
466 EXPR on the reading side. */
467 streamer_write_tree_header (ob, expr);
468
469 lto_write_tree_1 (ob, expr, ref_p);
470
471 /* Mark the end of EXPR. */
472 streamer_write_zero (ob);
473 }
474
475 /* Emit the physical representation of tree node EXPR to output block OB,
476 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
477 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
478
479 static void
480 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
481 bool ref_p, bool this_ref_p)
482 {
483 unsigned ix;
484
485 gcc_checking_assert (expr != NULL_TREE
486 && !(this_ref_p && tree_is_indexable (expr)));
487
488 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
489 expr, hash, &ix);
490 gcc_assert (!exists_p);
491 if (TREE_CODE (expr) == INTEGER_CST
492 && !TREE_OVERFLOW (expr))
493 {
494 /* Shared INTEGER_CST nodes are special because they need their
495 original type to be materialized by the reader (to implement
496 TYPE_CACHED_VALUES). */
497 streamer_write_integer_cst (ob, expr, ref_p);
498 }
499 else
500 {
501 /* This is the first time we see EXPR, write its fields
502 to OB. */
503 lto_write_tree (ob, expr, ref_p);
504 }
505 }
506
507 class DFS
508 {
509 public:
510 DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
511 bool single_p);
512 ~DFS ();
513
514 struct scc_entry
515 {
516 tree t;
517 hashval_t hash;
518 };
519 auto_vec<scc_entry,32> sccstack;
520
521 private:
522 struct sccs
523 {
524 unsigned int dfsnum;
525 unsigned int low;
526 };
527 struct worklist
528 {
529 tree expr;
530 sccs *from_state;
531 sccs *cstate;
532 bool ref_p;
533 bool this_ref_p;
534 };
535
536 static int scc_entry_compare (const void *, const void *);
537
538 void DFS_write_tree_body (struct output_block *ob,
539 tree expr, sccs *expr_state, bool ref_p);
540
541 void DFS_write_tree (struct output_block *ob, sccs *from_state,
542 tree expr, bool ref_p, bool this_ref_p);
543
544 hashval_t
545 hash_scc (struct output_block *ob, unsigned first, unsigned size,
546 bool ref_p, bool this_ref_p);
547
548 hash_map<tree, sccs *> sccstate;
549 auto_vec<worklist, 32> worklist_vec;
550 struct obstack sccstate_obstack;
551 };
552
553 /* Emit the physical representation of tree node EXPR to output block OB,
554 using depth-first search on the subgraph. If THIS_REF_P is true, the
555 leaves of EXPR are emitted as references via lto_output_tree_ref.
556 REF_P is used for streaming siblings of EXPR. If SINGLE_P is true,
557 this is for a rewalk of a single leaf SCC. */
558
559 DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
560 bool single_p)
561 {
562 unsigned int next_dfs_num = 1;
563 gcc_obstack_init (&sccstate_obstack);
564 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
565 while (!worklist_vec.is_empty ())
566 {
567 worklist &w = worklist_vec.last ();
568 expr = w.expr;
569 sccs *from_state = w.from_state;
570 sccs *cstate = w.cstate;
571 ref_p = w.ref_p;
572 this_ref_p = w.this_ref_p;
573 if (cstate == NULL)
574 {
575 sccs **slot = &sccstate.get_or_insert (expr);
576 cstate = *slot;
577 if (cstate)
578 {
579 gcc_checking_assert (from_state);
580 if (cstate->dfsnum < from_state->dfsnum)
581 from_state->low = MIN (cstate->dfsnum, from_state->low);
582 worklist_vec.pop ();
583 continue;
584 }
585
586 scc_entry e = { expr, 0 };
587 /* Not yet visited. DFS recurse and push it onto the stack. */
588 *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
589 sccstack.safe_push (e);
590 cstate->dfsnum = next_dfs_num++;
591 cstate->low = cstate->dfsnum;
592 w.cstate = cstate;
593
594 if (TREE_CODE (expr) == INTEGER_CST
595 && !TREE_OVERFLOW (expr))
596 DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
597 else
598 {
599 DFS_write_tree_body (ob, expr, cstate, ref_p);
600
601 /* Walk any LTO-specific edges. */
602 if (DECL_P (expr)
603 && TREE_CODE (expr) != FUNCTION_DECL
604 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
605 {
606 /* Handle DECL_INITIAL for symbols. */
607 tree initial
608 = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
609 expr);
610 DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
611 }
612 }
613 continue;
614 }
615
616 /* See if we found an SCC. */
617 if (cstate->low == cstate->dfsnum)
618 {
619 unsigned first, size;
620 tree x;
621
622 /* If we are re-walking a single leaf SCC just pop it,
623 let earlier worklist item access the sccstack. */
624 if (single_p)
625 {
626 worklist_vec.pop ();
627 continue;
628 }
629
630 /* Pop the SCC and compute its size. */
631 first = sccstack.length ();
632 do
633 {
634 x = sccstack[--first].t;
635 }
636 while (x != expr);
637 size = sccstack.length () - first;
638
639 /* No need to compute hashes for LTRANS units, we don't perform
640 any merging there. */
641 hashval_t scc_hash = 0;
642 unsigned scc_entry_len = 0;
643 if (!flag_wpa)
644 {
645 scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);
646
647 /* Put the entries with the least number of collisions first. */
648 unsigned entry_start = 0;
649 scc_entry_len = size + 1;
650 for (unsigned i = 0; i < size;)
651 {
652 unsigned from = i;
653 for (i = i + 1; i < size
654 && (sccstack[first + i].hash
655 == sccstack[first + from].hash); ++i)
656 ;
657 if (i - from < scc_entry_len)
658 {
659 scc_entry_len = i - from;
660 entry_start = from;
661 }
662 }
663 for (unsigned i = 0; i < scc_entry_len; ++i)
664 std::swap (sccstack[first + i],
665 sccstack[first + entry_start + i]);
666
667 /* We already sorted SCC deterministically in hash_scc. */
668
669 /* Check that we have only one SCC.
670 Naturally we may have conflicts if the hash function is not
671 strong enough. Let's see how far this gets. */
672 gcc_checking_assert (scc_entry_len == 1);
673 }
674
675 /* Write LTO_tree_scc. */
676 streamer_write_record_start (ob, LTO_tree_scc);
677 streamer_write_uhwi (ob, size);
678 streamer_write_uhwi (ob, scc_hash);
679
680 /* Write size-1 SCCs without wrapping them inside SCC bundles.
681 All INTEGER_CSTs need to be handled this way as we need
682 their type to materialize them. Also builtins are handled
683 this way.
684 ??? We still wrap these in LTO_tree_scc so at the
685 input side we can properly identify the tree we want
686 to ultimately return. */
687 if (size == 1)
688 lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
689 else
690 {
691 /* Write the size of the SCC entry candidates. */
692 streamer_write_uhwi (ob, scc_entry_len);
693
694 /* Write all headers and populate the streamer cache. */
695 for (unsigned i = 0; i < size; ++i)
696 {
697 hashval_t hash = sccstack[first+i].hash;
698 tree t = sccstack[first+i].t;
699 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
700 t, hash, NULL);
701 gcc_assert (!exists_p);
702
703 if (!lto_is_streamable (t))
704 internal_error ("tree code %qs is not supported "
705 "in LTO streams",
706 get_tree_code_name (TREE_CODE (t)));
707
708 /* Write the header, containing everything needed to
709 materialize EXPR on the reading side. */
710 streamer_write_tree_header (ob, t);
711 }
712
713 /* Write the bitpacks and tree references. */
714 for (unsigned i = 0; i < size; ++i)
715 {
716 lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
717
718 /* Mark the end of the tree. */
719 streamer_write_zero (ob);
720 }
721 }
722
723 /* Finally truncate the vector. */
724 sccstack.truncate (first);
725
726 if (from_state)
727 from_state->low = MIN (from_state->low, cstate->low);
728 worklist_vec.pop ();
729 continue;
730 }
731
732 gcc_checking_assert (from_state);
733 from_state->low = MIN (from_state->low, cstate->low);
734 if (cstate->dfsnum < from_state->dfsnum)
735 from_state->low = MIN (cstate->dfsnum, from_state->low);
736 worklist_vec.pop ();
737 }
738 }
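/* Note on the walk above (added annotation): this is an iterative form
   of Tarjan's SCC algorithm.  Every node gets a DFS number and a "low"
   value, the smallest DFS number reachable over the edges followed so
   far; a node whose low value still equals its own DFS number when it
   is finished is the root of an SCC, and the component is everything
   pushed onto sccstack above it.  For a two-element cycle A -> B -> A,
   A gets dfsnum 1 and B dfsnum 2, the back edge lowers B's low to 1 so
   B is not a root; when A finishes with low == dfsnum it pops the SCC
   {A, B}.  */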
739
740 DFS::~DFS ()
741 {
742 obstack_free (&sccstate_obstack, NULL);
743 }
744
745 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
746 DFS recurse for all tree edges originating from it. */
747
748 void
749 DFS::DFS_write_tree_body (struct output_block *ob,
750 tree expr, sccs *expr_state, bool ref_p)
751 {
752 #define DFS_follow_tree_edge(DEST) \
753 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
754
755 enum tree_code code;
756
757 if (streamer_dump_file)
758 {
759 print_node_brief (streamer_dump_file, " Streaming ",
760 expr, 4);
761 fprintf (streamer_dump_file, " to %s\n",
762 lto_section_name [ob->section_type]);
763 }
764
765 code = TREE_CODE (expr);
766
767 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
768 {
769 if (TREE_CODE (expr) != IDENTIFIER_NODE)
770 DFS_follow_tree_edge (TREE_TYPE (expr));
771 }
772
773 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
774 {
775 unsigned int count = vector_cst_encoded_nelts (expr);
776 for (unsigned int i = 0; i < count; ++i)
777 DFS_follow_tree_edge (VECTOR_CST_ENCODED_ELT (expr, i));
778 }
779
780 if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
781 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
782 DFS_follow_tree_edge (POLY_INT_CST_COEFF (expr, i));
783
784 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
785 {
786 DFS_follow_tree_edge (TREE_REALPART (expr));
787 DFS_follow_tree_edge (TREE_IMAGPART (expr));
788 }
789
790 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
791 {
792 /* Drop names that were created for anonymous entities. */
793 if (DECL_NAME (expr)
794 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
795 && IDENTIFIER_ANON_P (DECL_NAME (expr)))
796 ;
797 else
798 DFS_follow_tree_edge (DECL_NAME (expr));
799 if (TREE_CODE (expr) != TRANSLATION_UNIT_DECL
800 && ! DECL_CONTEXT (expr))
801 DFS_follow_tree_edge ((*all_translation_units)[0]);
802 else
803 DFS_follow_tree_edge (DECL_CONTEXT (expr));
804 }
805
806 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
807 {
808 DFS_follow_tree_edge (DECL_SIZE (expr));
809 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
810
811 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
812 special handling in LTO, it must be handled by streamer hooks. */
813
814 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
815
816 /* We use DECL_ABSTRACT_ORIGIN == error_mark_node to mark
817 declarations which should be eliminated by decl merging. Be sure none
818 leaks to this point. */
819 gcc_assert (DECL_ABSTRACT_ORIGIN (expr) != error_mark_node);
820 DFS_follow_tree_edge (DECL_ABSTRACT_ORIGIN (expr));
821
822 if ((VAR_P (expr)
823 || TREE_CODE (expr) == PARM_DECL)
824 && DECL_HAS_VALUE_EXPR_P (expr))
825 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
826 if (VAR_P (expr)
827 && DECL_HAS_DEBUG_EXPR_P (expr))
828 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
829 }
830
831 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
832 {
833 /* Make sure we don't inadvertently set the assembler name. */
834 if (DECL_ASSEMBLER_NAME_SET_P (expr))
835 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
836 }
837
838 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
839 {
840 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
841 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
842 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
843 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
844 gcc_checking_assert (!DECL_FCONTEXT (expr));
845 }
846
847 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
848 {
849 gcc_checking_assert (DECL_VINDEX (expr) == NULL);
850 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
851 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
852 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
853 }
854
855 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
856 {
857 DFS_follow_tree_edge (TYPE_SIZE (expr));
858 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
859 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
860 DFS_follow_tree_edge (TYPE_NAME (expr));
861 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
862 reconstructed during fixup. */
863 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
864 during fixup. */
865 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
866 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
867 /* TYPE_CANONICAL is re-computed during type merging, so no need
868 to follow it here. */
869 /* Do not stream TYPE_STUB_DECL; it is not needed by LTO but currently
870 it cannot be freed by free_lang_data without triggering ICEs in
871 langhooks. */
872 }
873
874 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
875 {
876 if (TREE_CODE (expr) == ENUMERAL_TYPE)
877 DFS_follow_tree_edge (TYPE_VALUES (expr));
878 else if (TREE_CODE (expr) == ARRAY_TYPE)
879 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
880 else if (RECORD_OR_UNION_TYPE_P (expr))
881 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
882 DFS_follow_tree_edge (t);
883 else if (TREE_CODE (expr) == FUNCTION_TYPE
884 || TREE_CODE (expr) == METHOD_TYPE)
885 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
886
887 if (!POINTER_TYPE_P (expr))
888 DFS_follow_tree_edge (TYPE_MIN_VALUE_RAW (expr));
889 DFS_follow_tree_edge (TYPE_MAX_VALUE_RAW (expr));
890 }
891
892 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
893 {
894 DFS_follow_tree_edge (TREE_PURPOSE (expr));
895 DFS_follow_tree_edge (TREE_VALUE (expr));
896 DFS_follow_tree_edge (TREE_CHAIN (expr));
897 }
898
899 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
900 {
901 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
902 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
903 }
904
905 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
906 {
907 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
908 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
909 DFS_follow_tree_edge (TREE_BLOCK (expr));
910 }
911
912 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
913 {
914 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
915 {
916 /* We would have to stream externals in the block chain as
917 non-references but we should have dropped them in
918 free-lang-data. */
919 gcc_assert (!VAR_OR_FUNCTION_DECL_P (t) || !DECL_EXTERNAL (t));
920 DFS_follow_tree_edge (t);
921 }
922
923 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
924 DFS_follow_tree_edge (BLOCK_ABSTRACT_ORIGIN (expr));
925
926 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
927 information for early inlined BLOCKs so drop it on the floor instead
928 of ICEing in dwarf2out.c. */
929
930 /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
931 streaming time. */
932
933 /* Do not output BLOCK_SUBBLOCKS. Instead on streaming-in this
934 list is re-constructed from BLOCK_SUPERCONTEXT. */
935 }
936
937 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
938 {
939 unsigned i;
940 tree t;
941
942 /* Note that the number of BINFO slots has already been emitted in
943 EXPR's header (see streamer_write_tree_header) because this length
944 is needed to build the empty BINFO node on the reader side. */
945 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
946 DFS_follow_tree_edge (t);
947 DFS_follow_tree_edge (BINFO_OFFSET (expr));
948 DFS_follow_tree_edge (BINFO_VTABLE (expr));
949
950 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX,
951 BINFO_BASE_ACCESSES and BINFO_VPTR_INDEX; these are used
952 by C++ FE only. */
953 }
954
955 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
956 {
957 unsigned i;
958 tree index, value;
959
960 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
961 {
962 DFS_follow_tree_edge (index);
963 DFS_follow_tree_edge (value);
964 }
965 }
966
967 if (code == OMP_CLAUSE)
968 {
969 int i;
970 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
971 DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
972 DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
973 }
974
975 #undef DFS_follow_tree_edge
976 }
977
978 /* Return a hash value for the tree T.
979 CACHE holds hash values of trees outside the current SCC. MAP, if non-NULL,
980 may hold hash values of trees inside the current SCC. */
981
982 static hashval_t
983 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
984 {
985 inchash::hash hstate;
986
987 #define visit(SIBLING) \
988 do { \
989 unsigned ix; \
990 if (!SIBLING) \
991 hstate.add_int (0); \
992 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
993 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
994 else if (map) \
995 hstate.add_int (*map->get (SIBLING)); \
996 else \
997 hstate.add_int (1); \
998 } while (0)
999
1000 /* Hash TS_BASE. */
1001 enum tree_code code = TREE_CODE (t);
1002 hstate.add_int (code);
1003 if (!TYPE_P (t))
1004 {
1005 hstate.add_flag (TREE_SIDE_EFFECTS (t));
1006 hstate.add_flag (TREE_CONSTANT (t));
1007 hstate.add_flag (TREE_READONLY (t));
1008 hstate.add_flag (TREE_PUBLIC (t));
1009 }
1010 hstate.add_flag (TREE_ADDRESSABLE (t));
1011 hstate.add_flag (TREE_THIS_VOLATILE (t));
1012 if (DECL_P (t))
1013 hstate.add_flag (DECL_UNSIGNED (t));
1014 else if (TYPE_P (t))
1015 hstate.add_flag (TYPE_UNSIGNED (t));
1016 if (TYPE_P (t))
1017 hstate.add_flag (TYPE_ARTIFICIAL (t));
1018 else
1019 hstate.add_flag (TREE_NO_WARNING (t));
1020 hstate.add_flag (TREE_NOTHROW (t));
1021 hstate.add_flag (TREE_STATIC (t));
1022 hstate.add_flag (TREE_PROTECTED (t));
1023 hstate.add_flag (TREE_DEPRECATED (t));
1024 if (code != TREE_BINFO)
1025 hstate.add_flag (TREE_PRIVATE (t));
1026 if (TYPE_P (t))
1027 {
1028 hstate.add_flag (AGGREGATE_TYPE_P (t)
1029 ? TYPE_REVERSE_STORAGE_ORDER (t) : TYPE_SATURATING (t));
1030 hstate.add_flag (TYPE_ADDR_SPACE (t));
1031 }
1032 else if (code == SSA_NAME)
1033 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
1034 hstate.commit_flag ();
1035
1036 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1037 hstate.add_wide_int (wi::to_widest (t));
1038
1039 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1040 {
1041 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
1042 hstate.add_flag (r.cl);
1043 hstate.add_flag (r.sign);
1044 hstate.add_flag (r.signalling);
1045 hstate.add_flag (r.canonical);
1046 hstate.commit_flag ();
1047 hstate.add_int (r.uexp);
1048 hstate.add (r.sig, sizeof (r.sig));
1049 }
1050
1051 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1052 {
1053 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
1054 hstate.add_int (f.mode);
1055 hstate.add_int (f.data.low);
1056 hstate.add_int (f.data.high);
1057 }
1058
1059 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1060 {
1061 hstate.add_hwi (DECL_MODE (t));
1062 hstate.add_flag (DECL_NONLOCAL (t));
1063 hstate.add_flag (DECL_VIRTUAL_P (t));
1064 hstate.add_flag (DECL_IGNORED_P (t));
1065 hstate.add_flag (DECL_ABSTRACT_P (t));
1066 hstate.add_flag (DECL_ARTIFICIAL (t));
1067 hstate.add_flag (DECL_USER_ALIGN (t));
1068 hstate.add_flag (DECL_PRESERVE_P (t));
1069 hstate.add_flag (DECL_EXTERNAL (t));
1070 hstate.add_flag (DECL_GIMPLE_REG_P (t));
1071 hstate.commit_flag ();
1072 hstate.add_int (DECL_ALIGN (t));
1073 if (code == LABEL_DECL)
1074 {
1075 hstate.add_int (EH_LANDING_PAD_NR (t));
1076 hstate.add_int (LABEL_DECL_UID (t));
1077 }
1078 else if (code == FIELD_DECL)
1079 {
1080 hstate.add_flag (DECL_PACKED (t));
1081 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
1082 hstate.add_flag (DECL_PADDING_P (t));
1083 hstate.add_int (DECL_OFFSET_ALIGN (t));
1084 }
1085 else if (code == VAR_DECL)
1086 {
1087 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
1088 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
1089 }
1090 if (code == RESULT_DECL
1091 || code == PARM_DECL
1092 || code == VAR_DECL)
1093 {
1094 hstate.add_flag (DECL_BY_REFERENCE (t));
1095 if (code == VAR_DECL
1096 || code == PARM_DECL)
1097 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
1098 }
1099 hstate.commit_flag ();
1100 }
1101
1102 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1103 hstate.add_int (DECL_REGISTER (t));
1104
1105 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1106 {
1107 hstate.add_flag (DECL_COMMON (t));
1108 hstate.add_flag (DECL_DLLIMPORT_P (t));
1109 hstate.add_flag (DECL_WEAK (t));
1110 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
1111 hstate.add_flag (DECL_COMDAT (t));
1112 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
1113 hstate.add_int (DECL_VISIBILITY (t));
1114 if (code == VAR_DECL)
1115 {
1116 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1117 hstate.add_flag (DECL_HARD_REGISTER (t));
1118 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
1119 }
1120 if (TREE_CODE (t) == FUNCTION_DECL)
1121 {
1122 hstate.add_flag (DECL_FINAL_P (t));
1123 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
1124 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
1125 }
1126 hstate.commit_flag ();
1127 }
1128
1129 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1130 {
1131 hstate.add_int (DECL_BUILT_IN_CLASS (t));
1132 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
1133 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
1134 hstate.add_flag (FUNCTION_DECL_DECL_TYPE (t));
1135 hstate.add_flag (DECL_UNINLINABLE (t));
1136 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
1137 hstate.add_flag (DECL_IS_NOVOPS (t));
1138 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
1139 hstate.add_flag (DECL_IS_MALLOC (t));
1140 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
1141 hstate.add_flag (DECL_STATIC_CHAIN (t));
1142 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
1143 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
1144 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
1145 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
1146 hstate.add_flag (DECL_PURE_P (t));
1147 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
1148 hstate.commit_flag ();
1149 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
1150 hstate.add_int (DECL_UNCHECKED_FUNCTION_CODE (t));
1151 }
1152
1153 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1154 {
1155 hstate.add_hwi (TYPE_MODE (t));
1156 /* TYPE_NO_FORCE_BLK is private to stor-layout and needs
1157 no streaming. */
1158 hstate.add_flag (TYPE_PACKED (t));
1159 hstate.add_flag (TYPE_RESTRICT (t));
1160 hstate.add_flag (TYPE_USER_ALIGN (t));
1161 hstate.add_flag (TYPE_READONLY (t));
1162 if (RECORD_OR_UNION_TYPE_P (t))
1163 {
1164 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
1165 hstate.add_flag (TYPE_FINAL_P (t));
1166 hstate.add_flag (TYPE_CXX_ODR_P (t));
1167 }
1168 else if (code == ARRAY_TYPE)
1169 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
1170 if (code == ARRAY_TYPE || code == INTEGER_TYPE)
1171 hstate.add_flag (TYPE_STRING_FLAG (t));
1172 if (AGGREGATE_TYPE_P (t))
1173 hstate.add_flag (TYPE_TYPELESS_STORAGE (t));
1174 hstate.commit_flag ();
1175 hstate.add_int (TYPE_PRECISION (t));
1176 hstate.add_int (TYPE_ALIGN (t));
1177 hstate.add_int (TYPE_EMPTY_P (t));
1178 }
1179
1180 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1181 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
1182 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
1183
1184 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
1185 /* We don't stream these when passing things to a different target. */
1186 && !lto_stream_offload_p)
1187 hstate.add_hwi (cl_target_option_hash (TREE_TARGET_OPTION (t)));
1188
1189 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1190 hstate.add_hwi (cl_optimization_hash (TREE_OPTIMIZATION (t)));
1191
1192 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1193 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
1194
1195 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1196 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
1197
1198 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1199 {
1200 if (code != IDENTIFIER_NODE)
1201 visit (TREE_TYPE (t));
1202 }
1203
1204 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1205 {
1206 unsigned int count = vector_cst_encoded_nelts (t);
1207 for (unsigned int i = 0; i < count; ++i)
1208 visit (VECTOR_CST_ENCODED_ELT (t, i));
1209 }
1210
1211 if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
1212 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1213 visit (POLY_INT_CST_COEFF (t, i));
1214
1215 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1216 {
1217 visit (TREE_REALPART (t));
1218 visit (TREE_IMAGPART (t));
1219 }
1220
1221 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1222 {
1223 /* Drop names that were created for anonymous entities. */
1224 if (DECL_NAME (t)
1225 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
1226 && IDENTIFIER_ANON_P (DECL_NAME (t)))
1227 ;
1228 else
1229 visit (DECL_NAME (t));
1230 if (DECL_FILE_SCOPE_P (t))
1231 ;
1232 else
1233 visit (DECL_CONTEXT (t));
1234 }
1235
1236 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1237 {
1238 visit (DECL_SIZE (t));
1239 visit (DECL_SIZE_UNIT (t));
1240 visit (DECL_ATTRIBUTES (t));
1241 if ((code == VAR_DECL
1242 || code == PARM_DECL)
1243 && DECL_HAS_VALUE_EXPR_P (t))
1244 visit (DECL_VALUE_EXPR (t));
1245 if (code == VAR_DECL
1246 && DECL_HAS_DEBUG_EXPR_P (t))
1247 visit (DECL_DEBUG_EXPR (t));
1248 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
1249 be able to call get_symbol_initial_value. */
1250 }
1251
1252 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1253 {
1254 if (DECL_ASSEMBLER_NAME_SET_P (t))
1255 visit (DECL_ASSEMBLER_NAME (t));
1256 }
1257
1258 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1259 {
1260 visit (DECL_FIELD_OFFSET (t));
1261 visit (DECL_BIT_FIELD_TYPE (t));
1262 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1263 visit (DECL_FIELD_BIT_OFFSET (t));
1264 }
1265
1266 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1267 {
1268 visit (DECL_FUNCTION_PERSONALITY (t));
1269 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1270 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1271 }
1272
1273 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1274 {
1275 visit (TYPE_SIZE (t));
1276 visit (TYPE_SIZE_UNIT (t));
1277 visit (TYPE_ATTRIBUTES (t));
1278 visit (TYPE_NAME (t));
1279 visit (TYPE_MAIN_VARIANT (t));
1280 if (TYPE_FILE_SCOPE_P (t))
1281 ;
1282 else
1283 visit (TYPE_CONTEXT (t));
1284 }
1285
1286 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1287 {
1288 if (code == ENUMERAL_TYPE)
1289 visit (TYPE_VALUES (t));
1290 else if (code == ARRAY_TYPE)
1291 visit (TYPE_DOMAIN (t));
1292 else if (RECORD_OR_UNION_TYPE_P (t))
1293 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1294 visit (f);
1295 else if (code == FUNCTION_TYPE
1296 || code == METHOD_TYPE)
1297 visit (TYPE_ARG_TYPES (t));
1298 if (!POINTER_TYPE_P (t))
1299 visit (TYPE_MIN_VALUE_RAW (t));
1300 visit (TYPE_MAX_VALUE_RAW (t));
1301 }
1302
1303 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1304 {
1305 visit (TREE_PURPOSE (t));
1306 visit (TREE_VALUE (t));
1307 visit (TREE_CHAIN (t));
1308 }
1309
1310 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1311 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1312 visit (TREE_VEC_ELT (t, i));
1313
1314 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1315 {
1316 hstate.add_hwi (TREE_OPERAND_LENGTH (t));
1317 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1318 visit (TREE_OPERAND (t, i));
1319 }
1320
1321 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1322 {
1323 unsigned i;
1324 tree b;
1325 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1326 visit (b);
1327 visit (BINFO_OFFSET (t));
1328 visit (BINFO_VTABLE (t));
1329 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1330 BINFO_BASE_ACCESSES and BINFO_VPTR_INDEX; these are used
1331 by C++ FE only. */
1332 }
1333
1334 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1335 {
1336 unsigned i;
1337 tree index, value;
1338 hstate.add_hwi (CONSTRUCTOR_NELTS (t));
1339 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1340 {
1341 visit (index);
1342 visit (value);
1343 }
1344 }
1345
1346 if (code == OMP_CLAUSE)
1347 {
1348 int i;
1349 HOST_WIDE_INT val;
1350
1351 hstate.add_hwi (OMP_CLAUSE_CODE (t));
1352 switch (OMP_CLAUSE_CODE (t))
1353 {
1354 case OMP_CLAUSE_DEFAULT:
1355 val = OMP_CLAUSE_DEFAULT_KIND (t);
1356 break;
1357 case OMP_CLAUSE_SCHEDULE:
1358 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1359 break;
1360 case OMP_CLAUSE_DEPEND:
1361 val = OMP_CLAUSE_DEPEND_KIND (t);
1362 break;
1363 case OMP_CLAUSE_MAP:
1364 val = OMP_CLAUSE_MAP_KIND (t);
1365 break;
1366 case OMP_CLAUSE_PROC_BIND:
1367 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1368 break;
1369 case OMP_CLAUSE_REDUCTION:
1370 case OMP_CLAUSE_TASK_REDUCTION:
1371 case OMP_CLAUSE_IN_REDUCTION:
1372 val = OMP_CLAUSE_REDUCTION_CODE (t);
1373 break;
1374 default:
1375 val = 0;
1376 break;
1377 }
1378 hstate.add_hwi (val);
1379 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1380 visit (OMP_CLAUSE_OPERAND (t, i));
1381 visit (OMP_CLAUSE_CHAIN (t));
1382 }
1383
1384 return hstate.end ();
1385
1386 #undef visit
1387 }
1388
1389 /* Compare two SCC entries by their hash value for qsorting them. */
1390
1391 int
1392 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1393 {
1394 const scc_entry *p1 = (const scc_entry *) p1_;
1395 const scc_entry *p2 = (const scc_entry *) p2_;
1396 if (p1->hash < p2->hash)
1397 return -1;
1398 else if (p1->hash > p2->hash)
1399 return 1;
1400 return 0;
1401 }
1402
1403 /* Return a hash value for the SCC on the SCC stack from FIRST with SIZE.
1404 THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST. */
1405
1406 hashval_t
1407 DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
1408 bool ref_p, bool this_ref_p)
1409 {
1410 unsigned int last_classes = 0, iterations = 0;
1411
1412 /* Compute hash values for the SCC members. */
1413 for (unsigned i = 0; i < size; ++i)
1414 sccstack[first+i].hash
1415 = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);
1416
1417 if (size == 1)
1418 return sccstack[first].hash;
1419
1420 /* We aim to get a unique hash for every tree within the SCC and compute the hash
1421 value of the whole SCC by combining all values together in a stable (entry-point
1422 independent) order. This guarantees that the same SCC regions within
1423 different translation units will get the same hash values and therefore
1424 will be merged at WPA time.
1425
1426 Often the hashes are already unique. In that case we compute the SCC hash
1427 by combining individual hash values in an increasing order.
1428
1429 If there are duplicates, we seek at least one tree with unique hash (and
1430 pick one with minimal hash and this property). Then we obtain a stable
1431 order by DFS walk starting from this unique tree and then use the index
1432 within this order to make individual hash values unique.
1433
1434 If there is no tree with unique hash, we iteratively propagate the hash
1435 values across the internal edges of SCC. This usually quickly leads
1436 to unique hashes. Consider, for example, an SCC containing two pointers
1437 that are identical except for the types they point to and assume that
1438 these types are also part of the SCC. The propagation will add the
1439 points-to type information into their hash values. */
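/* Small worked example (added annotation): suppose a three-element SCC
   whose first-round hashes sort to {5, 5, 9}.  Then CLASSES is 2 and
   FIRSTUNIQUE is 2 (the element with hash 9), so the loop below stops
   after one round, re-walks the SCC from that element to obtain a
   stable order and hashes each member's position in that order into
   its value, making the two colliding hashes distinct.  With hashes
   {5, 5, 5} there is no unique entry point and the hashes are first
   re-propagated across the SCC edges via hash_tree with MAP.  */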
1440 do
1441 {
1442 /* Sort the SCC so we can easily check for uniqueness. */
1443 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1444
1445 unsigned int classes = 1;
1446 int firstunique = -1;
1447
1448 /* Find the tree with lowest unique hash (if it exists) and compute
1449 the number of equivalence classes. */
1450 if (sccstack[first].hash != sccstack[first+1].hash)
1451 firstunique = 0;
1452 for (unsigned i = 1; i < size; ++i)
1453 if (sccstack[first+i-1].hash != sccstack[first+i].hash)
1454 {
1455 classes++;
1456 if (firstunique == -1
1457 && (i == size - 1
1458 || sccstack[first+i+1].hash != sccstack[first+i].hash))
1459 firstunique = i;
1460 }
1461
1462 /* If we found a tree with unique hash, stop the iteration. */
1463 if (firstunique != -1
1464 /* Also terminate if we run out of iterations or if the number of
1465 equivalence classes is no longer increasing.
1466 For example a cyclic list of trees that are all equivalent will
1467 never have unique entry point; we however do not build such SCCs
1468 in our IL. */
1469 || classes <= last_classes || iterations > 16)
1470 {
1471 hashval_t scc_hash;
1472
1473 /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
1474 starting from FIRSTUNIQUE to obtain a stable order. */
1475 if (classes != size && firstunique != -1)
1476 {
1477 hash_map <tree, hashval_t> map(size*2);
1478
1479 /* Store hash values into a map, so we can associate them with
1480 the reordered SCC. */
1481 for (unsigned i = 0; i < size; ++i)
1482 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1483
1484 DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
1485 true);
1486 gcc_assert (again.sccstack.length () == size);
1487
1488 memcpy (sccstack.address () + first,
1489 again.sccstack.address (),
1490 sizeof (scc_entry) * size);
1491
1492 /* Update hash values of individual members by hashing in the
1493 index within the stable order. This ensures uniqueness.
1494 Also compute the SCC hash by mixing in all hash values in
1495 the stable order we obtained. */
1496 sccstack[first].hash = *map.get (sccstack[first].t);
1497 scc_hash = sccstack[first].hash;
1498 for (unsigned i = 1; i < size; ++i)
1499 {
1500 sccstack[first+i].hash
1501 = iterative_hash_hashval_t (i,
1502 *map.get (sccstack[first+i].t));
1503 scc_hash
1504 = iterative_hash_hashval_t (scc_hash,
1505 sccstack[first+i].hash);
1506 }
1507 }
1508 /* If we got a unique hash value for each tree, then the sort already
1509 ensured an entry-point independent order. Only compute the final
1510 SCC hash.
1511
1512 If we failed to find a unique entry point, we go by the same
1513 route. We will eventually introduce unwanted hash conflicts. */
1514 else
1515 {
1516 scc_hash = sccstack[first].hash;
1517 for (unsigned i = 1; i < size; ++i)
1518 scc_hash
1519 = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);
1520
1521 /* We cannot 100% guarantee that hash conflicts will never make it
1522 impossible to find a unique entry point. This however
1523 should be an extremely rare case. ICE for now so possible
1524 issues are found and evaluated. */
1525 gcc_checking_assert (classes == size);
1526 }
1527
1528 /* To avoid conflicts across SCCs, iteratively hash the whole SCC
1529 hash into the hash of each element. */
1530 for (unsigned i = 0; i < size; ++i)
1531 sccstack[first+i].hash
1532 = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
1533 return scc_hash;
1534 }
1535
1536 last_classes = classes;
1537 iterations++;
1538
1539 /* We failed to identify the entry point; propagate hash values across
1540 the edges. */
1541 hash_map <tree, hashval_t> map(size*2);
1542
1543 for (unsigned i = 0; i < size; ++i)
1544 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1545
1546 for (unsigned i = 0; i < size; i++)
1547 sccstack[first+i].hash
1548 = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
1549 }
1550 while (true);
1551 }
1552
1553 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1554 already in the streamer cache. Main routine called for
1555 each visit of EXPR. */
1556
1557 void
1558 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1559 tree expr, bool ref_p, bool this_ref_p)
1560 {
1561 /* Handle special cases. */
1562 if (expr == NULL_TREE)
1563 return;
1564
1565 /* Do not DFS walk into indexable trees. */
1566 if (this_ref_p && tree_is_indexable (expr))
1567 return;
1568
1569 /* Check if we already streamed EXPR. */
1570 if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1571 return;
1572
1573 worklist w;
1574 w.expr = expr;
1575 w.from_state = from_state;
1576 w.cstate = NULL;
1577 w.ref_p = ref_p;
1578 w.this_ref_p = this_ref_p;
1579 worklist_vec.safe_push (w);
1580 }
1581
1582
1583 /* Emit the physical representation of tree node EXPR to output block OB.
1584 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
1585 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1586
1587 void
1588 lto_output_tree (struct output_block *ob, tree expr,
1589 bool ref_p, bool this_ref_p)
1590 {
1591 unsigned ix;
1592 bool existed_p;
1593
1594 if (expr == NULL_TREE)
1595 {
1596 streamer_write_record_start (ob, LTO_null);
1597 return;
1598 }
1599
1600 if (this_ref_p && tree_is_indexable (expr))
1601 {
1602 lto_output_tree_ref (ob, expr);
1603 return;
1604 }
1605
1606 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1607 if (existed_p)
1608 {
1609 /* If a node has already been streamed out, make sure that
1610 we don't write it more than once. Otherwise, the reader
1611 will instantiate two different nodes for the same object. */
1612 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1613 streamer_write_uhwi (ob, ix);
1614 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1615 lto_tree_code_to_tag (TREE_CODE (expr)));
1616 lto_stats.num_pickle_refs_output++;
1617 }
1618 else
1619 {
1620 /* This is the first time we see EXPR, write all reachable
1621 trees to OB. */
1622 static bool in_dfs_walk;
1623
1624 /* Protect against recursion which means disconnect between
1625 what tree edges we walk in the DFS walk and what edges
1626 we stream out. */
1627 gcc_assert (!in_dfs_walk);
1628
1629 if (streamer_dump_file)
1630 {
1631 print_node_brief (streamer_dump_file, " Streaming SCC of ",
1632 expr, 4);
1633 fprintf (streamer_dump_file, "\n");
1634 }
1635
1636 /* Start the DFS walk. */
1639 in_dfs_walk = true;
1640 DFS (ob, expr, ref_p, this_ref_p, false);
1641 in_dfs_walk = false;
1642
1643 /* Finally append a reference to the tree we were writing.
1644 ??? If expr ended up as a singleton we could have
1645 inlined it here and avoid outputting a reference. */
1646 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1647 gcc_assert (existed_p);
1648 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1649 streamer_write_uhwi (ob, ix);
1650 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1651 lto_tree_code_to_tag (TREE_CODE (expr)));
1652 if (streamer_dump_file)
1653 {
1654 print_node_brief (streamer_dump_file, " Finished SCC of ",
1655 expr, 4);
1656 fprintf (streamer_dump_file, "\n\n");
1657 }
1658 lto_stats.num_pickle_refs_output++;
1659 }
1660 }
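/* Shape of the stream produced above for a tree seen twice (added
   annotation, schematic only):

     first occurrence:  one or more LTO_tree_scc records for the
                        not-yet-streamed SCCs, then
                        LTO_tree_pickle_reference <cache index> <tag>
     later occurrences: LTO_tree_pickle_reference <cache index> <tag>

   so the reader materializes each node once and shares it through the
   streamer cache afterwards.  */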
1661
1662
1663 /* Output to OB a list of try/catch handlers starting with FIRST. */
1664
1665 static void
1666 output_eh_try_list (struct output_block *ob, eh_catch first)
1667 {
1668 eh_catch n;
1669
1670 for (n = first; n; n = n->next_catch)
1671 {
1672 streamer_write_record_start (ob, LTO_eh_catch);
1673 stream_write_tree (ob, n->type_list, true);
1674 stream_write_tree (ob, n->filter_list, true);
1675 stream_write_tree (ob, n->label, true);
1676 }
1677
1678 streamer_write_record_start (ob, LTO_null);
1679 }
1680
1681
1682 /* Output EH region R to OB. */
1685
1686 static void
1687 output_eh_region (struct output_block *ob, eh_region r)
1688 {
1689 enum LTO_tags tag;
1690
1691 if (r == NULL)
1692 {
1693 streamer_write_record_start (ob, LTO_null);
1694 return;
1695 }
1696
1697 if (r->type == ERT_CLEANUP)
1698 tag = LTO_ert_cleanup;
1699 else if (r->type == ERT_TRY)
1700 tag = LTO_ert_try;
1701 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1702 tag = LTO_ert_allowed_exceptions;
1703 else if (r->type == ERT_MUST_NOT_THROW)
1704 tag = LTO_ert_must_not_throw;
1705 else
1706 gcc_unreachable ();
1707
1708 streamer_write_record_start (ob, tag);
1709 streamer_write_hwi (ob, r->index);
1710
1711 if (r->outer)
1712 streamer_write_hwi (ob, r->outer->index);
1713 else
1714 streamer_write_zero (ob);
1715
1716 if (r->inner)
1717 streamer_write_hwi (ob, r->inner->index);
1718 else
1719 streamer_write_zero (ob);
1720
1721 if (r->next_peer)
1722 streamer_write_hwi (ob, r->next_peer->index);
1723 else
1724 streamer_write_zero (ob);
1725
1726 if (r->type == ERT_TRY)
1727 {
1728 output_eh_try_list (ob, r->u.eh_try.first_catch);
1729 }
1730 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1731 {
1732 stream_write_tree (ob, r->u.allowed.type_list, true);
1733 stream_write_tree (ob, r->u.allowed.label, true);
1734 streamer_write_uhwi (ob, r->u.allowed.filter);
1735 }
1736 else if (r->type == ERT_MUST_NOT_THROW)
1737 {
1738 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1739 bitpack_d bp = bitpack_create (ob->main_stream);
1740 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1741 streamer_write_bitpack (&bp);
1742 }
1743
1744 if (r->landing_pads)
1745 streamer_write_hwi (ob, r->landing_pads->index);
1746 else
1747 streamer_write_zero (ob);
1748 }
1749
1750
1751 /* Output landing pad LP to OB. */
1752
1753 static void
1754 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1755 {
1756 if (lp == NULL)
1757 {
1758 streamer_write_record_start (ob, LTO_null);
1759 return;
1760 }
1761
1762 streamer_write_record_start (ob, LTO_eh_landing_pad);
1763 streamer_write_hwi (ob, lp->index);
1764 if (lp->next_lp)
1765 streamer_write_hwi (ob, lp->next_lp->index);
1766 else
1767 streamer_write_zero (ob);
1768
1769 if (lp->region)
1770 streamer_write_hwi (ob, lp->region->index);
1771 else
1772 streamer_write_zero (ob);
1773
1774 stream_write_tree (ob, lp->post_landing_pad, true);
1775 }
1776
1777
1778 /* Output the existing eh_table to OB. */
1779
1780 static void
1781 output_eh_regions (struct output_block *ob, struct function *fn)
1782 {
1783 if (fn->eh && fn->eh->region_tree)
1784 {
1785 unsigned i;
1786 eh_region eh;
1787 eh_landing_pad lp;
1788 tree ttype;
1789
1790 streamer_write_record_start (ob, LTO_eh_table);
1791
1792 /* Emit the index of the root of the EH region tree. */
1793 streamer_write_hwi (ob, fn->eh->region_tree->index);
1794
1795 /* Emit all the EH regions in the region array. */
1796 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1797 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1798 output_eh_region (ob, eh);
1799
1800 /* Emit all landing pads. */
1801 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1802 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1803 output_eh_lp (ob, lp);
1804
1805 /* Emit all the runtime type data. */
1806 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1807 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1808 stream_write_tree (ob, ttype, true);
1809
1810 /* Emit the table of action chains. */
1811 if (targetm.arm_eabi_unwinder)
1812 {
1813 tree t;
1814 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1815 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1816 stream_write_tree (ob, t, true);
1817 }
1818 else
1819 {
1820 uchar c;
1821 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1822 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1823 streamer_write_char_stream (ob->main_stream, c);
1824 }
1825 }
1826
1827 /* The LTO_null either terminates the record or indicates that there
1828 are no eh_records at all. */
1829 streamer_write_record_start (ob, LTO_null);
1830 }
1831
1832
1833 /* Output all of the active SSA names to OB. */
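/* Roughly: the length of the SSA name vector is written first; then, for
   every name that is not freed, not a virtual operand and not unreleased,
   its index, a default-definition flag and either SSA_NAME_VAR or, failing
   that, its type.  A zero index terminates the list.  */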
1834
1835 static void
1836 output_ssa_names (struct output_block *ob, struct function *fn)
1837 {
1838 unsigned int i, len;
1839
1840 len = vec_safe_length (SSANAMES (fn));
1841 streamer_write_uhwi (ob, len);
1842
1843 for (i = 1; i < len; i++)
1844 {
1845 tree ptr = (*SSANAMES (fn))[i];
1846
1847 if (ptr == NULL_TREE
1848 || SSA_NAME_IN_FREE_LIST (ptr)
1849 || virtual_operand_p (ptr)
1850 /* Simply skip unreleased SSA names. */
1851 || (! SSA_NAME_IS_DEFAULT_DEF (ptr)
1852 && (! SSA_NAME_DEF_STMT (ptr)
1853 || ! gimple_bb (SSA_NAME_DEF_STMT (ptr)))))
1854 continue;
1855
1856 streamer_write_uhwi (ob, i);
1857 streamer_write_char_stream (ob->main_stream,
1858 SSA_NAME_IS_DEFAULT_DEF (ptr));
1859 if (SSA_NAME_VAR (ptr))
1860 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1861 else
1862 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1863 stream_write_tree (ob, TREE_TYPE (ptr), true);
1864 }
1865
1866 streamer_write_zero (ob);
1867 }
1868
1869
1870
1871 /* Output the cfg. */
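/* In outline, the CFG goes to its own stream (ob->cfg_stream): the profile
   status, the highest basic block number, then for every block its index,
   its successor count and per edge the destination index, probability and
   flags, a -1 sentinel, the next_bb chain of block indices, another -1,
   and finally one entry per loop (header index plus the fields that
   copy_loop_info copies).  */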
1872
1873 static void
1874 output_cfg (struct output_block *ob, struct function *fn)
1875 {
1876 struct lto_output_stream *tmp_stream = ob->main_stream;
1877 basic_block bb;
1878
1879 ob->main_stream = ob->cfg_stream;
1880
1881 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1882 profile_status_for_fn (fn));
1883
1884 /* Output the number of the highest basic block. */
1885 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1886
1887 FOR_ALL_BB_FN (bb, fn)
1888 {
1889 edge_iterator ei;
1890 edge e;
1891
1892 streamer_write_hwi (ob, bb->index);
1893
1894 /* Output the successors and the edge flags. */
1895 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1896 FOR_EACH_EDGE (e, ei, bb->succs)
1897 {
1898 streamer_write_uhwi (ob, e->dest->index);
1899 e->probability.stream_out (ob);
1900 streamer_write_uhwi (ob, e->flags);
1901 }
1902 }
1903
1904 streamer_write_hwi (ob, -1);
1905
1906 bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
1907 while (bb->next_bb)
1908 {
1909 streamer_write_hwi (ob, bb->next_bb->index);
1910 bb = bb->next_bb;
1911 }
1912
1913 streamer_write_hwi (ob, -1);
1914
1915 /* Output the number of loops. */
1916 streamer_write_uhwi (ob, number_of_loops (fn));
1917
1918 /* Output each loop, skipping the tree root which has number zero. */
1919 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1920 {
1921 class loop *loop = get_loop (fn, i);
1922
1923 /* Write the index of the loop header. That's enough to rebuild
1924 the loop tree on the reader side. Stream -1 for an unused
1925 loop entry. */
1926 if (!loop)
1927 {
1928 streamer_write_hwi (ob, -1);
1929 continue;
1930 }
1931 else
1932 streamer_write_hwi (ob, loop->header->index);
1933
1934 /* Write everything copy_loop_info copies. */
1935 streamer_write_enum (ob->main_stream,
1936 loop_estimation, EST_LAST, loop->estimate_state);
1937 streamer_write_hwi (ob, loop->any_upper_bound);
1938 if (loop->any_upper_bound)
1939 streamer_write_widest_int (ob, loop->nb_iterations_upper_bound);
1940 streamer_write_hwi (ob, loop->any_likely_upper_bound);
1941 if (loop->any_likely_upper_bound)
1942 streamer_write_widest_int (ob, loop->nb_iterations_likely_upper_bound);
1943 streamer_write_hwi (ob, loop->any_estimate);
1944 if (loop->any_estimate)
1945 streamer_write_widest_int (ob, loop->nb_iterations_estimate);
1946
1947 /* Write OMP SIMD related info and the remaining per-loop flags. */
1948 streamer_write_hwi (ob, loop->safelen);
1949 streamer_write_hwi (ob, loop->unroll);
1950 streamer_write_hwi (ob, loop->owned_clique);
1951 streamer_write_hwi (ob, loop->dont_vectorize);
1952 streamer_write_hwi (ob, loop->force_vectorize);
1953 stream_write_tree (ob, loop->simduid, true);
1954 }
1955
1956 ob->main_stream = tmp_stream;
1957 }
1958
1959
1960 /* Create the section header in the file using OB. If the section type is
1961 for a function body, FN is the decl for that function. */
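/* The section written here is just a header followed by the raw streams:
   an lto_function_header with the cfg/main/string sizes filled in, then
   the cfg stream (function bodies only), the main stream and the string
   stream, in that order.  */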
1962
1963 void
1964 produce_asm (struct output_block *ob, tree fn)
1965 {
1966 enum lto_section_type section_type = ob->section_type;
1967 struct lto_function_header header;
1968 char *section_name;
1969
1970 if (section_type == LTO_section_function_body)
1971 {
1972 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1973 section_name = lto_get_section_name (section_type, name,
1974 symtab_node::get (fn)->order,
1975 NULL);
1976 }
1977 else
1978 section_name = lto_get_section_name (section_type, NULL, 0, NULL);
1979
1980 lto_begin_section (section_name, !flag_wpa);
1981 free (section_name);
1982
1983 /* The entire header stream is computed here. */
1984 memset (&header, 0, sizeof (struct lto_function_header));
1985
1986 if (section_type == LTO_section_function_body)
1987 header.cfg_size = ob->cfg_stream->total_size;
1988 header.main_size = ob->main_stream->total_size;
1989 header.string_size = ob->string_stream->total_size;
1990 lto_write_data (&header, sizeof header);
1991
1992 /* Put all of the gimple and the string table out to the asm file as a
1993 block of text. */
1994 if (section_type == LTO_section_function_body)
1995 lto_write_stream (ob->cfg_stream);
1996 lto_write_stream (ob->main_stream);
1997 lto_write_stream (ob->string_stream);
1998
1999 lto_end_section ();
2000 }
2001
2002
2003 /* Output the base body of struct function FN using output block OB. */
2004
2005 static void
2006 output_struct_function_base (struct output_block *ob, struct function *fn)
2007 {
2008 struct bitpack_d bp;
2009 unsigned i;
2010 tree t;
2011
2012 /* Output the static chain and non-local goto save area. */
2013 stream_write_tree (ob, fn->static_chain_decl, true);
2014 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
2015
2016 /* Output all the local variables in the function. */
2017 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
2018 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
2019 stream_write_tree (ob, t, true);
2020
2021 /* Output current IL state of the function. */
2022 streamer_write_uhwi (ob, fn->curr_properties);
2023
2024 /* Write all the attributes for FN. */
2025 bp = bitpack_create (ob->main_stream);
2026 bp_pack_value (&bp, fn->is_thunk, 1);
2027 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
2028 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
2029 bp_pack_value (&bp, fn->returns_struct, 1);
2030 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
2031 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
2032 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
2033 bp_pack_value (&bp, fn->after_inlining, 1);
2034 bp_pack_value (&bp, fn->stdarg, 1);
2035 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
2036 bp_pack_value (&bp, fn->has_forced_label_in_static, 1);
2037 bp_pack_value (&bp, fn->calls_alloca, 1);
2038 bp_pack_value (&bp, fn->calls_setjmp, 1);
2039 bp_pack_value (&bp, fn->calls_eh_return, 1);
2040 bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
2041 bp_pack_value (&bp, fn->has_simduid_loops, 1);
2042 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
2043 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
2044 bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);
2045
2046 /* Output the function start and end loci. */
2047 stream_output_location (ob, &bp, fn->function_start_locus);
2048 stream_output_location (ob, &bp, fn->function_end_locus);
2049
2050 /* Save the instance discriminator if present. */
2051 int *instance_number_p = NULL;
2052 if (decl_to_instance_map)
2053 instance_number_p = decl_to_instance_map->get (fn->decl);
2054 bp_pack_value (&bp, !!instance_number_p, 1);
2055 if (instance_number_p)
2056 bp_pack_value (&bp, *instance_number_p, sizeof (int) * CHAR_BIT);
2057
2058 streamer_write_bitpack (&bp);
2059 }
2060
2061
2062 /* Collect all leaf BLOCKs beyond ROOT into LEAFS. */
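/* For example, given a hypothetical block tree where ROOT has subblocks A
   and B, A has subblocks A1 and A2 and B has none, the leafs collected are
   A1, A2 and B; blocks that have subblocks are recursed into rather than
   pushed themselves.  */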
2063
2064 static void
2065 collect_block_tree_leafs (tree root, vec<tree> &leafs)
2066 {
2067 for (root = BLOCK_SUBBLOCKS (root); root; root = BLOCK_CHAIN (root))
2068 if (! BLOCK_SUBBLOCKS (root))
2069 leafs.safe_push (root);
2070 else
2071 collect_block_tree_leafs (root, leafs);
2072 }
2073
2074 /* This performs function body modifications that are needed for streaming
2075 to work. */
2076
2077 void
2078 lto_prepare_function_for_streaming (struct cgraph_node *node)
2079 {
2080 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2081 basic_block bb;
2082
2083 if (number_of_loops (fn))
2084 {
2085 push_cfun (fn);
2086 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
2087 loop_optimizer_finalize ();
2088 pop_cfun ();
2089 }
2090 /* We will renumber the statements. The code that does this uses
2091 the same ordering that we use for serializing them so we can use
2092 the same code on the other end and not have to write out the
2093 statement numbers. We do not assign UIDs to virtual PHIs here
2094 because they get re-computed on the fly, which would make the
2095 numbers inconsistent. */
2096 set_gimple_stmt_max_uid (fn, 0);
2097 FOR_ALL_BB_FN (bb, fn)
2098 {
2099 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2100 gsi_next (&gsi))
2101 {
2102 gphi *stmt = gsi.phi ();
2103
2104 /* Virtual PHIs are not going to be streamed. */
2105 if (!virtual_operand_p (gimple_phi_result (stmt)))
2106 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fn));
2107 }
2108 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
2109 gsi_next (&gsi))
2110 {
2111 gimple *stmt = gsi_stmt (gsi);
2112 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fn));
2113 }
2114 }
2115 /* To avoid keeping duplicate gimple IDs in the statements, renumber
2116 virtual phis now. */
2117 FOR_ALL_BB_FN (bb, fn)
2118 {
2119 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2120 gsi_next (&gsi))
2121 {
2122 gphi *stmt = gsi.phi ();
2123 if (virtual_operand_p (gimple_phi_result (stmt)))
2124 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fn));
2125 }
2126 }
2127
2128 }
2129
2130 /* Output the body of function NODE->DECL. */
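/* In outline, the record written below is: an LTO_function tag,
   DECL_RESULT, the DECL_ARGUMENTS chain, any debug args, DECL_INITIAL plus
   the collected block-tree leafs, and then, only for functions that have a
   gimple body, the struct function bits, the SSA names, the EH regions,
   every basic block, an LTO_null terminator and the CFG.  */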
2131
2132 static void
2133 output_function (struct cgraph_node *node)
2134 {
2135 tree function;
2136 struct function *fn;
2137 basic_block bb;
2138 struct output_block *ob;
2139
2140 if (streamer_dump_file)
2141 fprintf (streamer_dump_file, "\nStreaming body of %s\n",
2142 node->dump_name ());
2143
2144 function = node->decl;
2145 fn = DECL_STRUCT_FUNCTION (function);
2146 ob = create_output_block (LTO_section_function_body);
2147
2148 clear_line_info (ob);
2149 ob->symbol = node;
2150
2151 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
2152
2153 /* Make string 0 be a NULL string. */
2154 streamer_write_char_stream (ob->string_stream, 0);
2155
2156 streamer_write_record_start (ob, LTO_function);
2157
2158 /* Output decls for parameters and args. */
2159 stream_write_tree (ob, DECL_RESULT (function), true);
2160 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
2161
2162 /* Output debug args if available. */
2163 vec<tree, va_gc> **debugargs = decl_debug_args_lookup (function);
2164 if (! debugargs)
2165 streamer_write_uhwi (ob, 0);
2166 else
2167 {
2168 streamer_write_uhwi (ob, (*debugargs)->length ());
2169 for (unsigned i = 0; i < (*debugargs)->length (); ++i)
2170 stream_write_tree (ob, (**debugargs)[i], true);
2171 }
2172
2173 /* Output DECL_INITIAL for the function, which contains the tree of
2174 lexical scopes. */
2175 stream_write_tree (ob, DECL_INITIAL (function), true);
2176 /* As we do not recurse into BLOCK_SUBBLOCKS but only into BLOCK_SUPERCONTEXT,
2177 collect the block tree leafs and stream those. */
2178 auto_vec<tree> block_tree_leafs;
2179 if (DECL_INITIAL (function))
2180 collect_block_tree_leafs (DECL_INITIAL (function), block_tree_leafs);
2181 streamer_write_uhwi (ob, block_tree_leafs.length ());
2182 for (unsigned i = 0; i < block_tree_leafs.length (); ++i)
2183 stream_write_tree (ob, block_tree_leafs[i], true);
2184
2185 /* We also stream abstract functions, for which we stream only the bits
2186 needed for debug info. */
2187 if (gimple_has_body_p (function))
2188 {
2189 streamer_write_uhwi (ob, 1);
2190 output_struct_function_base (ob, fn);
2191
2192 /* Output all the SSA names used in the function. */
2193 output_ssa_names (ob, fn);
2194
2195 /* Output any exception handling regions. */
2196 output_eh_regions (ob, fn);
2197
2198 /* Output the code for the function. */
2199 FOR_ALL_BB_FN (bb, fn)
2200 output_bb (ob, bb, fn);
2201
2202 /* The terminator for this function. */
2203 streamer_write_record_start (ob, LTO_null);
2204
2205 output_cfg (ob, fn);
2206 }
2207 else
2208 streamer_write_uhwi (ob, 0);
2209
2210 /* Create a section to hold the pickled output of this function. */
2211 produce_asm (ob, function);
2212
2213 destroy_output_block (ob);
2214 if (streamer_dump_file)
2215 fprintf (streamer_dump_file, "Finished streaming %s\n",
2216 node->dump_name ());
2217 }
2218
2219 /* Output the initializer of variable NODE->DECL. */
2220
2221 static void
2222 output_constructor (struct varpool_node *node)
2223 {
2224 tree var = node->decl;
2225 struct output_block *ob;
2226
2227 if (streamer_dump_file)
2228 fprintf (streamer_dump_file, "\nStreaming constructor of %s\n",
2229 node->dump_name ());
2230
2231 timevar_push (TV_IPA_LTO_CTORS_OUT);
2232 ob = create_output_block (LTO_section_function_body);
2233
2234 clear_line_info (ob);
2235 ob->symbol = node;
2236
2237 /* Make string 0 be a NULL string. */
2238 streamer_write_char_stream (ob->string_stream, 0);
2239
2240 /* Output DECL_INITIAL for the variable, which holds its static
2241 initializer. */
2242 stream_write_tree (ob, DECL_INITIAL (var), true);
2243
2244 /* Create a section to hold the pickled output of this function. */
2245 produce_asm (ob, var);
2246
2247 destroy_output_block (ob);
2248 if (streamer_dump_file)
2249 fprintf (streamer_dump_file, "Finished streaming %s\n",
2250 node->dump_name ());
2251 timevar_pop (TV_IPA_LTO_CTORS_OUT);
2252 }
2253
2254
2255 /* Emit toplevel asms. */
2256
2257 void
2258 lto_output_toplevel_asms (void)
2259 {
2260 struct output_block *ob;
2261 struct asm_node *can;
2262 char *section_name;
2263 struct lto_simple_header_with_strings header;
2264
2265 if (!symtab->first_asm_symbol ())
2266 return;
2267
2268 ob = create_output_block (LTO_section_asm);
2269
2270 /* Make string 0 be a NULL string. */
2271 streamer_write_char_stream (ob->string_stream, 0);
2272
2273 for (can = symtab->first_asm_symbol (); can; can = can->next)
2274 {
2275 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2276 streamer_write_hwi (ob, can->order);
2277 }
2278
2279 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2280
2281 section_name = lto_get_section_name (LTO_section_asm, NULL, 0, NULL);
2282 lto_begin_section (section_name, !flag_wpa);
2283 free (section_name);
2284
2285 /* The entire header stream is computed here. */
2286 memset (&header, 0, sizeof (header));
2287
2288 header.main_size = ob->main_stream->total_size;
2289 header.string_size = ob->string_stream->total_size;
2290 lto_write_data (&header, sizeof header);
2291
2292 /* Put all of the gimple and the string table out to the asm file as a
2293 block of text. */
2294 lto_write_stream (ob->main_stream);
2295 lto_write_stream (ob->string_stream);
2296
2297 lto_end_section ();
2298
2299 destroy_output_block (ob);
2300 }
2301
2302
2303 /* Copy the function body or variable constructor of NODE without deserializing. */
2304
2305 static void
2306 copy_function_or_variable (struct symtab_node *node)
2307 {
2308 tree function = node->decl;
2309 struct lto_file_decl_data *file_data = node->lto_file_data;
2310 const char *data;
2311 size_t len;
2312 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2313 char *section_name =
2314 lto_get_section_name (LTO_section_function_body, name, node->order, NULL);
2315 size_t i, j;
2316 struct lto_in_decl_state *in_state;
2317 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2318
2319 if (streamer_dump_file)
2320 fprintf (streamer_dump_file, "Copying section for %s\n", name);
2321 lto_begin_section (section_name, false);
2322 free (section_name);
2323
2324 /* We may have renamed the declaration, e.g., a static function. */
2325 name = lto_get_decl_name_mapping (file_data, name);
2326
2327 data = lto_get_raw_section_data (file_data, LTO_section_function_body,
2328 name, node->order - file_data->order_base,
2329 &len);
2330 gcc_assert (data);
2331
2332 /* Do a bit copy of the function body. */
2333 lto_write_raw_data (data, len);
2334
2335 /* Copy decls. */
2336 in_state =
2337 lto_get_function_in_decl_state (node->lto_file_data, function);
2338 gcc_assert (in_state);
2339 out_state->compressed = in_state->compressed;
2340
2341 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2342 {
2343 size_t n = vec_safe_length (in_state->streams[i]);
2344 vec<tree, va_gc> *trees = in_state->streams[i];
2345 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2346
2347 /* The out state must have the same indices as the in state.
2348 So just copy the vector. All the encoders in the in state
2349 must be empty when we reach here. */
2350 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2351 encoder->trees.reserve_exact (n);
2352 for (j = 0; j < n; j++)
2353 encoder->trees.safe_push ((*trees)[j]);
2354 }
2355
2356 lto_free_raw_section_data (file_data, LTO_section_function_body, name,
2357 data, len);
2358 lto_end_section ();
2359 }
2360
2361 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
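/* Conceptually, a reference such as PUBVAR.field (PUBVAR being a public
   VAR_DECL; the name is only illustrative) has its base rewritten to
   MEM[(T *)&PUBVAR], i.e. a MEM_REF of the variable's address with a zero
   offset, preserving the variable's type and volatility.  */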
2362
2363 static tree
2364 wrap_refs (tree *tp, int *ws, void *)
2365 {
2366 tree t = *tp;
2367 if (handled_component_p (t)
2368 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
2369 && TREE_PUBLIC (TREE_OPERAND (t, 0)))
2370 {
2371 tree decl = TREE_OPERAND (t, 0);
2372 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2373 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2374 build1 (ADDR_EXPR, ptrtype, decl),
2375 build_int_cst (ptrtype, 0));
2376 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2377 *ws = 0;
2378 }
2379 else if (TREE_CODE (t) == CONSTRUCTOR)
2380 ;
2381 else if (!EXPR_P (t))
2382 *ws = 0;
2383 return NULL_TREE;
2384 }
2385
2386 /* Remove functions that are no longer used from offload_funcs, and mark the
2387 remaining ones with DECL_PRESERVE_P. */
2388
2389 static void
2390 prune_offload_funcs (void)
2391 {
2392 if (!offload_funcs)
2393 return;
2394
2395 unsigned ix, ix2;
2396 tree *elem_ptr;
2397 VEC_ORDERED_REMOVE_IF (*offload_funcs, ix, ix2, elem_ptr,
2398 cgraph_node::get (*elem_ptr) == NULL);
2399
2400 tree fn_decl;
2401 FOR_EACH_VEC_ELT (*offload_funcs, ix, fn_decl)
2402 DECL_PRESERVE_P (fn_decl) = 1;
2403 }
2404
2405 /* Produce LTO section that contains global information
2406 about LTO bytecode. */
2407
2408 static void
2409 produce_lto_section ()
2410 {
2411 /* Stream LTO meta section. */
2412 output_block *ob = create_output_block (LTO_section_lto);
2413
2414 char * section_name = lto_get_section_name (LTO_section_lto, NULL, 0, NULL);
2415 lto_begin_section (section_name, false);
2416 free (section_name);
2417
2418 #ifdef HAVE_ZSTD_H
2419 lto_compression compression = ZSTD;
2420 #else
2421 lto_compression compression = ZLIB;
2422 #endif
2423
2424 bool slim_object = flag_generate_lto && !flag_fat_lto_objects;
2425 lto_section s
2426 = { LTO_major_version, LTO_minor_version, slim_object, 0 };
2427 s.set_compression (compression);
2428 lto_write_data (&s, sizeof s);
2429 lto_end_section ();
2430 destroy_output_block (ob);
2431 }
2432
2433 /* Compare symbols to get them sorted by their input file (to optimize streaming). */
2434
2435 static int
2436 cmp_symbol_files (const void *pn1, const void *pn2)
2437 {
2438 const symtab_node *n1 = *(const symtab_node * const *)pn1;
2439 const symtab_node *n2 = *(const symtab_node * const *)pn2;
2440
2441 int file_order1 = n1->lto_file_data ? n1->lto_file_data->order : -1;
2442 int file_order2 = n2->lto_file_data ? n2->lto_file_data->order : -1;
2443
2444 /* Order files the same way they appeared on the command line, to reduce
2445 seeking while copying sections. */
2446 if (file_order1 != file_order2)
2447 return file_order1 - file_order2;
2448
2449 /* Order within static library. */
2450 if (n1->lto_file_data && n1->lto_file_data->id != n2->lto_file_data->id)
2451 {
2452 if (n1->lto_file_data->id > n2->lto_file_data->id)
2453 return 1;
2454 if (n1->lto_file_data->id < n2->lto_file_data->id)
2455 return -1;
2456 }
2457
2458 /* And finally order by the definition order. */
2459 return n1->order - n2->order;
2460 }
2461
2462 /* Main entry point from the pass manager. */
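/* In outline: emit the LTO meta section, collect the symbols whose bodies
   or initializers need to go out, sort them by originating file, and for
   each one either stream it (output_function / output_constructor) or copy
   its already-streamed section verbatim (copy_function_or_variable); then
   emit the symbol table and the offload tables.  */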
2463
2464 void
2465 lto_output (void)
2466 {
2467 struct lto_out_decl_state *decl_state;
2468 bitmap output = NULL;
2469 bitmap_obstack output_obstack;
2470 unsigned int i, n_nodes;
2471 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2472 auto_vec<symtab_node *> symbols_to_copy;
2473
2474 prune_offload_funcs ();
2475
2476 if (flag_checking)
2477 {
2478 bitmap_obstack_initialize (&output_obstack);
2479 output = BITMAP_ALLOC (&output_obstack);
2480 }
2481
2482 /* Initialize the streamer. */
2483 lto_streamer_init ();
2484
2485 produce_lto_section ();
2486
2487 n_nodes = lto_symtab_encoder_size (encoder);
2488 /* Prepare vector of functions to output and then sort it to optimize
2489 section copying. */
2490 for (i = 0; i < n_nodes; i++)
2491 {
2492 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2493 if (snode->alias)
2494 continue;
2495 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2496 {
2497 if (lto_symtab_encoder_encode_body_p (encoder, node))
2498 symbols_to_copy.safe_push (node);
2499 }
2500 else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2501 {
2502 /* Wrap symbol references inside the ctor in a type
2503 preserving MEM_REF. */
2504 tree ctor = DECL_INITIAL (node->decl);
2505 if (ctor && !in_lto_p)
2506 walk_tree (&ctor, wrap_refs, NULL, NULL);
2507 if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2508 && lto_symtab_encoder_encode_initializer_p (encoder, node))
2509 symbols_to_copy.safe_push (node);
2510 }
2511 }
2512 symbols_to_copy.qsort (cmp_symbol_files);
2513 for (i = 0; i < symbols_to_copy.length (); i++)
2514 {
2515 symtab_node *snode = symbols_to_copy[i];
2516 cgraph_node *cnode;
2517 varpool_node *vnode;
2518
2519 if (flag_checking)
2520 gcc_assert (bitmap_set_bit (output, DECL_UID (snode->decl)));
2521
2522 decl_state = lto_new_out_decl_state ();
2523 lto_push_out_decl_state (decl_state);
2524
2525 if ((cnode = dyn_cast <cgraph_node *> (snode))
2526 && (gimple_has_body_p (cnode->decl)
2527 || (!flag_wpa
2528 && flag_incremental_link != INCREMENTAL_LINK_LTO)
2529 /* Thunks have no body but they may be synthesized
2530 at WPA time. */
2531 || DECL_ARGUMENTS (cnode->decl)))
2532 output_function (cnode);
2533 else if ((vnode = dyn_cast <varpool_node *> (snode))
2534 && (DECL_INITIAL (vnode->decl) != error_mark_node
2535 || (!flag_wpa
2536 && flag_incremental_link != INCREMENTAL_LINK_LTO)))
2537 output_constructor (vnode);
2538 else
2539 copy_function_or_variable (snode);
2540 gcc_assert (lto_get_out_decl_state () == decl_state);
2541 lto_pop_out_decl_state ();
2542 lto_record_function_out_decl_state (snode->decl, decl_state);
2543 }
2544
2545 /* Emit the callgraph after emitting function bodies. This needs to
2546 be done now to make sure that all the statements in every function
2547 have been renumbered so that edges can be associated with call
2548 statements using the statement UIDs. */
2549 output_symtab ();
2550
2551 output_offload_tables ();
2552
2553 if (flag_checking)
2554 {
2555 BITMAP_FREE (output);
2556 bitmap_obstack_release (&output_obstack);
2557 }
2558 }
2559
2560 /* Write each node encoded by ENCODER to OB, as well as those reachable
2561 from it and required for correct representation of its semantics.
2562 Each node in ENCODER must be a global declaration or a type. A node
2563 is written only once, even if it appears multiple times in the
2564 vector. Certain transitively-reachable nodes, such as those
2565 representing expressions, may be duplicated, but such nodes
2566 must not appear in ENCODER itself. */
2567
2568 static void
2569 write_global_stream (struct output_block *ob,
2570 struct lto_tree_ref_encoder *encoder)
2571 {
2572 tree t;
2573 size_t index;
2574 const size_t size = lto_tree_ref_encoder_size (encoder);
2575
2576 for (index = 0; index < size; index++)
2577 {
2578 t = lto_tree_ref_encoder_get_tree (encoder, index);
2579 if (streamer_dump_file)
2580 {
2581 fprintf (streamer_dump_file, " %i:", (int)index);
2582 print_node_brief (streamer_dump_file, "", t, 4);
2583 fprintf (streamer_dump_file, "\n");
2584 }
2585 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2586 stream_write_tree (ob, t, false);
2587 }
2588 }
2589
2590
2591 /* Write a sequence of indices into the globals vector corresponding
2592 to the trees in ENCODER. These are used by the reader to map the
2593 indices used to refer to global entities within function bodies to
2594 their referents. */
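/* The on-disk form is simply a 32-bit count followed by that many 32-bit
   cache slot numbers, one per tree in ENCODER.  */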
2595
2596 static void
2597 write_global_references (struct output_block *ob,
2598 struct lto_tree_ref_encoder *encoder)
2599 {
2600 tree t;
2601 uint32_t index;
2602 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2603
2604 /* Write size and slot indexes as 32-bit unsigned numbers. */
2605 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2606 data[0] = size;
2607
2608 for (index = 0; index < size; index++)
2609 {
2610 unsigned slot_num;
2611
2612 t = lto_tree_ref_encoder_get_tree (encoder, index);
2613 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2614 gcc_assert (slot_num != (unsigned)-1);
2615 data[index + 1] = slot_num;
2616 }
2617
2618 lto_write_data (data, sizeof (int32_t) * (size + 1));
2619 free (data);
2620 }
2621
2622
2623 /* Write all the streams in an lto_out_decl_state STATE using
2624 output block OB. */
2625
2626 void
2627 lto_output_decl_state_streams (struct output_block *ob,
2628 struct lto_out_decl_state *state)
2629 {
2630 int i;
2631
2632 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2633 write_global_stream (ob, &state->streams[i]);
2634 }
2635
2636
2637 /* Write all the references in an lto_out_decl_state STATE using
2638 output block OB. */
2639
2640 void
2641 lto_output_decl_state_refs (struct output_block *ob,
2642 struct lto_out_decl_state *state)
2643 {
2644 unsigned i;
2645 unsigned ref;
2646 tree decl;
2647
2648 /* Write the reference to the FUNCTION_DECL. If there is no function,
2649 write a reference to void_type_node. */
2650 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2651 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2652 gcc_assert (ref != (unsigned)-1);
2653 ref = ref * 2 + (state->compressed ? 1 : 0);
2654 lto_write_data (&ref, sizeof (uint32_t));
2655
2656 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2657 write_global_references (ob, &state->streams[i]);
2658 }
2659
2660
2661 /* Return the written size of STATE. */
2662
2663 static size_t
2664 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2665 {
2666 int i;
2667 size_t size;
2668
2669 size = sizeof (int32_t); /* fn_ref. */
2670 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2671 {
2672 size += sizeof (int32_t); /* vector size. */
2673 size += (lto_tree_ref_encoder_size (&state->streams[i])
2674 * sizeof (int32_t));
2675 }
2676 return size;
2677 }
2678
2679
2680 /* Write symbol T to the current section, using CACHE to look up its slot
2681 number. SEEN specifies symbols we wrote so far; ALIAS is true for aliases. */
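/* Each entry written below consists of the NUL-terminated assembler name,
   the NUL-terminated comdat group (empty when there is none), one byte each
   for the plugin symbol kind and visibility, an 8-byte size and a 4-byte
   slot number into the tree cache.  */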
2682
2683 static void
2684 write_symbol (struct streamer_tree_cache_d *cache,
2685 tree t, hash_set<const char *> *seen, bool alias)
2686 {
2687 const char *name;
2688 enum gcc_plugin_symbol_kind kind;
2689 enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
2690 unsigned slot_num;
2691 uint64_t size;
2692 const char *comdat;
2693 unsigned char c;
2694
2695 gcc_assert (VAR_OR_FUNCTION_DECL_P (t));
2696
2697 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2698
2699 /* This behaves like assemble_name_raw in varasm.c, performing the
2700 same name manipulations that ASM_OUTPUT_LABELREF does. */
2701 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2702
2703 if (seen->add (name))
2704 return;
2705
2706 streamer_tree_cache_lookup (cache, t, &slot_num);
2707 gcc_assert (slot_num != (unsigned)-1);
2708
2709 if (DECL_EXTERNAL (t))
2710 {
2711 if (DECL_WEAK (t))
2712 kind = GCCPK_WEAKUNDEF;
2713 else
2714 kind = GCCPK_UNDEF;
2715 }
2716 else
2717 {
2718 if (DECL_WEAK (t))
2719 kind = GCCPK_WEAKDEF;
2720 else if (DECL_COMMON (t))
2721 kind = GCCPK_COMMON;
2722 else
2723 kind = GCCPK_DEF;
2724
2725 /* When something is defined, it should have a node attached. */
2726 gcc_assert (alias || !VAR_P (t) || varpool_node::get (t)->definition);
2727 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2728 || (cgraph_node::get (t)
2729 && cgraph_node::get (t)->definition));
2730 }
2731
2732 /* Imitate what default_elf_asm_output_external does.
2733 When a symbol is external, we need to output it with DEFAULT visibility
2734 when compiling with -fvisibility=default, but with HIDDEN visibility
2735 when the symbol has attribute ((visibility ("hidden"))) specified.
2736 targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
2737 right. */
2738
2739 if (DECL_EXTERNAL (t)
2740 && !targetm.binds_local_p (t))
2741 visibility = GCCPV_DEFAULT;
2742 else
2743 switch (DECL_VISIBILITY (t))
2744 {
2745 case VISIBILITY_DEFAULT:
2746 visibility = GCCPV_DEFAULT;
2747 break;
2748 case VISIBILITY_PROTECTED:
2749 visibility = GCCPV_PROTECTED;
2750 break;
2751 case VISIBILITY_HIDDEN:
2752 visibility = GCCPV_HIDDEN;
2753 break;
2754 case VISIBILITY_INTERNAL:
2755 visibility = GCCPV_INTERNAL;
2756 break;
2757 }
2758
2759 if (kind == GCCPK_COMMON
2760 && DECL_SIZE_UNIT (t)
2761 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2762 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2763 else
2764 size = 0;
2765
2766 if (DECL_ONE_ONLY (t))
2767 comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
2768 else
2769 comdat = "";
2770
2771 lto_write_data (name, strlen (name) + 1);
2772 lto_write_data (comdat, strlen (comdat) + 1);
2773 c = (unsigned char) kind;
2774 lto_write_data (&c, 1);
2775 c = (unsigned char) visibility;
2776 lto_write_data (&c, 1);
2777 lto_write_data (&size, 8);
2778 lto_write_data (&slot_num, 4);
2779 }
2780
2781 /* Write extension information for symbols (symbol type, section flags). */
2782
2783 static void
2784 write_symbol_extension_info (tree t)
2785 {
2786 unsigned char c;
2787 c = ((unsigned char) TREE_CODE (t) == VAR_DECL
2788 ? GCCST_VARIABLE : GCCST_FUNCTION);
2789 lto_write_data (&c, 1);
2790 unsigned char section_kind = 0;
2791 if (TREE_CODE (t) == VAR_DECL)
2792 {
2793 section *s = get_variable_section (t, false);
2794 if (s->common.flags & SECTION_BSS)
2795 section_kind |= GCCSSK_BSS;
2796 }
2797 lto_write_data (&section_kind, 1);
2798 }
2799
2800 /* Write an IL symbol table to OB. The symbols written are those in the
2801 symtab node encoder of OB's decl state. */
2802
2803 static unsigned int
2804 produce_symtab (struct output_block *ob)
2805 {
2806 unsigned int streamed_symbols = 0;
2807 struct streamer_tree_cache_d *cache = ob->writer_cache;
2808 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, 0, NULL);
2809 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2810 lto_symtab_encoder_iterator lsei;
2811
2812 lto_begin_section (section_name, false);
2813 free (section_name);
2814
2815 hash_set<const char *> seen;
2816
2817 /* Write the symbol table.
2818 First write everything defined and then all declarations.
2819 This is necessary to handle cases where we have duplicated symbols. */
2820 for (lsei = lsei_start (encoder);
2821 !lsei_end_p (lsei); lsei_next (&lsei))
2822 {
2823 symtab_node *node = lsei_node (lsei);
2824
2825 if (DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
2826 continue;
2827 write_symbol (cache, node->decl, &seen, false);
2828 ++streamed_symbols;
2829 }
2830 for (lsei = lsei_start (encoder);
2831 !lsei_end_p (lsei); lsei_next (&lsei))
2832 {
2833 symtab_node *node = lsei_node (lsei);
2834
2835 if (!DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
2836 continue;
2837 write_symbol (cache, node->decl, &seen, false);
2838 ++streamed_symbols;
2839 }
2840
2841 lto_end_section ();
2842
2843 return streamed_symbols;
2844 }
2845
2846 /* Symtab extension version. */
2847 #define LTO_SYMTAB_EXTENSION_VERSION 1
2848
2849 /* Write an IL symbol table extension to OB. The symbols written are those
2850 in the symtab node encoder of OB's decl state. */
2851
2852 static void
2853 produce_symtab_extension (struct output_block *ob,
2854 unsigned int previous_streamed_symbols)
2855 {
2856 unsigned int streamed_symbols = 0;
2857 char *section_name = lto_get_section_name (LTO_section_symtab_extension,
2858 NULL, 0, NULL);
2859 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2860 lto_symtab_encoder_iterator lsei;
2861
2862 lto_begin_section (section_name, false);
2863 free (section_name);
2864
2865 unsigned char version = LTO_SYMTAB_EXTENSION_VERSION;
2866 lto_write_data (&version, 1);
2867
2868 /* Write the symbol table.
2869 First write everything defined and then all declarations.
2870 This is necessary to handle cases where we have duplicated symbols. */
2871 for (lsei = lsei_start (encoder);
2872 !lsei_end_p (lsei); lsei_next (&lsei))
2873 {
2874 symtab_node *node = lsei_node (lsei);
2875
2876 if (DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
2877 continue;
2878 write_symbol_extension_info (node->decl);
2879 ++streamed_symbols;
2880 }
2881 for (lsei = lsei_start (encoder);
2882 !lsei_end_p (lsei); lsei_next (&lsei))
2883 {
2884 symtab_node *node = lsei_node (lsei);
2885
2886 if (!DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
2887 continue;
2888 write_symbol_extension_info (node->decl);
2889 ++streamed_symbols;
2890 }
2891
2892 gcc_assert (previous_streamed_symbols == streamed_symbols);
2893 lto_end_section ();
2894 }
2895
2896
2897 /* Init the streamer_mode_table for output, where we collect info on what
2898 machine_mode values have been streamed. */
2899 void
2900 lto_output_init_mode_table (void)
2901 {
2902 memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
2903 }
2904
2905
2906 /* Write the mode table. */
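/* Only modes marked in streamer_mode_table are emitted.  They are streamed
   in two passes so that a mode's inner mode always precedes it, each as a
   bitpacked tuple of mode, class, size, precision, inner mode and nunits
   plus class-specific data (ibit/fbit or the real format name) and the
   mode name; a VOIDmode entry terminates the table.  */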
2907 static void
2908 lto_write_mode_table (void)
2909 {
2910 struct output_block *ob;
2911 ob = create_output_block (LTO_section_mode_table);
2912 bitpack_d bp = bitpack_create (ob->main_stream);
2913
2914 /* Ensure that for GET_MODE_INNER (m) != m the inner mode is
2915 marked as well. */
2916 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2917 if (streamer_mode_table[i])
2918 {
2919 machine_mode m = (machine_mode) i;
2920 machine_mode inner_m = GET_MODE_INNER (m);
2921 if (inner_m != m)
2922 streamer_mode_table[(int) inner_m] = 1;
2923 }
2924 /* First stream modes that have GET_MODE_INNER (m) == m,
2925 so that we can refer to them afterwards. */
2926 for (int pass = 0; pass < 2; pass++)
2927 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2928 if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
2929 {
2930 machine_mode m = (machine_mode) i;
2931 if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
2932 continue;
2933 bp_pack_value (&bp, m, 8);
2934 bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
2935 bp_pack_poly_value (&bp, GET_MODE_SIZE (m), 16);
2936 bp_pack_poly_value (&bp, GET_MODE_PRECISION (m), 16);
2937 bp_pack_value (&bp, GET_MODE_INNER (m), 8);
2938 bp_pack_poly_value (&bp, GET_MODE_NUNITS (m), 16);
2939 switch (GET_MODE_CLASS (m))
2940 {
2941 case MODE_FRACT:
2942 case MODE_UFRACT:
2943 case MODE_ACCUM:
2944 case MODE_UACCUM:
2945 bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
2946 bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
2947 break;
2948 case MODE_FLOAT:
2949 case MODE_DECIMAL_FLOAT:
2950 bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
2951 break;
2952 default:
2953 break;
2954 }
2955 bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
2956 }
2957 bp_pack_value (&bp, VOIDmode, 8);
2958
2959 streamer_write_bitpack (&bp);
2960
2961 char *section_name
2962 = lto_get_section_name (LTO_section_mode_table, NULL, 0, NULL);
2963 lto_begin_section (section_name, !flag_wpa);
2964 free (section_name);
2965
2966 /* The entire header stream is computed here. */
2967 struct lto_simple_header_with_strings header;
2968 memset (&header, 0, sizeof (header));
2969
2970 header.main_size = ob->main_stream->total_size;
2971 header.string_size = ob->string_stream->total_size;
2972 lto_write_data (&header, sizeof header);
2973
2974 /* Put all of the gimple and the string table out to the asm file as a
2975 block of text. */
2976 lto_write_stream (ob->main_stream);
2977 lto_write_stream (ob->string_stream);
2978
2979 lto_end_section ();
2980 destroy_output_block (ob);
2981 }
2982
2983
2984 /* This pass is run after all of the functions are serialized and all
2985 of the IPA passes have written their serialized forms. This pass
2986 causes the vector of all of the global decls and types used from
2987 this file to be written into a section that can then be read back in
2988 to recover these on the other side. */
2989
2990 void
2991 produce_asm_for_decls (void)
2992 {
2993 struct lto_out_decl_state *out_state;
2994 struct lto_out_decl_state *fn_out_state;
2995 struct lto_decl_header header;
2996 char *section_name;
2997 struct output_block *ob;
2998 unsigned idx, num_fns;
2999 size_t decl_state_size;
3000 int32_t num_decl_states;
3001
3002 ob = create_output_block (LTO_section_decls);
3003
3004 memset (&header, 0, sizeof (struct lto_decl_header));
3005
3006 section_name = lto_get_section_name (LTO_section_decls, NULL, 0, NULL);
3007 lto_begin_section (section_name, !flag_wpa);
3008 free (section_name);
3009
3010 /* Make string 0 be a NULL string. */
3011 streamer_write_char_stream (ob->string_stream, 0);
3012
3013 gcc_assert (!alias_pairs);
3014
3015 /* Get rid of the global decl state hash tables to save some memory. */
3016 out_state = lto_get_out_decl_state ();
3017 for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
3018 if (out_state->streams[i].tree_hash_table)
3019 {
3020 delete out_state->streams[i].tree_hash_table;
3021 out_state->streams[i].tree_hash_table = NULL;
3022 }
3023
3024 /* Write the global symbols. */
3025 if (streamer_dump_file)
3026 fprintf (streamer_dump_file, "Outputting global stream\n");
3027 lto_output_decl_state_streams (ob, out_state);
3028 num_fns = lto_function_decl_states.length ();
3029 for (idx = 0; idx < num_fns; idx++)
3030 {
3031 fn_out_state =
3032 lto_function_decl_states[idx];
3033 if (streamer_dump_file)
3034 fprintf (streamer_dump_file, "Outputting stream for %s\n",
3035 IDENTIFIER_POINTER
3036 (DECL_ASSEMBLER_NAME (fn_out_state->fn_decl)));
3037 lto_output_decl_state_streams (ob, fn_out_state);
3038 }
3039
3040 /* Currently not used. This field would allow us to preallocate
3041 the globals vector, so that it need not be resized as it is extended. */
3042 header.num_nodes = -1;
3043
3044 /* Compute the total size of all decl out states. */
3045 decl_state_size = sizeof (int32_t);
3046 decl_state_size += lto_out_decl_state_written_size (out_state);
3047 for (idx = 0; idx < num_fns; idx++)
3048 {
3049 fn_out_state =
3050 lto_function_decl_states[idx];
3051 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
3052 }
3053 header.decl_state_size = decl_state_size;
3054
3055 header.main_size = ob->main_stream->total_size;
3056 header.string_size = ob->string_stream->total_size;
3057
3058 lto_write_data (&header, sizeof header);
3059
3060 /* Write the main out-decl state, followed by out-decl states of
3061 functions. */
3062 num_decl_states = num_fns + 1;
3063 lto_write_data (&num_decl_states, sizeof (num_decl_states));
3064 lto_output_decl_state_refs (ob, out_state);
3065 for (idx = 0; idx < num_fns; idx++)
3066 {
3067 fn_out_state = lto_function_decl_states[idx];
3068 lto_output_decl_state_refs (ob, fn_out_state);
3069 }
3070
3071 lto_write_stream (ob->main_stream);
3072 lto_write_stream (ob->string_stream);
3073
3074 lto_end_section ();
3075
3076 /* Write the symbol table. It is used by the linker to determine
3077 dependencies, and thus we can skip it for WPA. */
3078 if (!flag_wpa)
3079 {
3080 unsigned int streamed_symbols = produce_symtab (ob);
3081 produce_symtab_extension (ob, streamed_symbols);
3082 }
3083
3084 /* Write command line opts. */
3085 lto_write_options ();
3086
3087 /* Deallocate memory and clean up. */
3088 for (idx = 0; idx < num_fns; idx++)
3089 {
3090 fn_out_state =
3091 lto_function_decl_states[idx];
3092 lto_delete_out_decl_state (fn_out_state);
3093 }
3094 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
3095 lto_function_decl_states.release ();
3096 destroy_output_block (ob);
3097 if (lto_stream_offload_p)
3098 lto_write_mode_table ();
3099 }