re PR c++/47202 (Endless recursion during mangling)
[gcc.git] / gcc / lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2014 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "stor-layout.h"
29 #include "stringpool.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "hashtab.h"
35 #include "basic-block.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-expr.h"
39 #include "is-a.h"
40 #include "gimple.h"
41 #include "gimple-iterator.h"
42 #include "gimple-ssa.h"
43 #include "tree-ssanames.h"
44 #include "tree-pass.h"
45 #include "function.h"
46 #include "diagnostic-core.h"
47 #include "except.h"
48 #include "lto-symtab.h"
49 #include "lto-streamer.h"
50 #include "data-streamer.h"
51 #include "gimple-streamer.h"
52 #include "tree-streamer.h"
53 #include "streamer-hooks.h"
54 #include "cfgloop.h"
55
56
57 static void lto_write_tree (struct output_block*, tree, bool);
58
59 /* Clear the line info stored in DATA_IN. */
60
61 static void
62 clear_line_info (struct output_block *ob)
63 {
64 ob->current_file = NULL;
65 ob->current_line = 0;
66 ob->current_col = 0;
67 }
68
69
70 /* Create the output block and return it. SECTION_TYPE is
71 LTO_section_function_body or LTO_static_initializer. */
72
73 struct output_block *
74 create_output_block (enum lto_section_type section_type)
75 {
76 struct output_block *ob = XCNEW (struct output_block);
77
78 ob->section_type = section_type;
79 ob->decl_state = lto_get_out_decl_state ();
80 ob->main_stream = XCNEW (struct lto_output_stream);
81 ob->string_stream = XCNEW (struct lto_output_stream);
82 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
83
84 if (section_type == LTO_section_function_body)
85 ob->cfg_stream = XCNEW (struct lto_output_stream);
86
87 clear_line_info (ob);
88
89 ob->string_hash_table.create (37);
90 gcc_obstack_init (&ob->obstack);
91
92 return ob;
93 }
94
95
96 /* Destroy the output block OB. */
97
98 void
99 destroy_output_block (struct output_block *ob)
100 {
101 enum lto_section_type section_type = ob->section_type;
102
103 ob->string_hash_table.dispose ();
104
105 free (ob->main_stream);
106 free (ob->string_stream);
107 if (section_type == LTO_section_function_body)
108 free (ob->cfg_stream);
109
110 streamer_tree_cache_delete (ob->writer_cache);
111 obstack_free (&ob->obstack, NULL);
112
113 free (ob);
114 }
115
116
/* Look up NODE in the type table and write the index for it to OB.
   Emits an LTO_type_ref tag followed by the table index; the order of
   the two writes is part of the on-disk format.  */

static void
output_type_ref (struct output_block *ob, tree node)
{
  streamer_write_record_start (ob, LTO_type_ref);
  lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
}
125
126
127 /* Return true if tree node T is written to various tables. For these
128 nodes, we sometimes want to write their phyiscal representation
129 (via lto_output_tree), and sometimes we need to emit an index
130 reference into a table (via lto_output_tree_ref). */
131
132 static bool
133 tree_is_indexable (tree t)
134 {
135 /* Parameters and return values of functions of variably modified types
136 must go to global stream, because they may be used in the type
137 definition. */
138 if (TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
139 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
140 else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
141 || TREE_CODE (t) == TYPE_DECL
142 || TREE_CODE (t) == CONST_DECL
143 || TREE_CODE (t) == NAMELIST_DECL)
144 && decl_function_context (t))
145 return false;
146 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
147 return false;
148 /* Variably modified types need to be streamed alongside function
149 bodies because they can refer to local entities. Together with
150 them we have to localize their members as well.
151 ??? In theory that includes non-FIELD_DECLs as well. */
152 else if (TYPE_P (t)
153 && variably_modified_type_p (t, NULL_TREE))
154 return false;
155 else if (TREE_CODE (t) == FIELD_DECL
156 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
157 return false;
158 else
159 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
160 }
161
162
163 /* Output info about new location into bitpack BP.
164 After outputting bitpack, lto_output_location_data has
165 to be done to output actual data. */
166
167 void
168 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
169 location_t loc)
170 {
171 expanded_location xloc;
172
173 loc = LOCATION_LOCUS (loc);
174 bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
175 if (loc == UNKNOWN_LOCATION)
176 return;
177
178 xloc = expand_location (loc);
179
180 bp_pack_value (bp, ob->current_file != xloc.file, 1);
181 bp_pack_value (bp, ob->current_line != xloc.line, 1);
182 bp_pack_value (bp, ob->current_col != xloc.column, 1);
183
184 if (ob->current_file != xloc.file)
185 bp_pack_var_len_unsigned (bp,
186 streamer_string_index (ob, xloc.file,
187 strlen (xloc.file) + 1,
188 true));
189 ob->current_file = xloc.file;
190
191 if (ob->current_line != xloc.line)
192 bp_pack_var_len_unsigned (bp, xloc.line);
193 ob->current_line = xloc.line;
194
195 if (ob->current_col != xloc.column)
196 bp_pack_var_len_unsigned (bp, xloc.column);
197 ob->current_col = xloc.column;
198 }
199
200
/* If EXPR is an indexable tree node, output a reference to it to
   output block OB.  Otherwise, output the physical representation of
   EXPR to OB.  Each reference is a tag record followed by an index
   into the matching decl table; the tag values are part of the
   on-disk LTO format.  */

static void
lto_output_tree_ref (struct output_block *ob, tree expr)
{
  enum tree_code code;

  /* Types are always referenced through the type table.  */
  if (TYPE_P (expr))
    {
      output_type_ref (ob, expr);
      return;
    }

  code = TREE_CODE (expr);
  switch (code)
    {
    case SSA_NAME:
      /* SSA names are referenced by version number, not table index.  */
      streamer_write_record_start (ob, LTO_ssa_name_ref);
      streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
      break;

    case FIELD_DECL:
      streamer_write_record_start (ob, LTO_field_decl_ref);
      lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case FUNCTION_DECL:
      streamer_write_record_start (ob, LTO_function_decl_ref);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case VAR_DECL:
    case DEBUG_EXPR_DECL:
      /* Only global variables may be referenced here; locals are
	 streamed with the function body.  */
      gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
      /* Fallthrough: emitted with the same record as PARM_DECL.  */
    case PARM_DECL:
      streamer_write_record_start (ob, LTO_global_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case CONST_DECL:
      streamer_write_record_start (ob, LTO_const_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case IMPORTED_DECL:
      gcc_assert (decl_function_context (expr) == NULL);
      streamer_write_record_start (ob, LTO_imported_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TYPE_DECL:
      streamer_write_record_start (ob, LTO_type_decl_ref);
      lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMELIST_DECL:
      streamer_write_record_start (ob, LTO_namelist_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMESPACE_DECL:
      streamer_write_record_start (ob, LTO_namespace_decl_ref);
      lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case LABEL_DECL:
      streamer_write_record_start (ob, LTO_label_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case RESULT_DECL:
      streamer_write_record_start (ob, LTO_result_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TRANSLATION_UNIT_DECL:
      streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    default:
      /* No other node is indexable, so it should have been handled by
	 lto_output_tree.  */
      gcc_unreachable ();
    }
}
289
290
291 /* Return true if EXPR is a tree node that can be written to disk. */
292
293 static inline bool
294 lto_is_streamable (tree expr)
295 {
296 enum tree_code code = TREE_CODE (expr);
297
298 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
299 name version in lto_output_tree_ref (see output_ssa_names). */
300 return !is_lang_specific (expr)
301 && code != SSA_NAME
302 && code != CALL_EXPR
303 && code != LANG_TYPE
304 && code != MODIFY_EXPR
305 && code != INIT_EXPR
306 && code != TARGET_EXPR
307 && code != BIND_EXPR
308 && code != WITH_CLEANUP_EXPR
309 && code != STATEMENT_LIST
310 && (code == CASE_LABEL_EXPR
311 || code == DECL_EXPR
312 || TREE_CODE_CLASS (code) != tcc_statement);
313 }
314
315
316 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
317
318 static tree
319 get_symbol_initial_value (struct output_block *ob, tree expr)
320 {
321 gcc_checking_assert (DECL_P (expr)
322 && TREE_CODE (expr) != FUNCTION_DECL
323 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
324
325 /* Handle DECL_INITIAL for symbols. */
326 tree initial = DECL_INITIAL (expr);
327 if (TREE_CODE (expr) == VAR_DECL
328 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
329 && !DECL_IN_CONSTANT_POOL (expr)
330 && initial)
331 {
332 lto_symtab_encoder_t encoder;
333 varpool_node *vnode;
334
335 encoder = ob->decl_state->symtab_node_encoder;
336 vnode = varpool_get_node (expr);
337 if (!vnode
338 || !lto_symtab_encoder_encode_initializer_p (encoder,
339 vnode))
340 initial = error_mark_node;
341 }
342
343 return initial;
344 }
345
346
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  This is the body part of lto_write_tree,
   emitted after the tree header.  */

static void
lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
{
  /* Pack all the non-pointer fields in EXPR into a bitpack and write
     the resulting bitpack.  */
  bitpack_d bp = bitpack_create (ob->main_stream);
  streamer_pack_tree_bitfields (ob, &bp, expr);
  streamer_write_bitpack (&bp);

  /* Write all the pointer fields in EXPR.  */
  streamer_write_tree_body (ob, expr, ref_p);

  /* Write any LTO-specific data to OB.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    {
      /* Handle DECL_INITIAL for symbols; get_symbol_initial_value may
	 substitute error_mark_node when the initializer is not to be
	 streamed into this section.  */
      tree initial = get_symbol_initial_value (ob, expr);
      stream_write_tree (ob, initial, ref_p);
    }
}
374
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  Aborts with an internal error for nodes
   that cannot appear in LTO streams.  */

static void
lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
{
  if (!lto_is_streamable (expr))
    internal_error ("tree code %qs is not supported in LTO streams",
		    get_tree_code_name (TREE_CODE (expr)));

  /* Write the header, containing everything needed to materialize
     EXPR on the reading side.  */
  streamer_write_tree_header (ob, expr);

  lto_write_tree_1 (ob, expr, ref_p);

  /* Mark the end of EXPR.  */
  streamer_write_zero (ob);
}
396
397 /* Emit the physical representation of tree node EXPR to output block
398 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
399 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
400
401 static void
402 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
403 bool ref_p, bool this_ref_p)
404 {
405 unsigned ix;
406
407 gcc_checking_assert (expr != NULL_TREE
408 && !(this_ref_p && tree_is_indexable (expr)));
409
410 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
411 expr, hash, &ix);
412 gcc_assert (!exists_p);
413 if (streamer_handle_as_builtin_p (expr))
414 {
415 /* MD and NORMAL builtins do not need to be written out
416 completely as they are always instantiated by the
417 compiler on startup. The only builtins that need to
418 be written out are BUILT_IN_FRONTEND. For all other
419 builtins, we simply write the class and code. */
420 streamer_write_builtin (ob, expr);
421 }
422 else if (TREE_CODE (expr) == INTEGER_CST
423 && !TREE_OVERFLOW (expr))
424 {
425 /* Shared INTEGER_CST nodes are special because they need their
426 original type to be materialized by the reader (to implement
427 TYPE_CACHED_VALUES). */
428 streamer_write_integer_cst (ob, expr, ref_p);
429 }
430 else
431 {
432 /* This is the first time we see EXPR, write its fields
433 to OB. */
434 lto_write_tree (ob, expr, ref_p);
435 }
436 }
437
/* Per-tree state for the SCC-discovering DFS walk (Tarjan's
   algorithm): DFSNUM is the preorder number assigned when the tree is
   first visited, LOW the smallest DFS number reachable from it.  */

struct sccs
{
  unsigned int dfsnum;
  unsigned int low;
};

/* One member of a completed SCC: the tree T and its hash value.  */

struct scc_entry
{
  tree t;
  hashval_t hash;
};

/* Next DFS preorder number to hand out.  */
static unsigned int next_dfs_num;
/* Stack of trees whose SCC membership is still being determined.  */
static vec<scc_entry> sccstack;
/* Map from tree to its struct sccs state.  */
static struct pointer_map_t *sccstate;
/* Obstack the sccs entries are allocated on.  */
static struct obstack sccstate_obstack;

static void
DFS_write_tree (struct output_block *ob, sccs *from_state,
		tree expr, bool ref_p, bool this_ref_p);
458
/* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
   DFS recurse for all tree edges originating from it.  The order in
   which edges are followed mirrors streamer_write_tree_body and is
   therefore part of the on-disk format; do not reorder the sections
   below.  */

static void
DFS_write_tree_body (struct output_block *ob,
		     tree expr, sccs *expr_state, bool ref_p)
{
#define DFS_follow_tree_edge(DEST) \
  DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)

  enum tree_code code;

  code = TREE_CODE (expr);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (TREE_CODE (expr) != IDENTIFIER_NODE)
	DFS_follow_tree_edge (TREE_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
	DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      DFS_follow_tree_edge (TREE_REALPART (expr));
      DFS_follow_tree_edge (TREE_IMAGPART (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (expr)
	  && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
	  && ANON_AGGRNAME_P (DECL_NAME (expr)))
	;
      else
	DFS_follow_tree_edge (DECL_NAME (expr));
      DFS_follow_tree_edge (DECL_CONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      DFS_follow_tree_edge (DECL_SIZE (expr));
      DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));

      /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
	 special handling in LTO, it must be handled by streamer hooks.  */

      DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));

      /* Do not follow DECL_ABSTRACT_ORIGIN.  We cannot handle debug information
	 for early inlining so drop it on the floor instead of ICEing in
	 dwarf2out.c.  */

      if ((TREE_CODE (expr) == VAR_DECL
	   || TREE_CODE (expr) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (expr))
	DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
      if (TREE_CODE (expr) == VAR_DECL)
	DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (TREE_CODE (expr) == TYPE_DECL)
	DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
      DFS_follow_tree_edge (DECL_VINDEX (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* Make sure we don't inadvertently set the assembler name.  */
      if (DECL_ASSEMBLER_NAME_SET_P (expr))
	DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
      DFS_follow_tree_edge (DECL_SECTION_NAME (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
      DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
      DFS_follow_tree_edge (DECL_FCONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
      /* Do not DECL_FUNCTION_SPECIFIC_TARGET.  They will be regenerated.  */
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      DFS_follow_tree_edge (TYPE_SIZE (expr));
      DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
      DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
      DFS_follow_tree_edge (TYPE_NAME (expr));
      /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
	 reconstructed during fixup.  */
      /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
	 during fixup.  */
      DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
      DFS_follow_tree_edge (TYPE_CONTEXT (expr));
      /* TYPE_CANONICAL is re-computed during type merging, so no need
	 to follow it here.  */
      DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	DFS_follow_tree_edge (TYPE_VALUES (expr));
      else if (TREE_CODE (expr) == ARRAY_TYPE)
	DFS_follow_tree_edge (TYPE_DOMAIN (expr));
      else if (RECORD_OR_UNION_TYPE_P (expr))
	for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
	  DFS_follow_tree_edge (t);
      else if (TREE_CODE (expr) == FUNCTION_TYPE
	       || TREE_CODE (expr) == METHOD_TYPE)
	DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));

      /* For pointer types TYPE_MINVAL overlays other fields; skip it.  */
      if (!POINTER_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_MINVAL (expr));
      DFS_follow_tree_edge (TYPE_MAXVAL (expr));
      if (RECORD_OR_UNION_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_BINFO (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      DFS_follow_tree_edge (TREE_PURPOSE (expr));
      DFS_follow_tree_edge (TREE_VALUE (expr));
      DFS_follow_tree_edge (TREE_CHAIN (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    {
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_OPERAND (expr, i));
      DFS_follow_tree_edge (TREE_BLOCK (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    {
      for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
	/* ??? FIXME.  See also streamer_write_chain.  */
	if (!(VAR_OR_FUNCTION_DECL_P (t)
	      && DECL_EXTERNAL (t)))
	  DFS_follow_tree_edge (t);

      DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));

      /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
	 handle - those that represent inlined function scopes.
	 For the rest, drop them on the floor instead of ICEing
	 in dwarf2out.c.  */
      if (inlined_function_outer_scope_p (expr))
	{
	  tree ultimate_origin = block_ultimate_origin (expr);
	  DFS_follow_tree_edge (ultimate_origin);
	}
      /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
	 information for early inlined BLOCKs so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
	 streaming time.  */

      /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
	 list is re-constructed from BLOCK_SUPERCONTEXT.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree t;

      /* Note that the number of BINFO slots has already been emitted in
	 EXPR's header (see streamer_write_tree_header) because this length
	 is needed to build the empty BINFO node on the reader side.  */
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
	DFS_follow_tree_edge (t);
      DFS_follow_tree_edge (BINFO_OFFSET (expr));
      DFS_follow_tree_edge (BINFO_VTABLE (expr));
      DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));

      /* The number of BINFO_BASE_ACCESSES has already been emitted in
	 EXPR's bitfield section.  */
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
	DFS_follow_tree_edge (t);

      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
	{
	  DFS_follow_tree_edge (index);
	  DFS_follow_tree_edge (value);
	}
    }

  if (code == OMP_CLAUSE)
    {
      int i;
      for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
	DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
      DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
    }

#undef DFS_follow_tree_edge
}
688
689 /* Return a hash value for the tree T. */
690
691 static hashval_t
692 hash_tree (struct streamer_tree_cache_d *cache, tree t)
693 {
694 #define visit(SIBLING) \
695 do { \
696 unsigned ix; \
697 if (SIBLING && streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
698 v = iterative_hash_hashval_t (streamer_tree_cache_get_hash (cache, ix), v); \
699 } while (0)
700
701 /* Hash TS_BASE. */
702 enum tree_code code = TREE_CODE (t);
703 hashval_t v = iterative_hash_host_wide_int (code, 0);
704 if (!TYPE_P (t))
705 {
706 v = iterative_hash_host_wide_int (TREE_SIDE_EFFECTS (t)
707 | (TREE_CONSTANT (t) << 1)
708 | (TREE_READONLY (t) << 2)
709 | (TREE_PUBLIC (t) << 3), v);
710 }
711 v = iterative_hash_host_wide_int (TREE_ADDRESSABLE (t)
712 | (TREE_THIS_VOLATILE (t) << 1), v);
713 if (DECL_P (t))
714 v = iterative_hash_host_wide_int (DECL_UNSIGNED (t), v);
715 else if (TYPE_P (t))
716 v = iterative_hash_host_wide_int (TYPE_UNSIGNED (t), v);
717 if (TYPE_P (t))
718 v = iterative_hash_host_wide_int (TYPE_ARTIFICIAL (t), v);
719 else
720 v = iterative_hash_host_wide_int (TREE_NO_WARNING (t), v);
721 v = iterative_hash_host_wide_int (TREE_NOTHROW (t)
722 | (TREE_STATIC (t) << 1)
723 | (TREE_PROTECTED (t) << 2)
724 | (TREE_DEPRECATED (t) << 3), v);
725 if (code != TREE_BINFO)
726 v = iterative_hash_host_wide_int (TREE_PRIVATE (t), v);
727 if (TYPE_P (t))
728 v = iterative_hash_host_wide_int (TYPE_SATURATING (t)
729 | (TYPE_ADDR_SPACE (t) << 1), v);
730 else if (code == SSA_NAME)
731 v = iterative_hash_host_wide_int (SSA_NAME_IS_DEFAULT_DEF (t), v);
732
733 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
734 {
735 int i;
736 v = iterative_hash_host_wide_int (TREE_INT_CST_NUNITS (t), v);
737 v = iterative_hash_host_wide_int (TREE_INT_CST_EXT_NUNITS (t), v);
738 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
739 v = iterative_hash_host_wide_int (TREE_INT_CST_ELT (t, i), v);
740 }
741
742 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
743 {
744 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
745 v = iterative_hash_host_wide_int (r.cl, v);
746 v = iterative_hash_host_wide_int (r.decimal
747 | (r.sign << 1)
748 | (r.signalling << 2)
749 | (r.canonical << 3), v);
750 v = iterative_hash_host_wide_int (r.uexp, v);
751 for (unsigned i = 0; i < SIGSZ; ++i)
752 v = iterative_hash_host_wide_int (r.sig[i], v);
753 }
754
755 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
756 {
757 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
758 v = iterative_hash_host_wide_int (f.mode, v);
759 v = iterative_hash_host_wide_int (f.data.low, v);
760 v = iterative_hash_host_wide_int (f.data.high, v);
761 }
762
763 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
764 {
765 v = iterative_hash_host_wide_int (DECL_MODE (t), v);
766 v = iterative_hash_host_wide_int (DECL_NONLOCAL (t)
767 | (DECL_VIRTUAL_P (t) << 1)
768 | (DECL_IGNORED_P (t) << 2)
769 | (DECL_ABSTRACT (t) << 3)
770 | (DECL_ARTIFICIAL (t) << 4)
771 | (DECL_USER_ALIGN (t) << 5)
772 | (DECL_PRESERVE_P (t) << 6)
773 | (DECL_EXTERNAL (t) << 7)
774 | (DECL_GIMPLE_REG_P (t) << 8), v);
775 v = iterative_hash_host_wide_int (DECL_ALIGN (t), v);
776 if (code == LABEL_DECL)
777 {
778 v = iterative_hash_host_wide_int (EH_LANDING_PAD_NR (t), v);
779 v = iterative_hash_host_wide_int (LABEL_DECL_UID (t), v);
780 }
781 else if (code == FIELD_DECL)
782 {
783 v = iterative_hash_host_wide_int (DECL_PACKED (t)
784 | (DECL_NONADDRESSABLE_P (t) << 1),
785 v);
786 v = iterative_hash_host_wide_int (DECL_OFFSET_ALIGN (t), v);
787 }
788 else if (code == VAR_DECL)
789 {
790 v = iterative_hash_host_wide_int (DECL_HAS_DEBUG_EXPR_P (t)
791 | (DECL_NONLOCAL_FRAME (t) << 1),
792 v);
793 }
794 if (code == RESULT_DECL
795 || code == PARM_DECL
796 || code == VAR_DECL)
797 {
798 v = iterative_hash_host_wide_int (DECL_BY_REFERENCE (t), v);
799 if (code == VAR_DECL
800 || code == PARM_DECL)
801 v = iterative_hash_host_wide_int (DECL_HAS_VALUE_EXPR_P (t), v);
802 }
803 }
804
805 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
806 v = iterative_hash_host_wide_int (DECL_REGISTER (t), v);
807
808 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
809 {
810 v = iterative_hash_host_wide_int ((DECL_COMMON (t))
811 | (DECL_DLLIMPORT_P (t) << 1)
812 | (DECL_WEAK (t) << 2)
813 | (DECL_SEEN_IN_BIND_EXPR_P (t) << 3)
814 | (DECL_COMDAT (t) << 4)
815 | (DECL_VISIBILITY_SPECIFIED (t) << 6),
816 v);
817 v = iterative_hash_host_wide_int (DECL_VISIBILITY (t), v);
818 if (code == VAR_DECL)
819 {
820 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
821 v = iterative_hash_host_wide_int (DECL_HARD_REGISTER (t)
822 | (DECL_IN_CONSTANT_POOL (t) << 1),
823 v);
824 v = iterative_hash_host_wide_int (DECL_TLS_MODEL (t), v);
825 }
826 if (TREE_CODE (t) == FUNCTION_DECL)
827 v = iterative_hash_host_wide_int (DECL_FINAL_P (t)
828 | (DECL_CXX_CONSTRUCTOR_P (t) << 1)
829 | (DECL_CXX_DESTRUCTOR_P (t) << 2),
830 v);
831 if (VAR_OR_FUNCTION_DECL_P (t))
832 v = iterative_hash_host_wide_int (DECL_INIT_PRIORITY (t), v);
833 }
834
835 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
836 {
837 v = iterative_hash_host_wide_int (DECL_BUILT_IN_CLASS (t), v);
838 v = iterative_hash_host_wide_int (DECL_STATIC_CONSTRUCTOR (t)
839 | (DECL_STATIC_DESTRUCTOR (t) << 1)
840 | (DECL_UNINLINABLE (t) << 2)
841 | (DECL_POSSIBLY_INLINED (t) << 3)
842 | (DECL_IS_NOVOPS (t) << 4)
843 | (DECL_IS_RETURNS_TWICE (t) << 5)
844 | (DECL_IS_MALLOC (t) << 6)
845 | (DECL_IS_OPERATOR_NEW (t) << 7)
846 | (DECL_DECLARED_INLINE_P (t) << 8)
847 | (DECL_STATIC_CHAIN (t) << 9)
848 | (DECL_NO_INLINE_WARNING_P (t) << 10)
849 | (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t) << 11)
850 | (DECL_NO_LIMIT_STACK (t) << 12)
851 | (DECL_DISREGARD_INLINE_LIMITS (t) << 13)
852 | (DECL_PURE_P (t) << 14)
853 | (DECL_LOOPING_CONST_OR_PURE_P (t) << 15), v);
854 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
855 v = iterative_hash_host_wide_int (DECL_FUNCTION_CODE (t), v);
856 if (DECL_STATIC_DESTRUCTOR (t))
857 v = iterative_hash_host_wide_int (DECL_FINI_PRIORITY (t), v);
858 }
859
860 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
861 {
862 v = iterative_hash_host_wide_int (TYPE_MODE (t), v);
863 v = iterative_hash_host_wide_int (TYPE_STRING_FLAG (t)
864 | (TYPE_NO_FORCE_BLK (t) << 1)
865 | (TYPE_NEEDS_CONSTRUCTING (t) << 2)
866 | (TYPE_PACKED (t) << 3)
867 | (TYPE_RESTRICT (t) << 4)
868 | (TYPE_USER_ALIGN (t) << 5)
869 | (TYPE_READONLY (t) << 6), v);
870 if (RECORD_OR_UNION_TYPE_P (t))
871 {
872 v = iterative_hash_host_wide_int (TYPE_TRANSPARENT_AGGR (t)
873 | (TYPE_FINAL_P (t) << 1), v);
874 }
875 else if (code == ARRAY_TYPE)
876 v = iterative_hash_host_wide_int (TYPE_NONALIASED_COMPONENT (t), v);
877 v = iterative_hash_host_wide_int (TYPE_PRECISION (t), v);
878 v = iterative_hash_host_wide_int (TYPE_ALIGN (t), v);
879 v = iterative_hash_host_wide_int ((TYPE_ALIAS_SET (t) == 0
880 || (!in_lto_p
881 && get_alias_set (t) == 0))
882 ? 0 : -1, v);
883 }
884
885 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
886 v = iterative_hash (TRANSLATION_UNIT_LANGUAGE (t),
887 strlen (TRANSLATION_UNIT_LANGUAGE (t)), v);
888
889 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
890 gcc_unreachable ();
891
892 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
893 v = iterative_hash (t, sizeof (struct cl_optimization), v);
894
895 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
896 v = iterative_hash_host_wide_int (IDENTIFIER_HASH_VALUE (t), v);
897
898 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
899 v = iterative_hash (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t), v);
900
901 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
902 {
903 if (POINTER_TYPE_P (t))
904 {
905 /* For pointers factor in the pointed-to type recursively as
906 we cannot recurse through only pointers.
907 ??? We can generalize this by keeping track of the
908 in-SCC edges for each tree (or arbitrarily the first
909 such edge) and hashing that in in a second stage
910 (instead of the quadratic mixing of the SCC we do now). */
911 hashval_t x;
912 unsigned ix;
913 if (streamer_tree_cache_lookup (cache, TREE_TYPE (t), &ix))
914 x = streamer_tree_cache_get_hash (cache, ix);
915 else
916 x = hash_tree (cache, TREE_TYPE (t));
917 v = iterative_hash_hashval_t (x, v);
918 }
919 else if (code != IDENTIFIER_NODE)
920 visit (TREE_TYPE (t));
921 }
922
923 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
924 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
925 visit (VECTOR_CST_ELT (t, i));
926
927 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
928 {
929 visit (TREE_REALPART (t));
930 visit (TREE_IMAGPART (t));
931 }
932
933 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
934 {
935 /* Drop names that were created for anonymous entities. */
936 if (DECL_NAME (t)
937 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
938 && ANON_AGGRNAME_P (DECL_NAME (t)))
939 ;
940 else
941 visit (DECL_NAME (t));
942 if (DECL_FILE_SCOPE_P (t))
943 ;
944 else
945 visit (DECL_CONTEXT (t));
946 }
947
948 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
949 {
950 visit (DECL_SIZE (t));
951 visit (DECL_SIZE_UNIT (t));
952 visit (DECL_ATTRIBUTES (t));
953 if ((code == VAR_DECL
954 || code == PARM_DECL)
955 && DECL_HAS_VALUE_EXPR_P (t))
956 visit (DECL_VALUE_EXPR (t));
957 if (code == VAR_DECL
958 && DECL_HAS_DEBUG_EXPR_P (t))
959 visit (DECL_DEBUG_EXPR (t));
960 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
961 be able to call get_symbol_initial_value. */
962 }
963
964 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
965 {
966 if (code == TYPE_DECL)
967 visit (DECL_ORIGINAL_TYPE (t));
968 visit (DECL_VINDEX (t));
969 }
970
971 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
972 {
973 if (DECL_ASSEMBLER_NAME_SET_P (t))
974 visit (DECL_ASSEMBLER_NAME (t));
975 visit (DECL_SECTION_NAME (t));
976 }
977
978 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
979 {
980 visit (DECL_FIELD_OFFSET (t));
981 visit (DECL_BIT_FIELD_TYPE (t));
982 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
983 visit (DECL_FIELD_BIT_OFFSET (t));
984 visit (DECL_FCONTEXT (t));
985 }
986
987 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
988 {
989 visit (DECL_FUNCTION_PERSONALITY (t));
990 /* Do not follow DECL_FUNCTION_SPECIFIC_TARGET. */
991 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
992 }
993
994 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
995 {
996 visit (TYPE_SIZE (t));
997 visit (TYPE_SIZE_UNIT (t));
998 visit (TYPE_ATTRIBUTES (t));
999 visit (TYPE_NAME (t));
1000 visit (TYPE_MAIN_VARIANT (t));
1001 if (TYPE_FILE_SCOPE_P (t))
1002 ;
1003 else
1004 visit (TYPE_CONTEXT (t));
1005 visit (TYPE_STUB_DECL (t));
1006 }
1007
1008 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1009 {
1010 if (code == ENUMERAL_TYPE)
1011 visit (TYPE_VALUES (t));
1012 else if (code == ARRAY_TYPE)
1013 visit (TYPE_DOMAIN (t));
1014 else if (RECORD_OR_UNION_TYPE_P (t))
1015 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1016 visit (f);
1017 else if (code == FUNCTION_TYPE
1018 || code == METHOD_TYPE)
1019 visit (TYPE_ARG_TYPES (t));
1020 if (!POINTER_TYPE_P (t))
1021 visit (TYPE_MINVAL (t));
1022 visit (TYPE_MAXVAL (t));
1023 if (RECORD_OR_UNION_TYPE_P (t))
1024 visit (TYPE_BINFO (t));
1025 }
1026
1027 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1028 {
1029 visit (TREE_PURPOSE (t));
1030 visit (TREE_VALUE (t));
1031 visit (TREE_CHAIN (t));
1032 }
1033
1034 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1035 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1036 visit (TREE_VEC_ELT (t, i));
1037
1038 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1039 {
1040 v = iterative_hash_host_wide_int (TREE_OPERAND_LENGTH (t), v);
1041 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1042 visit (TREE_OPERAND (t, i));
1043 }
1044
1045 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1046 {
1047 unsigned i;
1048 tree b;
1049 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1050 visit (b);
1051 visit (BINFO_OFFSET (t));
1052 visit (BINFO_VTABLE (t));
1053 visit (BINFO_VPTR_FIELD (t));
1054 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1055 visit (b);
1056 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1057 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1058 }
1059
1060 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1061 {
1062 unsigned i;
1063 tree index, value;
1064 v = iterative_hash_host_wide_int (CONSTRUCTOR_NELTS (t), v);
1065 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1066 {
1067 visit (index);
1068 visit (value);
1069 }
1070 }
1071
1072 if (code == OMP_CLAUSE)
1073 {
1074 int i;
1075
1076 v = iterative_hash_host_wide_int (OMP_CLAUSE_CODE (t), v);
1077 switch (OMP_CLAUSE_CODE (t))
1078 {
1079 case OMP_CLAUSE_DEFAULT:
1080 v = iterative_hash_host_wide_int (OMP_CLAUSE_DEFAULT_KIND (t), v);
1081 break;
1082 case OMP_CLAUSE_SCHEDULE:
1083 v = iterative_hash_host_wide_int (OMP_CLAUSE_SCHEDULE_KIND (t), v);
1084 break;
1085 case OMP_CLAUSE_DEPEND:
1086 v = iterative_hash_host_wide_int (OMP_CLAUSE_DEPEND_KIND (t), v);
1087 break;
1088 case OMP_CLAUSE_MAP:
1089 v = iterative_hash_host_wide_int (OMP_CLAUSE_MAP_KIND (t), v);
1090 break;
1091 case OMP_CLAUSE_PROC_BIND:
1092 v = iterative_hash_host_wide_int (OMP_CLAUSE_PROC_BIND_KIND (t), v);
1093 break;
1094 case OMP_CLAUSE_REDUCTION:
1095 v = iterative_hash_host_wide_int (OMP_CLAUSE_REDUCTION_CODE (t), v);
1096 break;
1097 default:
1098 break;
1099 }
1100 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1101 visit (OMP_CLAUSE_OPERAND (t, i));
1102 visit (OMP_CLAUSE_CHAIN (t));
1103 }
1104
1105 return v;
1106
1107 #undef visit
1108 }
1109
1110 /* Compare two SCC entries by their hash value for qsorting them. */
1111
1112 static int
1113 scc_entry_compare (const void *p1_, const void *p2_)
1114 {
1115 const scc_entry *p1 = (const scc_entry *) p1_;
1116 const scc_entry *p2 = (const scc_entry *) p2_;
1117 if (p1->hash < p2->hash)
1118 return -1;
1119 else if (p1->hash > p2->hash)
1120 return 1;
1121 return 0;
1122 }
1123
/* Return a hash value for the SCC on the SCC stack from FIRST with
   size SIZE.  As a side-effect, replaces each member's hash with one
   that also mixes in the hashes of the other SCC members, so the
   per-member hashes become independent of visitation order.  */

static hashval_t
hash_scc (struct streamer_tree_cache_d *cache, unsigned first, unsigned size)
{
  /* Compute hash values for the SCC members.  */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash = hash_tree (cache, sccstack[first+i].t);

  /* A singleton SCC needs no order-independence fixup.  */
  if (size == 1)
    return sccstack[first].hash;

  /* Sort the SCC of type, hash pairs so that when we mix in
     all members of the SCC the hash value becomes independent on
     the order we visited the SCC.  Disregard hashes equal to
     the hash of the tree we mix into because we cannot guarantee
     a stable sort for those across different TUs.  */
  qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
  hashval_t *tem = XALLOCAVEC (hashval_t, size);
  for (unsigned i = 0; i < size; ++i)
    {
      hashval_t hash = sccstack[first+i].hash;
      hashval_t orig_hash = hash;
      unsigned j;
      /* Skip same hashes.  */
      for (j = i + 1;
           j < size && sccstack[first+j].hash == orig_hash; ++j)
        ;
      /* Mix in the hashes of all entries following the equal-hash run...  */
      for (; j < size; ++j)
        hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
      /* ...then wrap around and mix the preceding entries, stopping
         before the run of hashes equal to our own.  */
      for (j = 0; sccstack[first+j].hash != orig_hash; ++j)
        hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
      tem[i] = hash;
    }
  /* Commit the mixed per-member hashes and fold them into the
     overall SCC hash.  */
  hashval_t scc_hash = 0;
  for (unsigned i = 0; i < size; ++i)
    {
      sccstack[first+i].hash = tem[i];
      scc_hash = iterative_hash_hashval_t (tem[i], scc_hash);
    }
  return scc_hash;
}
1167
/* DFS walk EXPR and stream SCCs of tree bodies if they are not
   already in the streamer cache.  Main routine called for
   each visit of EXPR.  FROM_STATE is the DFS state of the caller
   (NULL at the root); REF_P/THIS_REF_P control whether siblings/EXPR
   itself are emitted as references.  Uses Tarjan-style dfsnum/lowlink
   bookkeeping to detect SCCs.  */

static void
DFS_write_tree (struct output_block *ob, sccs *from_state,
                tree expr, bool ref_p, bool this_ref_p)
{
  unsigned ix;
  sccs **slot;

  /* Handle special cases.  */
  if (expr == NULL_TREE)
    return;

  /* Do not DFS walk into indexable trees.  */
  if (this_ref_p && tree_is_indexable (expr))
    return;

  /* Check if we already streamed EXPR.  */
  if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
    return;

  slot = (sccs **)pointer_map_insert (sccstate, expr);
  sccs *cstate = *slot;
  if (!cstate)
    {
      scc_entry e = { expr, 0 };
      /* Not yet visited.  DFS recurse and push it onto the stack.  */
      *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
      sccstack.safe_push (e);
      cstate->dfsnum = next_dfs_num++;
      cstate->low = cstate->dfsnum;

      /* Recurse into the outgoing tree edges.  Builtins have no edges
         to walk; non-overflowed INTEGER_CSTs only need their type.  */
      if (streamer_handle_as_builtin_p (expr))
        ;
      else if (TREE_CODE (expr) == INTEGER_CST
               && !TREE_OVERFLOW (expr))
        DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
      else
        {
          DFS_write_tree_body (ob, expr, cstate, ref_p);

          /* Walk any LTO-specific edges.  */
          if (DECL_P (expr)
              && TREE_CODE (expr) != FUNCTION_DECL
              && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
            {
              /* Handle DECL_INITIAL for symbols.  */
              tree initial = get_symbol_initial_value (ob, expr);
              DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
            }
        }

      /* See if we found an SCC.  EXPR is the SCC root exactly when its
         lowlink never dropped below its own DFS number.  */
      if (cstate->low == cstate->dfsnum)
        {
          unsigned first, size;
          tree x;

          /* Pop the SCC and compute its size.  */
          first = sccstack.length ();
          do
            {
              x = sccstack[--first].t;
            }
          while (x != expr);
          size = sccstack.length () - first;

          /* No need to compute hashes for LTRANS units, we don't perform
             any merging there.  */
          hashval_t scc_hash = 0;
          unsigned scc_entry_len = 0;
          if (!flag_wpa)
            {
              scc_hash = hash_scc (ob->writer_cache, first, size);

              /* Put the entries with the least number of collisions first.  */
              unsigned entry_start = 0;
              scc_entry_len = size + 1;
              for (unsigned i = 0; i < size;)
                {
                  /* Find the run of entries sharing the same hash.  */
                  unsigned from = i;
                  for (i = i + 1; i < size
                       && (sccstack[first + i].hash
                           == sccstack[first + from].hash); ++i)
                    ;
                  /* Keep track of the shortest such run.  */
                  if (i - from < scc_entry_len)
                    {
                      scc_entry_len = i - from;
                      entry_start = from;
                    }
                }
              /* Move the shortest run to the front of the SCC.  */
              for (unsigned i = 0; i < scc_entry_len; ++i)
                {
                  scc_entry tem = sccstack[first + i];
                  sccstack[first + i] = sccstack[first + entry_start + i];
                  sccstack[first + entry_start + i] = tem;
                }
            }

          /* Write LTO_tree_scc.  */
          streamer_write_record_start (ob, LTO_tree_scc);
          streamer_write_uhwi (ob, size);
          streamer_write_uhwi (ob, scc_hash);

          /* Write size-1 SCCs without wrapping them inside SCC bundles.
             All INTEGER_CSTs need to be handled this way as we need
             their type to materialize them.  Also builtins are handled
             this way.
             ??? We still wrap these in LTO_tree_scc so at the
             input side we can properly identify the tree we want
             to ultimatively return.  */
          if (size == 1)
            lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
          else
            {
              /* Write the size of the SCC entry candidates.  */
              streamer_write_uhwi (ob, scc_entry_len);

              /* Write all headers and populate the streamer cache.  */
              for (unsigned i = 0; i < size; ++i)
                {
                  hashval_t hash = sccstack[first+i].hash;
                  tree t = sccstack[first+i].t;
                  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
                                                              t, hash, &ix);
                  gcc_assert (!exists_p);

                  if (!lto_is_streamable (t))
                    internal_error ("tree code %qs is not supported "
                                    "in LTO streams",
                                    get_tree_code_name (TREE_CODE (t)));

                  gcc_checking_assert (!streamer_handle_as_builtin_p (t));

                  /* Write the header, containing everything needed to
                     materialize EXPR on the reading side.  */
                  streamer_write_tree_header (ob, t);
                }

              /* Write the bitpacks and tree references.  */
              for (unsigned i = 0; i < size; ++i)
                {
                  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);

                  /* Mark the end of the tree.  */
                  streamer_write_zero (ob);
                }
            }

          /* Finally truncate the vector.  */
          sccstack.truncate (first);

          if (from_state)
            from_state->low = MIN (from_state->low, cstate->low);
          return;
        }

      /* Not an SCC root; propagate our lowlink to the caller.  */
      if (from_state)
        from_state->low = MIN (from_state->low, cstate->low);
    }
  /* EXPR is already on the DFS stack: a back-edge.  Lower the caller's
     lowlink to record membership in the same SCC.  */
  gcc_checking_assert (from_state);
  if (cstate->dfsnum < from_state->dfsnum)
    from_state->low = MIN (cstate->dfsnum, from_state->low);
}
1334
1335
/* Emit the physical representation of tree node EXPR to output block
   OB.  If THIS_REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.
   NULL_TREE is streamed as an LTO_null record; already-streamed nodes
   become pickle references.  */

void
lto_output_tree (struct output_block *ob, tree expr,
                 bool ref_p, bool this_ref_p)
{
  unsigned ix;
  bool existed_p;

  if (expr == NULL_TREE)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  /* Indexable trees are emitted by reference, never by body.  */
  if (this_ref_p && tree_is_indexable (expr))
    {
      lto_output_tree_ref (ob, expr);
      return;
    }

  existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
  if (existed_p)
    {
      /* If a node has already been streamed out, make sure that
         we don't write it more than once.  Otherwise, the reader
         will instantiate two different nodes for the same object.  */
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
                           lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
  else
    {
      /* This is the first time we see EXPR, write all reachable
         trees to OB.  */
      static bool in_dfs_walk;

      /* Protect against recursion which means disconnect between
         what tree edges we walk in the DFS walk and what edges
         we stream out.  */
      gcc_assert (!in_dfs_walk);

      /* Start the DFS walk.  */
      in_dfs_walk = true;
      sccstate = pointer_map_create ();
      gcc_obstack_init (&sccstate_obstack);
      next_dfs_num = 1;
      DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
      sccstack.release ();
      pointer_map_destroy (sccstate);
      obstack_free (&sccstate_obstack, NULL);
      in_dfs_walk = false;

      /* Finally append a reference to the tree we were writing.
         ??? If expr ended up as a singleton we could have
         inlined it here and avoid outputting a reference.  */
      existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
      gcc_assert (existed_p);
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
                           lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
}
1407
1408
1409 /* Output to OB a list of try/catch handlers starting with FIRST. */
1410
1411 static void
1412 output_eh_try_list (struct output_block *ob, eh_catch first)
1413 {
1414 eh_catch n;
1415
1416 for (n = first; n; n = n->next_catch)
1417 {
1418 streamer_write_record_start (ob, LTO_eh_catch);
1419 stream_write_tree (ob, n->type_list, true);
1420 stream_write_tree (ob, n->filter_list, true);
1421 stream_write_tree (ob, n->label, true);
1422 }
1423
1424 streamer_write_record_start (ob, LTO_null);
1425 }
1426
1427
1428 /* Output EH region R in function FN to OB. CURR_RN is the slot index
1429 that is being emitted in FN->EH->REGION_ARRAY. This is used to
1430 detect EH region sharing. */
1431
1432 static void
1433 output_eh_region (struct output_block *ob, eh_region r)
1434 {
1435 enum LTO_tags tag;
1436
1437 if (r == NULL)
1438 {
1439 streamer_write_record_start (ob, LTO_null);
1440 return;
1441 }
1442
1443 if (r->type == ERT_CLEANUP)
1444 tag = LTO_ert_cleanup;
1445 else if (r->type == ERT_TRY)
1446 tag = LTO_ert_try;
1447 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1448 tag = LTO_ert_allowed_exceptions;
1449 else if (r->type == ERT_MUST_NOT_THROW)
1450 tag = LTO_ert_must_not_throw;
1451 else
1452 gcc_unreachable ();
1453
1454 streamer_write_record_start (ob, tag);
1455 streamer_write_hwi (ob, r->index);
1456
1457 if (r->outer)
1458 streamer_write_hwi (ob, r->outer->index);
1459 else
1460 streamer_write_zero (ob);
1461
1462 if (r->inner)
1463 streamer_write_hwi (ob, r->inner->index);
1464 else
1465 streamer_write_zero (ob);
1466
1467 if (r->next_peer)
1468 streamer_write_hwi (ob, r->next_peer->index);
1469 else
1470 streamer_write_zero (ob);
1471
1472 if (r->type == ERT_TRY)
1473 {
1474 output_eh_try_list (ob, r->u.eh_try.first_catch);
1475 }
1476 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1477 {
1478 stream_write_tree (ob, r->u.allowed.type_list, true);
1479 stream_write_tree (ob, r->u.allowed.label, true);
1480 streamer_write_uhwi (ob, r->u.allowed.filter);
1481 }
1482 else if (r->type == ERT_MUST_NOT_THROW)
1483 {
1484 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1485 bitpack_d bp = bitpack_create (ob->main_stream);
1486 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1487 streamer_write_bitpack (&bp);
1488 }
1489
1490 if (r->landing_pads)
1491 streamer_write_hwi (ob, r->landing_pads->index);
1492 else
1493 streamer_write_zero (ob);
1494 }
1495
1496
/* Output landing pad LP to OB.  LP may be NULL, in which case only an
   LTO_null marker is emitted.  Related objects are referenced by index,
   with a plain zero byte standing for "none".  */

static void
output_eh_lp (struct output_block *ob, eh_landing_pad lp)
{
  if (lp == NULL)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  streamer_write_record_start (ob, LTO_eh_landing_pad);
  streamer_write_hwi (ob, lp->index);
  /* Index of the next landing pad in the chain, or zero.  */
  if (lp->next_lp)
    streamer_write_hwi (ob, lp->next_lp->index);
  else
    streamer_write_zero (ob);

  /* Index of the enclosing EH region, or zero.  */
  if (lp->region)
    streamer_write_hwi (ob, lp->region->index);
  else
    streamer_write_zero (ob);

  stream_write_tree (ob, lp->post_landing_pad, true);
}
1522
1523
/* Output the existing eh_table of function FN to OB: the region tree
   root, all regions, all landing pads, the runtime type data and the
   action-chain table.  The record is terminated by LTO_null, which is
   also all that gets emitted when FN has no EH regions.  */

static void
output_eh_regions (struct output_block *ob, struct function *fn)
{
  if (fn->eh && fn->eh->region_tree)
    {
      unsigned i;
      eh_region eh;
      eh_landing_pad lp;
      tree ttype;

      streamer_write_record_start (ob, LTO_eh_table);

      /* Emit the index of the root of the EH region tree.  */
      streamer_write_hwi (ob, fn->eh->region_tree->index);

      /* Emit all the EH regions in the region array.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
        output_eh_region (ob, eh);

      /* Emit all landing pads.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
        output_eh_lp (ob, lp);

      /* Emit all the runtime type data.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
        stream_write_tree (ob, ttype, true);

      /* Emit the table of action chains.  The representation differs
         for the ARM EABI unwinder (trees) vs. other targets (bytes).  */
      if (targetm.arm_eabi_unwinder)
        {
          tree t;
          streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
          FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
            stream_write_tree (ob, t, true);
        }
      else
        {
          uchar c;
          streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
          FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
            streamer_write_char_stream (ob->main_stream, c);
        }
    }

  /* The LTO_null either terminates the record or indicates that there
     are no eh_records at all.  */
  streamer_write_record_start (ob, LTO_null);
}
1577
1578
/* Output all of the active ssa names to the ssa_names stream.
   Each streamed name is emitted as its index, a default-def flag,
   and either its SSA_NAME_VAR or (lacking one) its type.  A zero
   terminates the list.  */

static void
output_ssa_names (struct output_block *ob, struct function *fn)
{
  unsigned int i, len;

  len = vec_safe_length (SSANAMES (fn));
  streamer_write_uhwi (ob, len);

  /* Note the loop starts at 1 — slot 0 of the SSA name vector is
     never streamed.  */
  for (i = 1; i < len; i++)
    {
      tree ptr = (*SSANAMES (fn))[i];

      /* Skip empty slots, names on the free list and virtual operands.  */
      if (ptr == NULL_TREE
          || SSA_NAME_IN_FREE_LIST (ptr)
          || virtual_operand_p (ptr))
        continue;

      streamer_write_uhwi (ob, i);
      streamer_write_char_stream (ob->main_stream,
                                  SSA_NAME_IS_DEFAULT_DEF (ptr));
      if (SSA_NAME_VAR (ptr))
        stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
      else
        /* ??? This drops SSA_NAME_IDENTIFIER on the floor.  */
        stream_write_tree (ob, TREE_TYPE (ptr), true);
    }

  streamer_write_zero (ob);
}
1610
1611
1612 /* Output a wide-int. */
1613
1614 static void
1615 streamer_write_wi (struct output_block *ob,
1616 const widest_int &w)
1617 {
1618 int len = w.get_len ();
1619
1620 streamer_write_uhwi (ob, w.get_precision ());
1621 streamer_write_uhwi (ob, len);
1622 for (int i = 0; i < len; i++)
1623 streamer_write_hwi (ob, w.elt (i));
1624 }
1625
1626
1627 /* Output the cfg. */
1628
1629 static void
1630 output_cfg (struct output_block *ob, struct function *fn)
1631 {
1632 struct lto_output_stream *tmp_stream = ob->main_stream;
1633 basic_block bb;
1634
1635 ob->main_stream = ob->cfg_stream;
1636
1637 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1638 profile_status_for_fn (fn));
1639
1640 /* Output the number of the highest basic block. */
1641 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1642
1643 FOR_ALL_BB_FN (bb, fn)
1644 {
1645 edge_iterator ei;
1646 edge e;
1647
1648 streamer_write_hwi (ob, bb->index);
1649
1650 /* Output the successors and the edge flags. */
1651 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1652 FOR_EACH_EDGE (e, ei, bb->succs)
1653 {
1654 streamer_write_uhwi (ob, e->dest->index);
1655 streamer_write_hwi (ob, e->probability);
1656 streamer_write_gcov_count (ob, e->count);
1657 streamer_write_uhwi (ob, e->flags);
1658 }
1659 }
1660
1661 streamer_write_hwi (ob, -1);
1662
1663 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1664 while (bb->next_bb)
1665 {
1666 streamer_write_hwi (ob, bb->next_bb->index);
1667 bb = bb->next_bb;
1668 }
1669
1670 streamer_write_hwi (ob, -1);
1671
1672 /* ??? The cfgloop interface is tied to cfun. */
1673 gcc_assert (cfun == fn);
1674
1675 /* Output the number of loops. */
1676 streamer_write_uhwi (ob, number_of_loops (fn));
1677
1678 /* Output each loop, skipping the tree root which has number zero. */
1679 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1680 {
1681 struct loop *loop = get_loop (fn, i);
1682
1683 /* Write the index of the loop header. That's enough to rebuild
1684 the loop tree on the reader side. Stream -1 for an unused
1685 loop entry. */
1686 if (!loop)
1687 {
1688 streamer_write_hwi (ob, -1);
1689 continue;
1690 }
1691 else
1692 streamer_write_hwi (ob, loop->header->index);
1693
1694 /* Write everything copy_loop_info copies. */
1695 streamer_write_enum (ob->main_stream,
1696 loop_estimation, EST_LAST, loop->estimate_state);
1697 streamer_write_hwi (ob, loop->any_upper_bound);
1698 if (loop->any_upper_bound)
1699 streamer_write_wi (ob, loop->nb_iterations_upper_bound);
1700 streamer_write_hwi (ob, loop->any_estimate);
1701 if (loop->any_estimate)
1702 streamer_write_wi (ob, loop->nb_iterations_estimate);
1703
1704 /* Write OMP SIMD related info. */
1705 streamer_write_hwi (ob, loop->safelen);
1706 streamer_write_hwi (ob, loop->dont_vectorize);
1707 streamer_write_hwi (ob, loop->force_vectorize);
1708 stream_write_tree (ob, loop->simduid, true);
1709 }
1710
1711 ob->main_stream = tmp_stream;
1712 }
1713
1714
/* Create the header in the file using OB.  If the section type is for
   a function, set FN to the decl for that function.  Writes the section
   header followed by the cfg (function bodies only), main and string
   streams, then closes the section.  */

void
produce_asm (struct output_block *ob, tree fn)
{
  enum lto_section_type section_type = ob->section_type;
  struct lto_function_header header;
  char *section_name;
  struct lto_output_stream *header_stream;

  /* Function bodies go into a per-function section named after the
     function's assembler name.  */
  if (section_type == LTO_section_function_body)
    {
      const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
      section_name = lto_get_section_name (section_type, name, NULL);
    }
  else
    section_name = lto_get_section_name (section_type, NULL, NULL);

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header is computed here before anything is written.  */
  memset (&header, 0, sizeof (struct lto_function_header));

  /* Write the header.  */
  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  header.compressed_size = 0;

  /* Record the sizes of the streams that follow the header.  */
  if (section_type == LTO_section_function_body)
    header.cfg_size = ob->cfg_stream->total_size;
  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  header_stream = XCNEW (struct lto_output_stream);
  lto_output_data_stream (header_stream, &header, sizeof header);
  lto_write_stream (header_stream);
  free (header_stream);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  if (section_type == LTO_section_function_body)
    lto_write_stream (ob->cfg_stream);
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();
}
1765
1766
/* Output the base body of struct function FN using output block OB:
   the static chain, local decls, IL state and a bitpack of function
   flags plus the start/end locations.  The reader must unpack the
   bitpack fields in exactly this order.  */

static void
output_struct_function_base (struct output_block *ob, struct function *fn)
{
  struct bitpack_d bp;
  unsigned i;
  tree t;

  /* Output the static chain and non-local goto save area.  */
  stream_write_tree (ob, fn->static_chain_decl, true);
  stream_write_tree (ob, fn->nonlocal_goto_save_area, true);

  /* Output all the local variables in the function.  */
  streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
  FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
    stream_write_tree (ob, t, true);

  /* Output current IL state of the function.  */
  streamer_write_uhwi (ob, fn->curr_properties);

  /* Write all the attributes for FN.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, fn->is_thunk, 1);
  bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
  bp_pack_value (&bp, fn->returns_pcc_struct, 1);
  bp_pack_value (&bp, fn->returns_struct, 1);
  bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
  bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
  bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
  bp_pack_value (&bp, fn->after_inlining, 1);
  bp_pack_value (&bp, fn->stdarg, 1);
  bp_pack_value (&bp, fn->has_nonlocal_label, 1);
  bp_pack_value (&bp, fn->calls_alloca, 1);
  bp_pack_value (&bp, fn->calls_setjmp, 1);
  bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
  bp_pack_value (&bp, fn->has_simduid_loops, 1);
  bp_pack_value (&bp, fn->va_list_fpr_size, 8);
  bp_pack_value (&bp, fn->va_list_gpr_size, 8);

  /* Output the function start and end loci.  */
  stream_output_location (ob, &bp, fn->function_start_locus);
  stream_output_location (ob, &bp, fn->function_end_locus);

  streamer_write_bitpack (&bp);
}
1813
1814
/* Output the body of function NODE->DECL to its own
   LTO_section_function_body section: the result/argument decls,
   DECL_INITIAL, and — when the function has a GIMPLE body — the
   struct function base, SSA names, EH regions, all basic blocks
   and the CFG.  */

static void
output_function (struct cgraph_node *node)
{
  tree function;
  struct function *fn;
  basic_block bb;
  struct output_block *ob;

  function = node->decl;
  fn = DECL_STRUCT_FUNCTION (function);
  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->cgraph_node = node;

  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  /* Set current_function_decl and cfun.  */
  push_cfun (fn);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  streamer_write_record_start (ob, LTO_function);

  /* Output decls for parameters and args.  */
  stream_write_tree (ob, DECL_RESULT (function), true);
  streamer_write_chain (ob, DECL_ARGUMENTS (function), true);

  /* Output DECL_INITIAL for the function, which contains the tree of
     lexical scopes.  */
  stream_write_tree (ob, DECL_INITIAL (function), true);

  /* We also stream abstract functions where we stream only stuff needed for
     debug info.  A leading 1/0 tells the reader whether a body follows.  */
  if (gimple_has_body_p (function))
    {
      streamer_write_uhwi (ob, 1);
      output_struct_function_base (ob, fn);

      /* Output all the SSA names used in the function.  */
      output_ssa_names (ob, fn);

      /* Output any exception handling regions.  */
      output_eh_regions (ob, fn);


      /* We will renumber the statements.  The code that does this uses
         the same ordering that we use for serializing them so we can use
         the same code on the other end and not have to write out the
         statement numbers.  We do not assign UIDs to PHIs here because
         virtual PHIs get re-computed on-the-fly which would make numbers
         inconsistent.  */
      set_gimple_stmt_max_uid (cfun, 0);
      FOR_ALL_BB_FN (bb, cfun)
        {
          gimple_stmt_iterator gsi;
          for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              gimple stmt = gsi_stmt (gsi);

              /* Virtual PHIs are not going to be streamed.  */
              if (!virtual_operand_p (gimple_phi_result (stmt)))
                gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
            }
          for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              gimple stmt = gsi_stmt (gsi);
              gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
            }
        }
      /* To avoid keeping duplicate gimple IDs in the statements, renumber
         virtual phis now.  */
      FOR_ALL_BB_FN (bb, cfun)
        {
          gimple_stmt_iterator gsi;
          for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              gimple stmt = gsi_stmt (gsi);
              if (virtual_operand_p (gimple_phi_result (stmt)))
                gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
            }
        }

      /* Output the code for the function.  */
      FOR_ALL_BB_FN (bb, fn)
        output_bb (ob, bb, fn);

      /* The terminator for this function.  */
      streamer_write_record_start (ob, LTO_null);

      output_cfg (ob, fn);

      pop_cfun ();
   }
  else
    streamer_write_uhwi (ob, 0);

  /* Create a section to hold the pickled output of this function.  */
  produce_asm (ob, function);

  destroy_output_block (ob);
}
1920
1921
/* Emit toplevel asms into the LTO_section_asm section: each asm
   statement's string and its order, terminated by a NULL string,
   preceded by a section header.  Does nothing when there are no
   asm nodes.  */

void
lto_output_toplevel_asms (void)
{
  struct output_block *ob;
  struct asm_node *can;
  char *section_name;
  struct lto_output_stream *header_stream;
  struct lto_asm_header header;

  if (! asm_nodes)
    return;

  ob = create_output_block (LTO_section_asm);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* Stream each toplevel asm with its original order.  */
  for (can = asm_nodes; can; can = can->next)
    {
      streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
      streamer_write_hwi (ob, can->order);
    }

  /* A NULL string terminates the asm list.  */
  streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);

  section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header stream is computed here.  */
  memset (&header, 0, sizeof (header));

  /* Write the header.  */
  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  header_stream = XCNEW (struct lto_output_stream);
  lto_output_data_stream (header_stream, &header, sizeof (header));
  lto_write_stream (header_stream);
  free (header_stream);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  destroy_output_block (ob);
}
1977
1978
/* Copy the function body of NODE without deserializing: the raw
   section bytes from the input file are re-emitted verbatim and the
   per-function decl streams are copied from the in state into the
   out state.  */

static void
copy_function (struct cgraph_node *node)
{
  tree function = node->decl;
  struct lto_file_decl_data *file_data = node->lto_file_data;
  struct lto_output_stream *output_stream = XCNEW (struct lto_output_stream);
  const char *data;
  size_t len;
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
  char *section_name =
    lto_get_section_name (LTO_section_function_body, name, NULL);
  size_t i, j;
  struct lto_in_decl_state *in_state;
  struct lto_out_decl_state *out_state = lto_get_out_decl_state ();

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* We may have renamed the declaration, e.g., a static function.  */
  name = lto_get_decl_name_mapping (file_data, name);

  data = lto_get_section_data (file_data, LTO_section_function_body,
                               name, &len);
  gcc_assert (data);

  /* Do a bit copy of the function body.  */
  lto_output_data_stream (output_stream, data, len);
  lto_write_stream (output_stream);

  /* Copy decls.  */
  in_state =
    lto_get_function_in_decl_state (node->lto_file_data, function);
  gcc_assert (in_state);

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      size_t n = in_state->streams[i].size;
      tree *trees = in_state->streams[i].trees;
      struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);

      /* The out state must have the same indices as the in state.
         So just copy the vector.  All the encoders in the in state
         must be empty where we reach here.  */
      gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
      encoder->trees.reserve_exact (n);
      for (j = 0; j < n; j++)
        encoder->trees.safe_push (trees[j]);
    }

  lto_free_section_data (file_data, LTO_section_function_body, name,
                         data, len);
  free (output_stream);
  lto_end_section ();
}
2035
2036 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2037
2038 static tree
2039 wrap_refs (tree *tp, int *ws, void *)
2040 {
2041 tree t = *tp;
2042 if (handled_component_p (t)
2043 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL)
2044 {
2045 tree decl = TREE_OPERAND (t, 0);
2046 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2047 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2048 build1 (ADDR_EXPR, ptrtype, decl),
2049 build_int_cst (ptrtype, 0));
2050 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2051 *ws = 0;
2052 }
2053 else if (TREE_CODE (t) == CONSTRUCTOR)
2054 ;
2055 else if (!EXPR_P (t))
2056 *ws = 0;
2057 return NULL_TREE;
2058 }
2059
/* Main entry point from the pass manager.  Stream out the body of every
   encoded function (raw-copying it when no GIMPLE body is available at
   WPA time), wrap variable initializers, and finally emit the symbol
   table.  */

void
lto_output (void)
{
  struct lto_out_decl_state *decl_state;
#ifdef ENABLE_CHECKING
  /* Tracks DECL_UIDs already emitted, to assert each body is written
     exactly once.  */
  bitmap output = lto_bitmap_alloc ();
#endif
  int i, n_nodes;
  lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* Initialize the streamer.  */
  lto_streamer_init ();

  n_nodes = lto_symtab_encoder_size (encoder);
  /* Process only the functions with bodies.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
	{
	  if (lto_symtab_encoder_encode_body_p (encoder, node)
	      && !node->alias)
	    {
#ifdef ENABLE_CHECKING
	      gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
	      bitmap_set_bit (output, DECL_UID (node->decl));
#endif
	      /* Each body gets its own out-decl state so its decl
		 streams are self-contained; the state is recorded for
		 produce_asm_for_decls to write out later.  */
	      decl_state = lto_new_out_decl_state ();
	      lto_push_out_decl_state (decl_state);
	      if (gimple_has_body_p (node->decl) || !flag_wpa)
		output_function (node);
	      else
		copy_function (node);
	      gcc_assert (lto_get_out_decl_state () == decl_state);
	      lto_pop_out_decl_state ();
	      lto_record_function_out_decl_state (node->decl, decl_state);
	    }
	}
      else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
	{
	  /* Wrap symbol references inside the ctor in a type
	     preserving MEM_REF.  Only done when not reading LTO input
	     already (i.e. during the compile stage).  */
	  tree ctor = DECL_INITIAL (node->decl);
	  if (ctor && !in_lto_p)
	    walk_tree (&ctor, wrap_refs, NULL, NULL);
	}
    }

  /* Emit the callgraph after emitting function bodies.  This needs to
     be done now to make sure that all the statements in every function
     have been renumbered so that edges can be associated with call
     statements using the statement UIDs.  */
  output_symtab ();

#ifdef ENABLE_CHECKING
  lto_bitmap_free (output);
#endif
}
2120
2121 /* Write each node in encoded by ENCODER to OB, as well as those reachable
2122 from it and required for correct representation of its semantics.
2123 Each node in ENCODER must be a global declaration or a type. A node
2124 is written only once, even if it appears multiple times in the
2125 vector. Certain transitively-reachable nodes, such as those
2126 representing expressions, may be duplicated, but such nodes
2127 must not appear in ENCODER itself. */
2128
2129 static void
2130 write_global_stream (struct output_block *ob,
2131 struct lto_tree_ref_encoder *encoder)
2132 {
2133 tree t;
2134 size_t index;
2135 const size_t size = lto_tree_ref_encoder_size (encoder);
2136
2137 for (index = 0; index < size; index++)
2138 {
2139 t = lto_tree_ref_encoder_get_tree (encoder, index);
2140 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2141 stream_write_tree (ob, t, false);
2142 }
2143 }
2144
2145
2146 /* Write a sequence of indices into the globals vector corresponding
2147 to the trees in ENCODER. These are used by the reader to map the
2148 indices used to refer to global entities within function bodies to
2149 their referents. */
2150
2151 static void
2152 write_global_references (struct output_block *ob,
2153 struct lto_output_stream *ref_stream,
2154 struct lto_tree_ref_encoder *encoder)
2155 {
2156 tree t;
2157 uint32_t index;
2158 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2159
2160 /* Write size as 32-bit unsigned. */
2161 lto_output_data_stream (ref_stream, &size, sizeof (int32_t));
2162
2163 for (index = 0; index < size; index++)
2164 {
2165 uint32_t slot_num;
2166
2167 t = lto_tree_ref_encoder_get_tree (encoder, index);
2168 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2169 gcc_assert (slot_num != (unsigned)-1);
2170 lto_output_data_stream (ref_stream, &slot_num, sizeof slot_num);
2171 }
2172 }
2173
2174
2175 /* Write all the streams in an lto_out_decl_state STATE using
2176 output block OB and output stream OUT_STREAM. */
2177
2178 void
2179 lto_output_decl_state_streams (struct output_block *ob,
2180 struct lto_out_decl_state *state)
2181 {
2182 int i;
2183
2184 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2185 write_global_stream (ob, &state->streams[i]);
2186 }
2187
2188
2189 /* Write all the references in an lto_out_decl_state STATE using
2190 output block OB and output stream OUT_STREAM. */
2191
2192 void
2193 lto_output_decl_state_refs (struct output_block *ob,
2194 struct lto_output_stream *out_stream,
2195 struct lto_out_decl_state *state)
2196 {
2197 unsigned i;
2198 uint32_t ref;
2199 tree decl;
2200
2201 /* Write reference to FUNCTION_DECL. If there is not function,
2202 write reference to void_type_node. */
2203 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2204 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2205 gcc_assert (ref != (unsigned)-1);
2206 lto_output_data_stream (out_stream, &ref, sizeof (uint32_t));
2207
2208 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2209 write_global_references (ob, out_stream, &state->streams[i]);
2210 }
2211
2212
2213 /* Return the written size of STATE. */
2214
2215 static size_t
2216 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2217 {
2218 int i;
2219 size_t size;
2220
2221 size = sizeof (int32_t); /* fn_ref. */
2222 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2223 {
2224 size += sizeof (int32_t); /* vector size. */
2225 size += (lto_tree_ref_encoder_size (&state->streams[i])
2226 * sizeof (int32_t));
2227 }
2228 return size;
2229 }
2230
2231
2232 /* Write symbol T into STREAM in CACHE. SEEN specifies symbols we wrote
2233 so far. */
2234
2235 static void
2236 write_symbol (struct streamer_tree_cache_d *cache,
2237 struct lto_output_stream *stream,
2238 tree t, struct pointer_set_t *seen, bool alias)
2239 {
2240 const char *name;
2241 enum gcc_plugin_symbol_kind kind;
2242 enum gcc_plugin_symbol_visibility visibility;
2243 unsigned slot_num;
2244 uint64_t size;
2245 const char *comdat;
2246 unsigned char c;
2247
2248 /* None of the following kinds of symbols are needed in the
2249 symbol table. */
2250 if (!TREE_PUBLIC (t)
2251 || is_builtin_fn (t)
2252 || DECL_ABSTRACT (t)
2253 || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
2254 return;
2255 gcc_assert (TREE_CODE (t) != RESULT_DECL);
2256
2257 gcc_assert (TREE_CODE (t) == VAR_DECL
2258 || TREE_CODE (t) == FUNCTION_DECL);
2259
2260 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2261
2262 /* This behaves like assemble_name_raw in varasm.c, performing the
2263 same name manipulations that ASM_OUTPUT_LABELREF does. */
2264 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2265
2266 if (pointer_set_contains (seen, name))
2267 return;
2268 pointer_set_insert (seen, name);
2269
2270 streamer_tree_cache_lookup (cache, t, &slot_num);
2271 gcc_assert (slot_num != (unsigned)-1);
2272
2273 if (DECL_EXTERNAL (t))
2274 {
2275 if (DECL_WEAK (t))
2276 kind = GCCPK_WEAKUNDEF;
2277 else
2278 kind = GCCPK_UNDEF;
2279 }
2280 else
2281 {
2282 if (DECL_WEAK (t))
2283 kind = GCCPK_WEAKDEF;
2284 else if (DECL_COMMON (t))
2285 kind = GCCPK_COMMON;
2286 else
2287 kind = GCCPK_DEF;
2288
2289 /* When something is defined, it should have node attached. */
2290 gcc_assert (alias || TREE_CODE (t) != VAR_DECL
2291 || varpool_get_node (t)->definition);
2292 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2293 || (cgraph_get_node (t)
2294 && cgraph_get_node (t)->definition));
2295 }
2296
2297 /* Imitate what default_elf_asm_output_external do.
2298 When symbol is external, we need to output it with DEFAULT visibility
2299 when compiling with -fvisibility=default, while with HIDDEN visibility
2300 when symbol has attribute (visibility("hidden")) specified.
2301 targetm.binds_local_p check DECL_VISIBILITY_SPECIFIED and gets this
2302 right. */
2303
2304 if (DECL_EXTERNAL (t)
2305 && !targetm.binds_local_p (t))
2306 visibility = GCCPV_DEFAULT;
2307 else
2308 switch (DECL_VISIBILITY (t))
2309 {
2310 case VISIBILITY_DEFAULT:
2311 visibility = GCCPV_DEFAULT;
2312 break;
2313 case VISIBILITY_PROTECTED:
2314 visibility = GCCPV_PROTECTED;
2315 break;
2316 case VISIBILITY_HIDDEN:
2317 visibility = GCCPV_HIDDEN;
2318 break;
2319 case VISIBILITY_INTERNAL:
2320 visibility = GCCPV_INTERNAL;
2321 break;
2322 }
2323
2324 if (kind == GCCPK_COMMON
2325 && DECL_SIZE_UNIT (t)
2326 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2327 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2328 else
2329 size = 0;
2330
2331 if (DECL_ONE_ONLY (t))
2332 comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
2333 else
2334 comdat = "";
2335
2336 lto_output_data_stream (stream, name, strlen (name) + 1);
2337 lto_output_data_stream (stream, comdat, strlen (comdat) + 1);
2338 c = (unsigned char) kind;
2339 lto_output_data_stream (stream, &c, 1);
2340 c = (unsigned char) visibility;
2341 lto_output_data_stream (stream, &c, 1);
2342 lto_output_data_stream (stream, &size, 8);
2343 lto_output_data_stream (stream, &slot_num, 4);
2344 }
2345
2346 /* Return true if NODE should appear in the plugin symbol table. */
2347
2348 bool
2349 output_symbol_p (symtab_node *node)
2350 {
2351 struct cgraph_node *cnode;
2352 if (!symtab_real_symbol_p (node))
2353 return false;
2354 /* We keep external functions in symtab for sake of inlining
2355 and devirtualization. We do not want to see them in symbol table as
2356 references unless they are really used. */
2357 cnode = dyn_cast <cgraph_node *> (node);
2358 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2359 && cnode->callers)
2360 return true;
2361
2362 /* Ignore all references from external vars initializers - they are not really
2363 part of the compilation unit until they are used by folding. Some symbols,
2364 like references to external construction vtables can not be referred to at all.
2365 We decide this at can_refer_decl_in_current_unit_p. */
2366 if (!node->definition || DECL_EXTERNAL (node->decl))
2367 {
2368 int i;
2369 struct ipa_ref *ref;
2370 for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list,
2371 i, ref); i++)
2372 {
2373 if (ref->use == IPA_REF_ALIAS)
2374 continue;
2375 if (is_a <cgraph_node *> (ref->referring))
2376 return true;
2377 if (!DECL_EXTERNAL (ref->referring->decl))
2378 return true;
2379 }
2380 return false;
2381 }
2382 return true;
2383 }
2384
2385
/* Write an IL symbol table to OB, covering the nodes recorded in OB's
   symtab encoder.  */

static void
produce_symtab (struct output_block *ob)
{
  struct streamer_tree_cache_d *cache = ob->writer_cache;
  char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
  struct pointer_set_t *seen;
  struct lto_output_stream stream;
  lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
  lto_symtab_encoder_iterator lsei;

  lto_begin_section (section_name, false);
  free (section_name);

  /* SEEN de-duplicates symbols by assembler name across both passes
     below (see write_symbol).  */
  seen = pointer_set_create ();
  memset (&stream, 0, sizeof (stream));

  /* Write the symbol table.
     First write everything defined and then all declarations.
     This is necessary to handle cases where we have duplicated symbols.  */
  for (lsei = lsei_start (encoder);
       !lsei_end_p (lsei); lsei_next (&lsei))
    {
      symtab_node *node = lsei_node (lsei);

      if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
	continue;
      write_symbol (cache, &stream, node->decl, seen, false);
    }
  /* Second pass: external declarations only.  */
  for (lsei = lsei_start (encoder);
       !lsei_end_p (lsei); lsei_next (&lsei))
    {
      symtab_node *node = lsei_node (lsei);

      if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
	continue;
      write_symbol (cache, &stream, node->decl, seen, false);
    }

  lto_write_stream (&stream);
  pointer_set_destroy (seen);

  lto_end_section ();
}
2432
2433
/* This pass is run after all of the functions are serialized and all
   of the IPA passes have written their serialized forms.  This pass
   causes the vector of all of the global decls and types used from
   this file to be written in to a section that can then be read in to
   recover these on other side.

   Section layout: a lto_decl_header, the per-state reference tables
   (global state first, then one per function), the main tree stream
   and finally the string stream.  The header carries the sizes of the
   three variable-length parts, so they must be computed after the
   streams are filled but written before them.  */

void
produce_asm_for_decls (void)
{
  struct lto_out_decl_state *out_state;
  struct lto_out_decl_state *fn_out_state;
  struct lto_decl_header header;
  char *section_name;
  struct output_block *ob;
  struct lto_output_stream *header_stream, *decl_state_stream;
  unsigned idx, num_fns;
  size_t decl_state_size;
  int32_t num_decl_states;

  ob = create_output_block (LTO_section_decls);
  ob->global = true;

  memset (&header, 0, sizeof (struct lto_decl_header));

  section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  gcc_assert (!alias_pairs);

  /* Get rid of the global decl state hash tables to save some memory.  */
  out_state = lto_get_out_decl_state ();
  for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
    if (out_state->streams[i].tree_hash_table)
      {
	delete out_state->streams[i].tree_hash_table;
	out_state->streams[i].tree_hash_table = NULL;
      }

  /* Write the global symbols.  This populates the writer cache with
     the slot numbers the reference tables below refer to.  */
  lto_output_decl_state_streams (ob, out_state);
  num_fns = lto_function_decl_states.length ();
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_streams (ob, fn_out_state);
    }

  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  /* Currently not used.  This field would allow us to preallocate
     the globals vector, so that it need not be resized as it is extended.  */
  header.num_nodes = -1;

  /* Compute the total size of all decl out states.  */
  decl_state_size = sizeof (int32_t);
  decl_state_size += lto_out_decl_state_written_size (out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      decl_state_size += lto_out_decl_state_written_size (fn_out_state);
    }
  header.decl_state_size = decl_state_size;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  /* The header must go out first so the reader can find the sizes of
     the parts that follow.  */
  header_stream = XCNEW (struct lto_output_stream);
  lto_output_data_stream (header_stream, &header, sizeof header);
  lto_write_stream (header_stream);
  free (header_stream);

  /* Write the main out-decl state, followed by out-decl states of
     functions.  */
  decl_state_stream = XCNEW (struct lto_output_stream);
  num_decl_states = num_fns + 1;
  lto_output_data_stream (decl_state_stream, &num_decl_states,
			  sizeof (num_decl_states));
  lto_output_decl_state_refs (ob, decl_state_stream, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_refs (ob, decl_state_stream, fn_out_state);
    }
  lto_write_stream (decl_state_stream);
  free (decl_state_stream);

  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  /* Write the symbol table.  It is used by linker to determine dependencies
     and thus we can skip it for WPA.  */
  if (!flag_wpa)
    produce_symtab (ob);

  /* Write command line opts.  */
  lto_write_options ();

  /* Deallocate memory and clean up.  */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_delete_out_decl_state (fn_out_state);
    }
  lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
  lto_function_decl_states.release ();
  destroy_output_block (ob);
}