gcc/lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2014 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "stor-layout.h"
29 #include "stringpool.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "hashtab.h"
35 #include "basic-block.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-expr.h"
39 #include "is-a.h"
40 #include "gimple.h"
41 #include "gimple-iterator.h"
42 #include "gimple-ssa.h"
43 #include "tree-ssanames.h"
44 #include "tree-pass.h"
45 #include "function.h"
46 #include "diagnostic-core.h"
47 #include "except.h"
48 #include "lto-symtab.h"
49 #include "lto-streamer.h"
50 #include "data-streamer.h"
51 #include "gimple-streamer.h"
52 #include "tree-streamer.h"
53 #include "streamer-hooks.h"
54 #include "cfgloop.h"
55 #include "builtins.h"
56
57
58 static void lto_write_tree (struct output_block*, tree, bool);
59
60 /* Clear the line info stored in OB. */
61
62 static void
63 clear_line_info (struct output_block *ob)
64 {
65 ob->current_file = NULL;
66 ob->current_line = 0;
67 ob->current_col = 0;
68 }
69
70
71 /* Create the output block and return it. SECTION_TYPE is
72 LTO_section_function_body or LTO_section_static_initializer. */
73
74 struct output_block *
75 create_output_block (enum lto_section_type section_type)
76 {
77 struct output_block *ob = XCNEW (struct output_block);
78
79 ob->section_type = section_type;
80 ob->decl_state = lto_get_out_decl_state ();
81 ob->main_stream = XCNEW (struct lto_output_stream);
82 ob->string_stream = XCNEW (struct lto_output_stream);
83 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
84
85 if (section_type == LTO_section_function_body)
86 ob->cfg_stream = XCNEW (struct lto_output_stream);
87
88 clear_line_info (ob);
89
90 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
91 gcc_obstack_init (&ob->obstack);
92
93 return ob;
94 }
95
96
97 /* Destroy the output block OB. */
98
99 void
100 destroy_output_block (struct output_block *ob)
101 {
102 enum lto_section_type section_type = ob->section_type;
103
104 delete ob->string_hash_table;
105 ob->string_hash_table = NULL;
106
107 free (ob->main_stream);
108 free (ob->string_stream);
109 if (section_type == LTO_section_function_body)
110 free (ob->cfg_stream);
111
112 streamer_tree_cache_delete (ob->writer_cache);
113 obstack_free (&ob->obstack, NULL);
114
115 free (ob);
116 }
117
118
119 /* Look up NODE in the type table and write the index for it to OB. */
120
121 static void
122 output_type_ref (struct output_block *ob, tree node)
123 {
124 streamer_write_record_start (ob, LTO_type_ref);
125 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
126 }
127
128
129 /* Return true if tree node T is written to various tables. For these
130 nodes, we sometimes want to write their physical representation
131 (via lto_output_tree), and sometimes we need to emit an index
132 reference into a table (via lto_output_tree_ref). */
133
134 static bool
135 tree_is_indexable (tree t)
136 {
137 /* Parameters and return values of functions of variably modified types
138 must go to global stream, because they may be used in the type
139 definition. */
140 if (TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
141 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
142 else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
143 || TREE_CODE (t) == TYPE_DECL
144 || TREE_CODE (t) == CONST_DECL
145 || TREE_CODE (t) == NAMELIST_DECL)
146 && decl_function_context (t))
147 return false;
148 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
149 return false;
150 /* Variably modified types need to be streamed alongside function
151 bodies because they can refer to local entities. Together with
152 them we have to localize their members as well.
153 ??? In theory that includes non-FIELD_DECLs as well. */
154 else if (TYPE_P (t)
155 && variably_modified_type_p (t, NULL_TREE))
156 return false;
157 else if (TREE_CODE (t) == FIELD_DECL
158 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
159 return false;
160 else
161 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
162 }
163
164
165 /* Output info about new location into bitpack BP.
166 After outputting bitpack, lto_output_location_data has
167 to be done to output actual data. */
168
169 void
170 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
171 location_t loc)
172 {
173 expanded_location xloc;
174
175 loc = LOCATION_LOCUS (loc);
176 bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
177 if (loc == UNKNOWN_LOCATION)
178 return;
179
180 xloc = expand_location (loc);
181
182 bp_pack_value (bp, ob->current_file != xloc.file, 1);
183 bp_pack_value (bp, ob->current_line != xloc.line, 1);
184 bp_pack_value (bp, ob->current_col != xloc.column, 1);
185
186 if (ob->current_file != xloc.file)
187 bp_pack_var_len_unsigned (bp,
188 streamer_string_index (ob, xloc.file,
189 strlen (xloc.file) + 1,
190 true));
191 ob->current_file = xloc.file;
192
193 if (ob->current_line != xloc.line)
194 bp_pack_var_len_unsigned (bp, xloc.line);
195 ob->current_line = xloc.line;
196
197 if (ob->current_col != xloc.column)
198 bp_pack_var_len_unsigned (bp, xloc.column);
199 ob->current_col = xloc.column;
200 }
201
202
203 /* If EXPR is an indexable tree node, output a reference to it to
204 output block OB. Otherwise, output the physical representation of
205 EXPR to OB. */
206
207 static void
208 lto_output_tree_ref (struct output_block *ob, tree expr)
209 {
210 enum tree_code code;
211
212 if (TYPE_P (expr))
213 {
214 output_type_ref (ob, expr);
215 return;
216 }
217
218 code = TREE_CODE (expr);
219 switch (code)
220 {
221 case SSA_NAME:
222 streamer_write_record_start (ob, LTO_ssa_name_ref);
223 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
224 break;
225
226 case FIELD_DECL:
227 streamer_write_record_start (ob, LTO_field_decl_ref);
228 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
229 break;
230
231 case FUNCTION_DECL:
232 streamer_write_record_start (ob, LTO_function_decl_ref);
233 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
234 break;
235
236 case VAR_DECL:
237 case DEBUG_EXPR_DECL:
238 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
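/* Fall through: VAR_DECLs, DEBUG_EXPR_DECLs and PARM_DECLs are all streamed as global decl references.  */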
239 case PARM_DECL:
240 streamer_write_record_start (ob, LTO_global_decl_ref);
241 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
242 break;
243
244 case CONST_DECL:
245 streamer_write_record_start (ob, LTO_const_decl_ref);
246 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
247 break;
248
249 case IMPORTED_DECL:
250 gcc_assert (decl_function_context (expr) == NULL);
251 streamer_write_record_start (ob, LTO_imported_decl_ref);
252 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
253 break;
254
255 case TYPE_DECL:
256 streamer_write_record_start (ob, LTO_type_decl_ref);
257 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
258 break;
259
260 case NAMELIST_DECL:
261 streamer_write_record_start (ob, LTO_namelist_decl_ref);
262 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
263 break;
264
265 case NAMESPACE_DECL:
266 streamer_write_record_start (ob, LTO_namespace_decl_ref);
267 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
268 break;
269
270 case LABEL_DECL:
271 streamer_write_record_start (ob, LTO_label_decl_ref);
272 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
273 break;
274
275 case RESULT_DECL:
276 streamer_write_record_start (ob, LTO_result_decl_ref);
277 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
278 break;
279
280 case TRANSLATION_UNIT_DECL:
281 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
282 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
283 break;
284
285 default:
286 /* No other node is indexable, so it should have been handled by
287 lto_output_tree. */
288 gcc_unreachable ();
289 }
290 }
291
292
293 /* Return true if EXPR is a tree node that can be written to disk. */
294
295 static inline bool
296 lto_is_streamable (tree expr)
297 {
298 enum tree_code code = TREE_CODE (expr);
299
300 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
301 name version in lto_output_tree_ref (see output_ssa_names). */
302 return !is_lang_specific (expr)
303 && code != SSA_NAME
304 && code != CALL_EXPR
305 && code != LANG_TYPE
306 && code != MODIFY_EXPR
307 && code != INIT_EXPR
308 && code != TARGET_EXPR
309 && code != BIND_EXPR
310 && code != WITH_CLEANUP_EXPR
311 && code != STATEMENT_LIST
312 && (code == CASE_LABEL_EXPR
313 || code == DECL_EXPR
314 || TREE_CODE_CLASS (code) != tcc_statement);
315 }
316
317
318 /* For EXPR, look up and return what we want to stream to OB as DECL_INITIAL. */
319
320 static tree
321 get_symbol_initial_value (struct output_block *ob, tree expr)
322 {
323 gcc_checking_assert (DECL_P (expr)
324 && TREE_CODE (expr) != FUNCTION_DECL
325 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
326
327 /* Handle DECL_INITIAL for symbols. */
328 tree initial = DECL_INITIAL (expr);
329 if (TREE_CODE (expr) == VAR_DECL
330 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
331 && !DECL_IN_CONSTANT_POOL (expr)
332 && initial)
333 {
334 lto_symtab_encoder_t encoder;
335 varpool_node *vnode;
336
337 encoder = ob->decl_state->symtab_node_encoder;
338 vnode = varpool_get_node (expr);
339 if (!vnode
340 || !lto_symtab_encoder_encode_initializer_p (encoder,
341 vnode))
342 initial = error_mark_node;
343 }
344
345 return initial;
346 }
347
348
349 /* Write a physical representation of tree node EXPR to output block
350 OB. If REF_P is true, the leaves of EXPR are emitted as references
351 via lto_output_tree_ref. EXPR is expected to have already been
352 entered into the streamer cache. */
353
354 static void
355 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
356 {
357 /* Pack all the non-pointer fields in EXPR into a bitpack and write
358 the resulting bitpack. */
359 bitpack_d bp = bitpack_create (ob->main_stream);
360 streamer_pack_tree_bitfields (ob, &bp, expr);
361 streamer_write_bitpack (&bp);
362
363 /* Write all the pointer fields in EXPR. */
364 streamer_write_tree_body (ob, expr, ref_p);
365
366 /* Write any LTO-specific data to OB. */
367 if (DECL_P (expr)
368 && TREE_CODE (expr) != FUNCTION_DECL
369 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
370 {
371 /* Handle DECL_INITIAL for symbols. */
372 tree initial = get_symbol_initial_value (ob, expr);
373 stream_write_tree (ob, initial, ref_p);
374 }
375 }
376
377 /* Write a physical representation of tree node EXPR to output block
378 OB. If REF_P is true, the leaves of EXPR are emitted as references
379 via lto_output_tree_ref. EXPR is expected to have already been
380 entered into the streamer cache. */
381
382 static void
383 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
384 {
385 if (!lto_is_streamable (expr))
386 internal_error ("tree code %qs is not supported in LTO streams",
387 get_tree_code_name (TREE_CODE (expr)));
388
389 /* Write the header, containing everything needed to materialize
390 EXPR on the reading side. */
391 streamer_write_tree_header (ob, expr);
392
393 lto_write_tree_1 (ob, expr, ref_p);
394
395 /* Mark the end of EXPR. */
396 streamer_write_zero (ob);
397 }
398
399 /* Emit the physical representation of tree node EXPR to output block
400 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
401 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
402
403 static void
404 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
405 bool ref_p, bool this_ref_p)
406 {
407 unsigned ix;
408
409 gcc_checking_assert (expr != NULL_TREE
410 && !(this_ref_p && tree_is_indexable (expr)));
411
412 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
413 expr, hash, &ix);
414 gcc_assert (!exists_p);
415 if (streamer_handle_as_builtin_p (expr))
416 {
417 /* MD and NORMAL builtins do not need to be written out
418 completely as they are always instantiated by the
419 compiler on startup. The only builtins that need to
420 be written out are BUILT_IN_FRONTEND. For all other
421 builtins, we simply write the class and code. */
422 streamer_write_builtin (ob, expr);
423 }
424 else if (TREE_CODE (expr) == INTEGER_CST
425 && !TREE_OVERFLOW (expr))
426 {
427 /* Shared INTEGER_CST nodes are special because they need their
428 original type to be materialized by the reader (to implement
429 TYPE_CACHED_VALUES). */
430 streamer_write_integer_cst (ob, expr, ref_p);
431 }
432 else
433 {
434 /* This is the first time we see EXPR, write its fields
435 to OB. */
436 lto_write_tree (ob, expr, ref_p);
437 }
438 }
439
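/* Per-tree state of the Tarjan-style DFS walk used below: the DFS pre-order number assigned to the tree and the smallest DFS number reachable from it, used to detect SCC roots.  */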
440 struct sccs
441 {
442 unsigned int dfsnum;
443 unsigned int low;
444 };
445
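/* A tree together with the hash value computed for it, as kept on the SCC stack.  */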
446 struct scc_entry
447 {
448 tree t;
449 hashval_t hash;
450 };
451
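/* Global state of the DFS walk: the next DFS number to hand out, the stack of SCC candidates, and a map from tree to its DFS state, allocated on SCCSTATE_OBSTACK.  */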
452 static unsigned int next_dfs_num;
453 static vec<scc_entry> sccstack;
454 static struct pointer_map_t *sccstate;
455 static struct obstack sccstate_obstack;
456
457 static void
458 DFS_write_tree (struct output_block *ob, sccs *from_state,
459 tree expr, bool ref_p, bool this_ref_p);
460
461 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
462 DFS recurse for all tree edges originating from it. */
463
464 static void
465 DFS_write_tree_body (struct output_block *ob,
466 tree expr, sccs *expr_state, bool ref_p)
467 {
468 #define DFS_follow_tree_edge(DEST) \
469 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
470
471 enum tree_code code;
472
473 code = TREE_CODE (expr);
474
475 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
476 {
477 if (TREE_CODE (expr) != IDENTIFIER_NODE)
478 DFS_follow_tree_edge (TREE_TYPE (expr));
479 }
480
481 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
482 {
483 for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
484 DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
485 }
486
487 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
488 {
489 DFS_follow_tree_edge (TREE_REALPART (expr));
490 DFS_follow_tree_edge (TREE_IMAGPART (expr));
491 }
492
493 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
494 {
495 /* Drop names that were created for anonymous entities. */
496 if (DECL_NAME (expr)
497 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
498 && ANON_AGGRNAME_P (DECL_NAME (expr)))
499 ;
500 else
501 DFS_follow_tree_edge (DECL_NAME (expr));
502 DFS_follow_tree_edge (DECL_CONTEXT (expr));
503 }
504
505 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
506 {
507 DFS_follow_tree_edge (DECL_SIZE (expr));
508 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
509
510 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
511 special handling in LTO, it must be handled by streamer hooks. */
512
513 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
514
515 /* Do not follow DECL_ABSTRACT_ORIGIN. We cannot handle debug information
516 for early inlining so drop it on the floor instead of ICEing in
517 dwarf2out.c. */
518
519 if ((TREE_CODE (expr) == VAR_DECL
520 || TREE_CODE (expr) == PARM_DECL)
521 && DECL_HAS_VALUE_EXPR_P (expr))
522 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
523 if (TREE_CODE (expr) == VAR_DECL)
524 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
525 }
526
527 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
528 {
529 if (TREE_CODE (expr) == TYPE_DECL)
530 DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
531 }
532
533 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
534 {
535 /* Make sure we don't inadvertently set the assembler name. */
536 if (DECL_ASSEMBLER_NAME_SET_P (expr))
537 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
538 }
539
540 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
541 {
542 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
543 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
544 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
545 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
546 DFS_follow_tree_edge (DECL_FCONTEXT (expr));
547 }
548
549 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
550 {
551 DFS_follow_tree_edge (DECL_VINDEX (expr));
552 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
553 /* Do not follow DECL_FUNCTION_SPECIFIC_TARGET. They will be regenerated. */
554 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
555 }
556
557 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
558 {
559 DFS_follow_tree_edge (TYPE_SIZE (expr));
560 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
561 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
562 DFS_follow_tree_edge (TYPE_NAME (expr));
563 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
564 reconstructed during fixup. */
565 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
566 during fixup. */
567 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
568 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
569 /* TYPE_CANONICAL is re-computed during type merging, so no need
570 to follow it here. */
571 DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
572 }
573
574 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
575 {
576 if (TREE_CODE (expr) == ENUMERAL_TYPE)
577 DFS_follow_tree_edge (TYPE_VALUES (expr));
578 else if (TREE_CODE (expr) == ARRAY_TYPE)
579 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
580 else if (RECORD_OR_UNION_TYPE_P (expr))
581 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
582 DFS_follow_tree_edge (t);
583 else if (TREE_CODE (expr) == FUNCTION_TYPE
584 || TREE_CODE (expr) == METHOD_TYPE)
585 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
586
587 if (!POINTER_TYPE_P (expr))
588 DFS_follow_tree_edge (TYPE_MINVAL (expr));
589 DFS_follow_tree_edge (TYPE_MAXVAL (expr));
590 if (RECORD_OR_UNION_TYPE_P (expr))
591 DFS_follow_tree_edge (TYPE_BINFO (expr));
592 }
593
594 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
595 {
596 DFS_follow_tree_edge (TREE_PURPOSE (expr));
597 DFS_follow_tree_edge (TREE_VALUE (expr));
598 DFS_follow_tree_edge (TREE_CHAIN (expr));
599 }
600
601 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
602 {
603 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
604 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
605 }
606
607 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
608 {
609 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
610 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
611 DFS_follow_tree_edge (TREE_BLOCK (expr));
612 }
613
614 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
615 {
616 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
617 /* ??? FIXME. See also streamer_write_chain. */
618 if (!(VAR_OR_FUNCTION_DECL_P (t)
619 && DECL_EXTERNAL (t)))
620 DFS_follow_tree_edge (t);
621
622 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
623
624 /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
625 handle - those that represent inlined function scopes.
626 For the rest, drop them on the floor instead of ICEing
627 in dwarf2out.c. */
628 if (inlined_function_outer_scope_p (expr))
629 {
630 tree ultimate_origin = block_ultimate_origin (expr);
631 DFS_follow_tree_edge (ultimate_origin);
632 }
633 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
634 information for early inlined BLOCKs so drop it on the floor instead
635 of ICEing in dwarf2out.c. */
636
637 /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
638 streaming time. */
639
640 /* Do not output BLOCK_SUBBLOCKS. Instead on streaming-in this
641 list is re-constructed from BLOCK_SUPERCONTEXT. */
642 }
643
644 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
645 {
646 unsigned i;
647 tree t;
648
649 /* Note that the number of BINFO slots has already been emitted in
650 EXPR's header (see streamer_write_tree_header) because this length
651 is needed to build the empty BINFO node on the reader side. */
652 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
653 DFS_follow_tree_edge (t);
654 DFS_follow_tree_edge (BINFO_OFFSET (expr));
655 DFS_follow_tree_edge (BINFO_VTABLE (expr));
656 DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));
657
658 /* The number of BINFO_BASE_ACCESSES has already been emitted in
659 EXPR's bitfield section. */
660 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
661 DFS_follow_tree_edge (t);
662
663 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
664 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
665 }
666
667 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
668 {
669 unsigned i;
670 tree index, value;
671
672 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
673 {
674 DFS_follow_tree_edge (index);
675 DFS_follow_tree_edge (value);
676 }
677 }
678
679 if (code == OMP_CLAUSE)
680 {
681 int i;
682 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
683 DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
684 DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
685 }
686
687 #undef DFS_follow_tree_edge
688 }
689
690 /* Return a hash value for the tree T. */
691
692 static hashval_t
693 hash_tree (struct streamer_tree_cache_d *cache, tree t)
694 {
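/* Mix the cached hash of SIBLING into V, but only if SIBLING is non-NULL and has already been entered into the streamer cache.  */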
695 #define visit(SIBLING) \
696 do { \
697 unsigned ix; \
698 if (SIBLING && streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
699 v = iterative_hash_hashval_t (streamer_tree_cache_get_hash (cache, ix), v); \
700 } while (0)
701
702 /* Hash TS_BASE. */
703 enum tree_code code = TREE_CODE (t);
704 hashval_t v = iterative_hash_host_wide_int (code, 0);
705 if (!TYPE_P (t))
706 {
707 v = iterative_hash_host_wide_int (TREE_SIDE_EFFECTS (t)
708 | (TREE_CONSTANT (t) << 1)
709 | (TREE_READONLY (t) << 2)
710 | (TREE_PUBLIC (t) << 3), v);
711 }
712 v = iterative_hash_host_wide_int (TREE_ADDRESSABLE (t)
713 | (TREE_THIS_VOLATILE (t) << 1), v);
714 if (DECL_P (t))
715 v = iterative_hash_host_wide_int (DECL_UNSIGNED (t), v);
716 else if (TYPE_P (t))
717 v = iterative_hash_host_wide_int (TYPE_UNSIGNED (t), v);
718 if (TYPE_P (t))
719 v = iterative_hash_host_wide_int (TYPE_ARTIFICIAL (t), v);
720 else
721 v = iterative_hash_host_wide_int (TREE_NO_WARNING (t), v);
722 v = iterative_hash_host_wide_int (TREE_NOTHROW (t)
723 | (TREE_STATIC (t) << 1)
724 | (TREE_PROTECTED (t) << 2)
725 | (TREE_DEPRECATED (t) << 3), v);
726 if (code != TREE_BINFO)
727 v = iterative_hash_host_wide_int (TREE_PRIVATE (t), v);
728 if (TYPE_P (t))
729 v = iterative_hash_host_wide_int (TYPE_SATURATING (t)
730 | (TYPE_ADDR_SPACE (t) << 1), v);
731 else if (code == SSA_NAME)
732 v = iterative_hash_host_wide_int (SSA_NAME_IS_DEFAULT_DEF (t), v);
733
734 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
735 {
736 int i;
737 v = iterative_hash_host_wide_int (TREE_INT_CST_NUNITS (t), v);
738 v = iterative_hash_host_wide_int (TREE_INT_CST_EXT_NUNITS (t), v);
739 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
740 v = iterative_hash_host_wide_int (TREE_INT_CST_ELT (t, i), v);
741 }
742
743 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
744 {
745 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
746 v = iterative_hash_host_wide_int (r.cl, v);
747 v = iterative_hash_host_wide_int (r.decimal
748 | (r.sign << 1)
749 | (r.signalling << 2)
750 | (r.canonical << 3), v);
751 v = iterative_hash_host_wide_int (r.uexp, v);
752 for (unsigned i = 0; i < SIGSZ; ++i)
753 v = iterative_hash_host_wide_int (r.sig[i], v);
754 }
755
756 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
757 {
758 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
759 v = iterative_hash_host_wide_int (f.mode, v);
760 v = iterative_hash_host_wide_int (f.data.low, v);
761 v = iterative_hash_host_wide_int (f.data.high, v);
762 }
763
764 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
765 {
766 v = iterative_hash_host_wide_int (DECL_MODE (t), v);
767 v = iterative_hash_host_wide_int (DECL_NONLOCAL (t)
768 | (DECL_VIRTUAL_P (t) << 1)
769 | (DECL_IGNORED_P (t) << 2)
770 | (DECL_ABSTRACT (t) << 3)
771 | (DECL_ARTIFICIAL (t) << 4)
772 | (DECL_USER_ALIGN (t) << 5)
773 | (DECL_PRESERVE_P (t) << 6)
774 | (DECL_EXTERNAL (t) << 7)
775 | (DECL_GIMPLE_REG_P (t) << 8), v);
776 v = iterative_hash_host_wide_int (DECL_ALIGN (t), v);
777 if (code == LABEL_DECL)
778 {
779 v = iterative_hash_host_wide_int (EH_LANDING_PAD_NR (t), v);
780 v = iterative_hash_host_wide_int (LABEL_DECL_UID (t), v);
781 }
782 else if (code == FIELD_DECL)
783 {
784 v = iterative_hash_host_wide_int (DECL_PACKED (t)
785 | (DECL_NONADDRESSABLE_P (t) << 1),
786 v);
787 v = iterative_hash_host_wide_int (DECL_OFFSET_ALIGN (t), v);
788 }
789 else if (code == VAR_DECL)
790 {
791 v = iterative_hash_host_wide_int (DECL_HAS_DEBUG_EXPR_P (t)
792 | (DECL_NONLOCAL_FRAME (t) << 1),
793 v);
794 }
795 if (code == RESULT_DECL
796 || code == PARM_DECL
797 || code == VAR_DECL)
798 {
799 v = iterative_hash_host_wide_int (DECL_BY_REFERENCE (t), v);
800 if (code == VAR_DECL
801 || code == PARM_DECL)
802 v = iterative_hash_host_wide_int (DECL_HAS_VALUE_EXPR_P (t), v);
803 }
804 }
805
806 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
807 v = iterative_hash_host_wide_int (DECL_REGISTER (t), v);
808
809 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
810 {
811 v = iterative_hash_host_wide_int ((DECL_COMMON (t))
812 | (DECL_DLLIMPORT_P (t) << 1)
813 | (DECL_WEAK (t) << 2)
814 | (DECL_SEEN_IN_BIND_EXPR_P (t) << 3)
815 | (DECL_COMDAT (t) << 4)
816 | (DECL_VISIBILITY_SPECIFIED (t) << 6),
817 v);
818 v = iterative_hash_host_wide_int (DECL_VISIBILITY (t), v);
819 if (code == VAR_DECL)
820 {
821 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
822 v = iterative_hash_host_wide_int (DECL_HARD_REGISTER (t)
823 | (DECL_IN_CONSTANT_POOL (t) << 1),
824 v);
825 }
826 if (TREE_CODE (t) == FUNCTION_DECL)
827 v = iterative_hash_host_wide_int (DECL_FINAL_P (t)
828 | (DECL_CXX_CONSTRUCTOR_P (t) << 1)
829 | (DECL_CXX_DESTRUCTOR_P (t) << 2),
830 v);
831 }
832
833 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
834 {
835 v = iterative_hash_host_wide_int (DECL_BUILT_IN_CLASS (t), v);
836 v = iterative_hash_host_wide_int (DECL_STATIC_CONSTRUCTOR (t)
837 | (DECL_STATIC_DESTRUCTOR (t) << 1)
838 | (DECL_UNINLINABLE (t) << 2)
839 | (DECL_POSSIBLY_INLINED (t) << 3)
840 | (DECL_IS_NOVOPS (t) << 4)
841 | (DECL_IS_RETURNS_TWICE (t) << 5)
842 | (DECL_IS_MALLOC (t) << 6)
843 | (DECL_IS_OPERATOR_NEW (t) << 7)
844 | (DECL_DECLARED_INLINE_P (t) << 8)
845 | (DECL_STATIC_CHAIN (t) << 9)
846 | (DECL_NO_INLINE_WARNING_P (t) << 10)
847 | (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t) << 11)
848 | (DECL_NO_LIMIT_STACK (t) << 12)
849 | (DECL_DISREGARD_INLINE_LIMITS (t) << 13)
850 | (DECL_PURE_P (t) << 14)
851 | (DECL_LOOPING_CONST_OR_PURE_P (t) << 15), v);
852 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
853 v = iterative_hash_host_wide_int (DECL_FUNCTION_CODE (t), v);
854 }
855
856 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
857 {
858 v = iterative_hash_host_wide_int (TYPE_MODE (t), v);
859 v = iterative_hash_host_wide_int (TYPE_STRING_FLAG (t)
860 | (TYPE_NO_FORCE_BLK (t) << 1)
861 | (TYPE_NEEDS_CONSTRUCTING (t) << 2)
862 | (TYPE_PACKED (t) << 3)
863 | (TYPE_RESTRICT (t) << 4)
864 | (TYPE_USER_ALIGN (t) << 5)
865 | (TYPE_READONLY (t) << 6), v);
866 if (RECORD_OR_UNION_TYPE_P (t))
867 {
868 v = iterative_hash_host_wide_int (TYPE_TRANSPARENT_AGGR (t)
869 | (TYPE_FINAL_P (t) << 1), v);
870 }
871 else if (code == ARRAY_TYPE)
872 v = iterative_hash_host_wide_int (TYPE_NONALIASED_COMPONENT (t), v);
873 v = iterative_hash_host_wide_int (TYPE_PRECISION (t), v);
874 v = iterative_hash_host_wide_int (TYPE_ALIGN (t), v);
875 v = iterative_hash_host_wide_int ((TYPE_ALIAS_SET (t) == 0
876 || (!in_lto_p
877 && get_alias_set (t) == 0))
878 ? 0 : -1, v);
879 }
880
881 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
882 v = iterative_hash (TRANSLATION_UNIT_LANGUAGE (t),
883 strlen (TRANSLATION_UNIT_LANGUAGE (t)), v);
884
885 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
886 gcc_unreachable ();
887
888 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
889 v = iterative_hash (t, sizeof (struct cl_optimization), v);
890
891 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
892 v = iterative_hash_host_wide_int (IDENTIFIER_HASH_VALUE (t), v);
893
894 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
895 v = iterative_hash (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t), v);
896
897 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
898 {
899 if (POINTER_TYPE_P (t))
900 {
901 /* For pointers factor in the pointed-to type recursively as
902 we cannot recurse through only pointers.
903 ??? We can generalize this by keeping track of the
904 in-SCC edges for each tree (or arbitrarily the first
905 such edge) and hashing that in, in a second stage
906 (instead of the quadratic mixing of the SCC we do now). */
907 hashval_t x;
908 unsigned ix;
909 if (streamer_tree_cache_lookup (cache, TREE_TYPE (t), &ix))
910 x = streamer_tree_cache_get_hash (cache, ix);
911 else
912 x = hash_tree (cache, TREE_TYPE (t));
913 v = iterative_hash_hashval_t (x, v);
914 }
915 else if (code != IDENTIFIER_NODE)
916 visit (TREE_TYPE (t));
917 }
918
919 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
920 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
921 visit (VECTOR_CST_ELT (t, i));
922
923 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
924 {
925 visit (TREE_REALPART (t));
926 visit (TREE_IMAGPART (t));
927 }
928
929 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
930 {
931 /* Drop names that were created for anonymous entities. */
932 if (DECL_NAME (t)
933 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
934 && ANON_AGGRNAME_P (DECL_NAME (t)))
935 ;
936 else
937 visit (DECL_NAME (t));
938 if (DECL_FILE_SCOPE_P (t))
939 ;
940 else
941 visit (DECL_CONTEXT (t));
942 }
943
944 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
945 {
946 visit (DECL_SIZE (t));
947 visit (DECL_SIZE_UNIT (t));
948 visit (DECL_ATTRIBUTES (t));
949 if ((code == VAR_DECL
950 || code == PARM_DECL)
951 && DECL_HAS_VALUE_EXPR_P (t))
952 visit (DECL_VALUE_EXPR (t));
953 if (code == VAR_DECL
954 && DECL_HAS_DEBUG_EXPR_P (t))
955 visit (DECL_DEBUG_EXPR (t));
956 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
957 be able to call get_symbol_initial_value. */
958 }
959
960 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
961 {
962 if (code == TYPE_DECL)
963 visit (DECL_ORIGINAL_TYPE (t));
964 }
965
966 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
967 {
968 if (DECL_ASSEMBLER_NAME_SET_P (t))
969 visit (DECL_ASSEMBLER_NAME (t));
970 }
971
972 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
973 {
974 visit (DECL_FIELD_OFFSET (t));
975 visit (DECL_BIT_FIELD_TYPE (t));
976 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
977 visit (DECL_FIELD_BIT_OFFSET (t));
978 visit (DECL_FCONTEXT (t));
979 }
980
981 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
982 {
983 visit (DECL_VINDEX (t));
984 visit (DECL_FUNCTION_PERSONALITY (t));
985 /* Do not follow DECL_FUNCTION_SPECIFIC_TARGET. */
986 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
987 }
988
989 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
990 {
991 visit (TYPE_SIZE (t));
992 visit (TYPE_SIZE_UNIT (t));
993 visit (TYPE_ATTRIBUTES (t));
994 visit (TYPE_NAME (t));
995 visit (TYPE_MAIN_VARIANT (t));
996 if (TYPE_FILE_SCOPE_P (t))
997 ;
998 else
999 visit (TYPE_CONTEXT (t));
1000 visit (TYPE_STUB_DECL (t));
1001 }
1002
1003 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1004 {
1005 if (code == ENUMERAL_TYPE)
1006 visit (TYPE_VALUES (t));
1007 else if (code == ARRAY_TYPE)
1008 visit (TYPE_DOMAIN (t));
1009 else if (RECORD_OR_UNION_TYPE_P (t))
1010 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1011 visit (f);
1012 else if (code == FUNCTION_TYPE
1013 || code == METHOD_TYPE)
1014 visit (TYPE_ARG_TYPES (t));
1015 if (!POINTER_TYPE_P (t))
1016 visit (TYPE_MINVAL (t));
1017 visit (TYPE_MAXVAL (t));
1018 if (RECORD_OR_UNION_TYPE_P (t))
1019 visit (TYPE_BINFO (t));
1020 }
1021
1022 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1023 {
1024 visit (TREE_PURPOSE (t));
1025 visit (TREE_VALUE (t));
1026 visit (TREE_CHAIN (t));
1027 }
1028
1029 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1030 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1031 visit (TREE_VEC_ELT (t, i));
1032
1033 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1034 {
1035 v = iterative_hash_host_wide_int (TREE_OPERAND_LENGTH (t), v);
1036 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1037 visit (TREE_OPERAND (t, i));
1038 }
1039
1040 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1041 {
1042 unsigned i;
1043 tree b;
1044 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1045 visit (b);
1046 visit (BINFO_OFFSET (t));
1047 visit (BINFO_VTABLE (t));
1048 visit (BINFO_VPTR_FIELD (t));
1049 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1050 visit (b);
1051 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1052 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1053 }
1054
1055 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1056 {
1057 unsigned i;
1058 tree index, value;
1059 v = iterative_hash_host_wide_int (CONSTRUCTOR_NELTS (t), v);
1060 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1061 {
1062 visit (index);
1063 visit (value);
1064 }
1065 }
1066
1067 if (code == OMP_CLAUSE)
1068 {
1069 int i;
1070
1071 v = iterative_hash_host_wide_int (OMP_CLAUSE_CODE (t), v);
1072 switch (OMP_CLAUSE_CODE (t))
1073 {
1074 case OMP_CLAUSE_DEFAULT:
1075 v = iterative_hash_host_wide_int (OMP_CLAUSE_DEFAULT_KIND (t), v);
1076 break;
1077 case OMP_CLAUSE_SCHEDULE:
1078 v = iterative_hash_host_wide_int (OMP_CLAUSE_SCHEDULE_KIND (t), v);
1079 break;
1080 case OMP_CLAUSE_DEPEND:
1081 v = iterative_hash_host_wide_int (OMP_CLAUSE_DEPEND_KIND (t), v);
1082 break;
1083 case OMP_CLAUSE_MAP:
1084 v = iterative_hash_host_wide_int (OMP_CLAUSE_MAP_KIND (t), v);
1085 break;
1086 case OMP_CLAUSE_PROC_BIND:
1087 v = iterative_hash_host_wide_int (OMP_CLAUSE_PROC_BIND_KIND (t), v);
1088 break;
1089 case OMP_CLAUSE_REDUCTION:
1090 v = iterative_hash_host_wide_int (OMP_CLAUSE_REDUCTION_CODE (t), v);
1091 break;
1092 default:
1093 break;
1094 }
1095 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1096 visit (OMP_CLAUSE_OPERAND (t, i));
1097 visit (OMP_CLAUSE_CHAIN (t));
1098 }
1099
1100 return v;
1101
1102 #undef visit
1103 }
1104
1105 /* Compare two SCC entries by their hash value for qsorting them. */
1106
1107 static int
1108 scc_entry_compare (const void *p1_, const void *p2_)
1109 {
1110 const scc_entry *p1 = (const scc_entry *) p1_;
1111 const scc_entry *p2 = (const scc_entry *) p2_;
1112 if (p1->hash < p2->hash)
1113 return -1;
1114 else if (p1->hash > p2->hash)
1115 return 1;
1116 return 0;
1117 }
1118
1119 /* Return a hash value for the SCC on the SCC stack from FIRST with
1120 size SIZE. */
1121
1122 static hashval_t
1123 hash_scc (struct streamer_tree_cache_d *cache, unsigned first, unsigned size)
1124 {
1125 /* Compute hash values for the SCC members. */
1126 for (unsigned i = 0; i < size; ++i)
1127 sccstack[first+i].hash = hash_tree (cache, sccstack[first+i].t);
1128
1129 if (size == 1)
1130 return sccstack[first].hash;
1131
1132 /* Sort the (tree, hash) pairs of the SCC so that when we mix in
1133 all members of the SCC the hash value becomes independent of
1134 the order we visited the SCC. Disregard hashes equal to
1135 the hash of the tree we mix into because we cannot guarantee
1136 a stable sort for those across different TUs. */
1137 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1138 hashval_t *tem = XALLOCAVEC (hashval_t, size);
1139 for (unsigned i = 0; i < size; ++i)
1140 {
1141 hashval_t hash = sccstack[first+i].hash;
1142 hashval_t orig_hash = hash;
1143 unsigned j;
1144 /* Skip same hashes. */
1145 for (j = i + 1;
1146 j < size && sccstack[first+j].hash == orig_hash; ++j)
1147 ;
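/* Mix in the hashes of all other members of the SCC, wrapping around the sorted array, but leaving out the run of entries that share this entry's own hash.  */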
1148 for (; j < size; ++j)
1149 hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
1150 for (j = 0; sccstack[first+j].hash != orig_hash; ++j)
1151 hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
1152 tem[i] = hash;
1153 }
1154 hashval_t scc_hash = 0;
1155 for (unsigned i = 0; i < size; ++i)
1156 {
1157 sccstack[first+i].hash = tem[i];
1158 scc_hash = iterative_hash_hashval_t (tem[i], scc_hash);
1159 }
1160 return scc_hash;
1161 }
1162
1163 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1164 already in the streamer cache. Main routine called for
1165 each visit of EXPR. */
1166
1167 static void
1168 DFS_write_tree (struct output_block *ob, sccs *from_state,
1169 tree expr, bool ref_p, bool this_ref_p)
1170 {
1171 unsigned ix;
1172 sccs **slot;
1173
1174 /* Handle special cases. */
1175 if (expr == NULL_TREE)
1176 return;
1177
1178 /* Do not DFS walk into indexable trees. */
1179 if (this_ref_p && tree_is_indexable (expr))
1180 return;
1181
1182 /* Check if we already streamed EXPR. */
1183 if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
1184 return;
1185
1186 slot = (sccs **)pointer_map_insert (sccstate, expr);
1187 sccs *cstate = *slot;
1188 if (!cstate)
1189 {
1190 scc_entry e = { expr, 0 };
1191 /* Not yet visited. DFS recurse and push it onto the stack. */
1192 *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
1193 sccstack.safe_push (e);
1194 cstate->dfsnum = next_dfs_num++;
1195 cstate->low = cstate->dfsnum;
1196
1197 if (streamer_handle_as_builtin_p (expr))
1198 ;
1199 else if (TREE_CODE (expr) == INTEGER_CST
1200 && !TREE_OVERFLOW (expr))
1201 DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
1202 else
1203 {
1204 DFS_write_tree_body (ob, expr, cstate, ref_p);
1205
1206 /* Walk any LTO-specific edges. */
1207 if (DECL_P (expr)
1208 && TREE_CODE (expr) != FUNCTION_DECL
1209 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
1210 {
1211 /* Handle DECL_INITIAL for symbols. */
1212 tree initial = get_symbol_initial_value (ob, expr);
1213 DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
1214 }
1215 }
1216
1217 /* See if we found an SCC. */
1218 if (cstate->low == cstate->dfsnum)
1219 {
1220 unsigned first, size;
1221 tree x;
1222
1223 /* Pop the SCC and compute its size. */
1224 first = sccstack.length ();
1225 do
1226 {
1227 x = sccstack[--first].t;
1228 }
1229 while (x != expr);
1230 size = sccstack.length () - first;
1231
1232 /* No need to compute hashes for LTRANS units, we don't perform
1233 any merging there. */
1234 hashval_t scc_hash = 0;
1235 unsigned scc_entry_len = 0;
1236 if (!flag_wpa)
1237 {
1238 scc_hash = hash_scc (ob->writer_cache, first, size);
1239
1240 /* Put the entries with the least number of collisions first. */
1241 unsigned entry_start = 0;
1242 scc_entry_len = size + 1;
1243 for (unsigned i = 0; i < size;)
1244 {
1245 unsigned from = i;
1246 for (i = i + 1; i < size
1247 && (sccstack[first + i].hash
1248 == sccstack[first + from].hash); ++i)
1249 ;
1250 if (i - from < scc_entry_len)
1251 {
1252 scc_entry_len = i - from;
1253 entry_start = from;
1254 }
1255 }
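/* Move the run of entries with the fewest hash collisions to the front of the SCC by swapping; these become the SCC entry candidates streamed first.  */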
1256 for (unsigned i = 0; i < scc_entry_len; ++i)
1257 {
1258 scc_entry tem = sccstack[first + i];
1259 sccstack[first + i] = sccstack[first + entry_start + i];
1260 sccstack[first + entry_start + i] = tem;
1261 }
1262 }
1263
1264 /* Write LTO_tree_scc. */
1265 streamer_write_record_start (ob, LTO_tree_scc);
1266 streamer_write_uhwi (ob, size);
1267 streamer_write_uhwi (ob, scc_hash);
1268
1269 /* Write size-1 SCCs without wrapping them inside SCC bundles.
1270 All INTEGER_CSTs need to be handled this way as we need
1271 their type to materialize them. Also builtins are handled
1272 this way.
1273 ??? We still wrap these in LTO_tree_scc so at the
1274 input side we can properly identify the tree we want
1275 to ultimately return. */
1276 if (size == 1)
1277 lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
1278 else
1279 {
1280 /* Write the size of the SCC entry candidates. */
1281 streamer_write_uhwi (ob, scc_entry_len);
1282
1283 /* Write all headers and populate the streamer cache. */
1284 for (unsigned i = 0; i < size; ++i)
1285 {
1286 hashval_t hash = sccstack[first+i].hash;
1287 tree t = sccstack[first+i].t;
1288 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
1289 t, hash, &ix);
1290 gcc_assert (!exists_p);
1291
1292 if (!lto_is_streamable (t))
1293 internal_error ("tree code %qs is not supported "
1294 "in LTO streams",
1295 get_tree_code_name (TREE_CODE (t)));
1296
1297 gcc_checking_assert (!streamer_handle_as_builtin_p (t));
1298
1299 /* Write the header, containing everything needed to
1300 materialize EXPR on the reading side. */
1301 streamer_write_tree_header (ob, t);
1302 }
1303
1304 /* Write the bitpacks and tree references. */
1305 for (unsigned i = 0; i < size; ++i)
1306 {
1307 lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
1308
1309 /* Mark the end of the tree. */
1310 streamer_write_zero (ob);
1311 }
1312 }
1313
1314 /* Finally truncate the vector. */
1315 sccstack.truncate (first);
1316
1317 if (from_state)
1318 from_state->low = MIN (from_state->low, cstate->low);
1319 return;
1320 }
1321
1322 if (from_state)
1323 from_state->low = MIN (from_state->low, cstate->low);
1324 }
1325 gcc_checking_assert (from_state);
1326 if (cstate->dfsnum < from_state->dfsnum)
1327 from_state->low = MIN (cstate->dfsnum, from_state->low);
1328 }
1329
1330
1331 /* Emit the physical representation of tree node EXPR to output block
1332 OB. If THIS_REF_P is true and EXPR is indexable, a reference to EXPR
1333 is emitted instead. REF_P is used for streaming the children of EXPR. */
1334
1335 void
1336 lto_output_tree (struct output_block *ob, tree expr,
1337 bool ref_p, bool this_ref_p)
1338 {
1339 unsigned ix;
1340 bool existed_p;
1341
1342 if (expr == NULL_TREE)
1343 {
1344 streamer_write_record_start (ob, LTO_null);
1345 return;
1346 }
1347
1348 if (this_ref_p && tree_is_indexable (expr))
1349 {
1350 lto_output_tree_ref (ob, expr);
1351 return;
1352 }
1353
1354 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1355 if (existed_p)
1356 {
1357 /* If a node has already been streamed out, make sure that
1358 we don't write it more than once. Otherwise, the reader
1359 will instantiate two different nodes for the same object. */
1360 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1361 streamer_write_uhwi (ob, ix);
1362 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1363 lto_tree_code_to_tag (TREE_CODE (expr)));
1364 lto_stats.num_pickle_refs_output++;
1365 }
1366 else
1367 {
1368 /* This is the first time we see EXPR, write all reachable
1369 trees to OB. */
1370 static bool in_dfs_walk;
1371
1372 /* Protect against recursion, which would mean a disconnect between
1373 the tree edges we walk in the DFS walk and the edges
1374 we stream out. */
1375 gcc_assert (!in_dfs_walk);
1376
1377 /* Start the DFS walk. */
1378 /* Save ob state ... */
1379 /* let's see ... */
1380 in_dfs_walk = true;
1381 sccstate = pointer_map_create ();
1382 gcc_obstack_init (&sccstate_obstack);
1383 next_dfs_num = 1;
1384 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
1385 sccstack.release ();
1386 pointer_map_destroy (sccstate);
1387 obstack_free (&sccstate_obstack, NULL);
1388 in_dfs_walk = false;
1389
1390 /* Finally append a reference to the tree we were writing.
1391 ??? If expr ended up as a singleton we could have
1392 inlined it here and avoid outputting a reference. */
1393 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1394 gcc_assert (existed_p);
1395 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1396 streamer_write_uhwi (ob, ix);
1397 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1398 lto_tree_code_to_tag (TREE_CODE (expr)));
1399 lto_stats.num_pickle_refs_output++;
1400 }
1401 }
1402
1403
1404 /* Output to OB a list of try/catch handlers starting with FIRST. */
1405
1406 static void
1407 output_eh_try_list (struct output_block *ob, eh_catch first)
1408 {
1409 eh_catch n;
1410
1411 for (n = first; n; n = n->next_catch)
1412 {
1413 streamer_write_record_start (ob, LTO_eh_catch);
1414 stream_write_tree (ob, n->type_list, true);
1415 stream_write_tree (ob, n->filter_list, true);
1416 stream_write_tree (ob, n->label, true);
1417 }
1418
1419 streamer_write_record_start (ob, LTO_null);
1420 }
1421
1422
1423 /* Output EH region R to OB. R may be NULL, in which case an
1424 LTO_null record is emitted so the reader can restore the
1425 corresponding empty slot in the region array. */
1426
1427 static void
1428 output_eh_region (struct output_block *ob, eh_region r)
1429 {
1430 enum LTO_tags tag;
1431
1432 if (r == NULL)
1433 {
1434 streamer_write_record_start (ob, LTO_null);
1435 return;
1436 }
1437
1438 if (r->type == ERT_CLEANUP)
1439 tag = LTO_ert_cleanup;
1440 else if (r->type == ERT_TRY)
1441 tag = LTO_ert_try;
1442 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1443 tag = LTO_ert_allowed_exceptions;
1444 else if (r->type == ERT_MUST_NOT_THROW)
1445 tag = LTO_ert_must_not_throw;
1446 else
1447 gcc_unreachable ();
1448
1449 streamer_write_record_start (ob, tag);
1450 streamer_write_hwi (ob, r->index);
1451
1452 if (r->outer)
1453 streamer_write_hwi (ob, r->outer->index);
1454 else
1455 streamer_write_zero (ob);
1456
1457 if (r->inner)
1458 streamer_write_hwi (ob, r->inner->index);
1459 else
1460 streamer_write_zero (ob);
1461
1462 if (r->next_peer)
1463 streamer_write_hwi (ob, r->next_peer->index);
1464 else
1465 streamer_write_zero (ob);
1466
1467 if (r->type == ERT_TRY)
1468 {
1469 output_eh_try_list (ob, r->u.eh_try.first_catch);
1470 }
1471 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1472 {
1473 stream_write_tree (ob, r->u.allowed.type_list, true);
1474 stream_write_tree (ob, r->u.allowed.label, true);
1475 streamer_write_uhwi (ob, r->u.allowed.filter);
1476 }
1477 else if (r->type == ERT_MUST_NOT_THROW)
1478 {
1479 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1480 bitpack_d bp = bitpack_create (ob->main_stream);
1481 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1482 streamer_write_bitpack (&bp);
1483 }
1484
1485 if (r->landing_pads)
1486 streamer_write_hwi (ob, r->landing_pads->index);
1487 else
1488 streamer_write_zero (ob);
1489 }
1490
1491
1492 /* Output landing pad LP to OB. */
1493
1494 static void
1495 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1496 {
1497 if (lp == NULL)
1498 {
1499 streamer_write_record_start (ob, LTO_null);
1500 return;
1501 }
1502
1503 streamer_write_record_start (ob, LTO_eh_landing_pad);
1504 streamer_write_hwi (ob, lp->index);
1505 if (lp->next_lp)
1506 streamer_write_hwi (ob, lp->next_lp->index);
1507 else
1508 streamer_write_zero (ob);
1509
1510 if (lp->region)
1511 streamer_write_hwi (ob, lp->region->index);
1512 else
1513 streamer_write_zero (ob);
1514
1515 stream_write_tree (ob, lp->post_landing_pad, true);
1516 }
1517
1518
1519 /* Output the existing eh_table to OB. */
1520
1521 static void
1522 output_eh_regions (struct output_block *ob, struct function *fn)
1523 {
1524 if (fn->eh && fn->eh->region_tree)
1525 {
1526 unsigned i;
1527 eh_region eh;
1528 eh_landing_pad lp;
1529 tree ttype;
1530
1531 streamer_write_record_start (ob, LTO_eh_table);
1532
1533 /* Emit the index of the root of the EH region tree. */
1534 streamer_write_hwi (ob, fn->eh->region_tree->index);
1535
1536 /* Emit all the EH regions in the region array. */
1537 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1538 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1539 output_eh_region (ob, eh);
1540
1541 /* Emit all landing pads. */
1542 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1543 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1544 output_eh_lp (ob, lp);
1545
1546 /* Emit all the runtime type data. */
1547 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1548 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1549 stream_write_tree (ob, ttype, true);
1550
1551 /* Emit the table of action chains. */
1552 if (targetm.arm_eabi_unwinder)
1553 {
1554 tree t;
1555 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1556 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1557 stream_write_tree (ob, t, true);
1558 }
1559 else
1560 {
1561 uchar c;
1562 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1563 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1564 streamer_write_char_stream (ob->main_stream, c);
1565 }
1566 }
1567
1568 /* The LTO_null either terminates the record or indicates that there
1569 are no eh_records at all. */
1570 streamer_write_record_start (ob, LTO_null);
1571 }
1572
1573
1574 /* Output all of the active ssa names to the ssa_names stream. */
1575
1576 static void
1577 output_ssa_names (struct output_block *ob, struct function *fn)
1578 {
1579 unsigned int i, len;
1580
1581 len = vec_safe_length (SSANAMES (fn));
1582 streamer_write_uhwi (ob, len);
1583
1584 for (i = 1; i < len; i++)
1585 {
1586 tree ptr = (*SSANAMES (fn))[i];
1587
1588 if (ptr == NULL_TREE
1589 || SSA_NAME_IN_FREE_LIST (ptr)
1590 || virtual_operand_p (ptr))
1591 continue;
1592
1593 streamer_write_uhwi (ob, i);
1594 streamer_write_char_stream (ob->main_stream,
1595 SSA_NAME_IS_DEFAULT_DEF (ptr));
1596 if (SSA_NAME_VAR (ptr))
1597 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1598 else
1599 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1600 stream_write_tree (ob, TREE_TYPE (ptr), true);
1601 }
1602
1603 streamer_write_zero (ob);
1604 }
1605
1606
1607 /* Output a wide-int. */
1608
1609 static void
1610 streamer_write_wi (struct output_block *ob,
1611 const widest_int &w)
1612 {
1613 int len = w.get_len ();
1614
1615 streamer_write_uhwi (ob, w.get_precision ());
1616 streamer_write_uhwi (ob, len);
1617 for (int i = 0; i < len; i++)
1618 streamer_write_hwi (ob, w.elt (i));
1619 }
1620
1621
1622 /* Output the cfg. */
1623
1624 static void
1625 output_cfg (struct output_block *ob, struct function *fn)
1626 {
1627 struct lto_output_stream *tmp_stream = ob->main_stream;
1628 basic_block bb;
1629
1630 ob->main_stream = ob->cfg_stream;
1631
1632 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1633 profile_status_for_fn (fn));
1634
1635 /* Output the number of the highest basic block. */
1636 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1637
1638 FOR_ALL_BB_FN (bb, fn)
1639 {
1640 edge_iterator ei;
1641 edge e;
1642
1643 streamer_write_hwi (ob, bb->index);
1644
1645 /* Output the successors and the edge flags. */
1646 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1647 FOR_EACH_EDGE (e, ei, bb->succs)
1648 {
1649 streamer_write_uhwi (ob, e->dest->index);
1650 streamer_write_hwi (ob, e->probability);
1651 streamer_write_gcov_count (ob, e->count);
1652 streamer_write_uhwi (ob, e->flags);
1653 }
1654 }
1655
1656 streamer_write_hwi (ob, -1);
1657
1658 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1659 while (bb->next_bb)
1660 {
1661 streamer_write_hwi (ob, bb->next_bb->index);
1662 bb = bb->next_bb;
1663 }
1664
1665 streamer_write_hwi (ob, -1);
1666
1667 /* ??? The cfgloop interface is tied to cfun. */
1668 gcc_assert (cfun == fn);
1669
1670 /* Output the number of loops. */
1671 streamer_write_uhwi (ob, number_of_loops (fn));
1672
1673 /* Output each loop, skipping the tree root which has number zero. */
1674 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1675 {
1676 struct loop *loop = get_loop (fn, i);
1677
1678 /* Write the index of the loop header. That's enough to rebuild
1679 the loop tree on the reader side. Stream -1 for an unused
1680 loop entry. */
1681 if (!loop)
1682 {
1683 streamer_write_hwi (ob, -1);
1684 continue;
1685 }
1686 else
1687 streamer_write_hwi (ob, loop->header->index);
1688
1689 /* Write everything copy_loop_info copies. */
1690 streamer_write_enum (ob->main_stream,
1691 loop_estimation, EST_LAST, loop->estimate_state);
1692 streamer_write_hwi (ob, loop->any_upper_bound);
1693 if (loop->any_upper_bound)
1694 streamer_write_wi (ob, loop->nb_iterations_upper_bound);
1695 streamer_write_hwi (ob, loop->any_estimate);
1696 if (loop->any_estimate)
1697 streamer_write_wi (ob, loop->nb_iterations_estimate);
1698
1699 /* Write OMP SIMD related info. */
1700 streamer_write_hwi (ob, loop->safelen);
1701 streamer_write_hwi (ob, loop->dont_vectorize);
1702 streamer_write_hwi (ob, loop->force_vectorize);
1703 stream_write_tree (ob, loop->simduid, true);
1704 }
1705
1706 ob->main_stream = tmp_stream;
1707 }
1708
1709
1710 /* Create the header in the file using OB. If the section type is for
1711 a function, FN must be the decl for that function. */
1712
1713 void
1714 produce_asm (struct output_block *ob, tree fn)
1715 {
1716 enum lto_section_type section_type = ob->section_type;
1717 struct lto_function_header header;
1718 char *section_name;
1719 struct lto_output_stream *header_stream;
1720
1721 if (section_type == LTO_section_function_body)
1722 {
1723 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1724 section_name = lto_get_section_name (section_type, name, NULL);
1725 }
1726 else
1727 section_name = lto_get_section_name (section_type, NULL, NULL);
1728
1729 lto_begin_section (section_name, !flag_wpa);
1730 free (section_name);
1731
1732 /* The entire header is computed here and then streamed out below. */
1733 memset (&header, 0, sizeof (struct lto_function_header));
1734
1735 /* Write the header. */
1736 header.lto_header.major_version = LTO_major_version;
1737 header.lto_header.minor_version = LTO_minor_version;
1738
1739 header.compressed_size = 0;
1740
1741 if (section_type == LTO_section_function_body)
1742 header.cfg_size = ob->cfg_stream->total_size;
1743 header.main_size = ob->main_stream->total_size;
1744 header.string_size = ob->string_stream->total_size;
1745
1746 header_stream = XCNEW (struct lto_output_stream);
1747 lto_output_data_stream (header_stream, &header, sizeof header);
1748 lto_write_stream (header_stream);
1749 free (header_stream);
1750
1751 /* Put all of the gimple and the string table out to the asm file as a
1752 block of text. */
1753 if (section_type == LTO_section_function_body)
1754 lto_write_stream (ob->cfg_stream);
1755 lto_write_stream (ob->main_stream);
1756 lto_write_stream (ob->string_stream);
1757
1758 lto_end_section ();
1759 }
1760
1761
1762 /* Output the base body of struct function FN using output block OB. */
1763
1764 static void
1765 output_struct_function_base (struct output_block *ob, struct function *fn)
1766 {
1767 struct bitpack_d bp;
1768 unsigned i;
1769 tree t;
1770
1771 /* Output the static chain and non-local goto save area. */
1772 stream_write_tree (ob, fn->static_chain_decl, true);
1773 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
1774
1775 /* Output all the local variables in the function. */
1776 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
1777 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
1778 stream_write_tree (ob, t, true);
1779
1780 /* Output current IL state of the function. */
1781 streamer_write_uhwi (ob, fn->curr_properties);
1782
1783 /* Write all the attributes for FN. */
1784 bp = bitpack_create (ob->main_stream);
1785 bp_pack_value (&bp, fn->is_thunk, 1);
1786 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
1787 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
1788 bp_pack_value (&bp, fn->returns_struct, 1);
1789 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
1790 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
1791 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
1792 bp_pack_value (&bp, fn->after_inlining, 1);
1793 bp_pack_value (&bp, fn->stdarg, 1);
1794 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
1795 bp_pack_value (&bp, fn->calls_alloca, 1);
1796 bp_pack_value (&bp, fn->calls_setjmp, 1);
1797 bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
1798 bp_pack_value (&bp, fn->has_simduid_loops, 1);
1799 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
1800 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
1801
1802 /* Output the function start and end loci. */
1803 stream_output_location (ob, &bp, fn->function_start_locus);
1804 stream_output_location (ob, &bp, fn->function_end_locus);
1805
1806 streamer_write_bitpack (&bp);
1807 }
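/* Editor's note: the flag bits packed above and the two locations streamed
   through stream_output_location with &bp are only finalized by the closing
   streamer_write_bitpack call, so the reader side has to unpack exactly the
   same fields in exactly the same order.  (Editorial observation about the
   code above, not original commentary.)  */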
1808
1809
1810 /* Output the body of function NODE->DECL. */
1811
1812 static void
1813 output_function (struct cgraph_node *node)
1814 {
1815 tree function;
1816 struct function *fn;
1817 basic_block bb;
1818 struct output_block *ob;
1819
1820 function = node->decl;
1821 fn = DECL_STRUCT_FUNCTION (function);
1822 ob = create_output_block (LTO_section_function_body);
1823
1824 clear_line_info (ob);
1825 ob->cgraph_node = node;
1826
1827 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
1828
1829 /* Set current_function_decl and cfun. */
1830 push_cfun (fn);
1831
1832 /* Make string 0 be a NULL string. */
1833 streamer_write_char_stream (ob->string_stream, 0);
1834
1835 streamer_write_record_start (ob, LTO_function);
1836
1837 /* Output decls for parameters and args. */
1838 stream_write_tree (ob, DECL_RESULT (function), true);
1839 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
1840
1841 /* Output DECL_INITIAL for the function, which contains the tree of
1842 lexical scopes. */
1843 stream_write_tree (ob, DECL_INITIAL (function), true);
1844
1845 /* We also stream abstract functions, for which we stream only the
1846 information needed for debug info. */
1847 if (gimple_has_body_p (function))
1848 {
1849 streamer_write_uhwi (ob, 1);
1850 output_struct_function_base (ob, fn);
1851
1852 /* Output all the SSA names used in the function. */
1853 output_ssa_names (ob, fn);
1854
1855 /* Output any exception handling regions. */
1856 output_eh_regions (ob, fn);
1857
1858
1859 /* We will renumber the statements. The code that does this uses
1860 the same ordering that we use for serializing them so we can use
1861 the same code on the other end and not have to write out the
1862 statement numbers. We only assign UIDs to non-virtual PHIs here
1863 because virtual PHIs get recomputed on the fly, which would make
1864 the numbers inconsistent. */
1865 set_gimple_stmt_max_uid (cfun, 0);
1866 FOR_ALL_BB_FN (bb, cfun)
1867 {
1868 gimple_stmt_iterator gsi;
1869 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1870 {
1871 gimple stmt = gsi_stmt (gsi);
1872
1873 /* Virtual PHIs are not going to be streamed. */
1874 if (!virtual_operand_p (gimple_phi_result (stmt)))
1875 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1876 }
1877 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1878 {
1879 gimple stmt = gsi_stmt (gsi);
1880 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1881 }
1882 }
1883 /* To avoid keeping duplicate gimple IDs in the statements, renumber
1884 virtual phis now. */
1885 FOR_ALL_BB_FN (bb, cfun)
1886 {
1887 gimple_stmt_iterator gsi;
1888 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1889 {
1890 gimple stmt = gsi_stmt (gsi);
1891 if (virtual_operand_p (gimple_phi_result (stmt)))
1892 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1893 }
1894 }
1895
1896 /* Output the code for the function. */
1897 FOR_ALL_BB_FN (bb, fn)
1898 output_bb (ob, bb, fn);
1899
1900 /* The terminator for this function. */
1901 streamer_write_record_start (ob, LTO_null);
1902
1903 output_cfg (ob, fn);
1904
1905 pop_cfun ();
1906 }
1907 else
1908 streamer_write_uhwi (ob, 0);
1909
1910 /* Create a section to hold the pickled output of this function. */
1911 produce_asm (ob, function);
1912
1913 destroy_output_block (ob);
1914 }
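/* Editor's note: a small worked example of the UID numbering done in
   output_function above (illustrative only).  For a function with two basic
   blocks, where bb1 has one real PHI and two statements and bb2 has one
   virtual PHI and one statement, the UIDs are assigned as:

       pass 1 (skips virtual PHIs):  bb1 PHI -> 0, bb1 stmt -> 1, bb1 stmt -> 2,
                                     bb2 stmt -> 3
       pass 2 (virtual PHIs only):   bb2 virtual PHI -> 4

   Because the reader walks basic blocks and statements in the same order, it
   can reproduce UIDs 0..3 without them ever being written to the stream.  */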
1915
1916
1917 /* Emit toplevel asms. */
1918
1919 void
1920 lto_output_toplevel_asms (void)
1921 {
1922 struct output_block *ob;
1923 struct asm_node *can;
1924 char *section_name;
1925 struct lto_output_stream *header_stream;
1926 struct lto_asm_header header;
1927
1928 if (! asm_nodes)
1929 return;
1930
1931 ob = create_output_block (LTO_section_asm);
1932
1933 /* Make string 0 be a NULL string. */
1934 streamer_write_char_stream (ob->string_stream, 0);
1935
1936 for (can = asm_nodes; can; can = can->next)
1937 {
1938 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
1939 streamer_write_hwi (ob, can->order);
1940 }
1941
1942 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
1943
1944 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
1945 lto_begin_section (section_name, !flag_wpa);
1946 free (section_name);
1947
1948 /* The entire header stream is computed here. */
1949 memset (&header, 0, sizeof (header));
1950
1951 /* Write the header. */
1952 header.lto_header.major_version = LTO_major_version;
1953 header.lto_header.minor_version = LTO_minor_version;
1954
1955 header.main_size = ob->main_stream->total_size;
1956 header.string_size = ob->string_stream->total_size;
1957
1958 header_stream = XCNEW (struct lto_output_stream);
1959 lto_output_data_stream (header_stream, &header, sizeof (header));
1960 lto_write_stream (header_stream);
1961 free (header_stream);
1962
1963 /* Put all of the gimple and the string table out to the asm file as a
1964 block of text. */
1965 lto_write_stream (ob->main_stream);
1966 lto_write_stream (ob->string_stream);
1967
1968 lto_end_section ();
1969
1970 destroy_output_block (ob);
1971 }
1972
1973
1974 /* Copy the function body of NODE without deserializing. */
1975
1976 static void
1977 copy_function (struct cgraph_node *node)
1978 {
1979 tree function = node->decl;
1980 struct lto_file_decl_data *file_data = node->lto_file_data;
1981 struct lto_output_stream *output_stream = XCNEW (struct lto_output_stream);
1982 const char *data;
1983 size_t len;
1984 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
1985 char *section_name =
1986 lto_get_section_name (LTO_section_function_body, name, NULL);
1987 size_t i, j;
1988 struct lto_in_decl_state *in_state;
1989 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
1990
1991 lto_begin_section (section_name, !flag_wpa);
1992 free (section_name);
1993
1994 /* We may have renamed the declaration, e.g., a static function. */
1995 name = lto_get_decl_name_mapping (file_data, name);
1996
1997 data = lto_get_section_data (file_data, LTO_section_function_body,
1998 name, &len);
1999 gcc_assert (data);
2000
2001 /* Do a bitwise copy of the function body. */
2002 lto_output_data_stream (output_stream, data, len);
2003 lto_write_stream (output_stream);
2004
2005 /* Copy decls. */
2006 in_state =
2007 lto_get_function_in_decl_state (node->lto_file_data, function);
2008 gcc_assert (in_state);
2009
2010 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2011 {
2012 size_t n = in_state->streams[i].size;
2013 tree *trees = in_state->streams[i].trees;
2014 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2015
2016 /* The out state must have the same indices as the in state,
2017 so just copy the vector. All the encoders in the in state
2018 must be empty when we reach here. */
2019 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2020 encoder->trees.reserve_exact (n);
2021 for (j = 0; j < n; j++)
2022 encoder->trees.safe_push (trees[j]);
2023 }
2024
2025 lto_free_section_data (file_data, LTO_section_function_body, name,
2026 data, len);
2027 free (output_stream);
2028 lto_end_section ();
2029 }
2030
2031 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2032
2033 static tree
2034 wrap_refs (tree *tp, int *ws, void *)
2035 {
2036 tree t = *tp;
2037 if (handled_component_p (t)
2038 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL)
2039 {
2040 tree decl = TREE_OPERAND (t, 0);
2041 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2042 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2043 build1 (ADDR_EXPR, ptrtype, decl),
2044 build_int_cst (ptrtype, 0));
2045 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2046 *ws = 0;
2047 }
2048 else if (TREE_CODE (t) == CONSTRUCTOR)
2049 ;
2050 else if (!EXPR_P (t))
2051 *ws = 0;
2052 return NULL_TREE;
2053 }
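/* Editor's note: an illustrative example of what wrap_refs does (not
   original commentary).  Given a handled component whose base is a VAR_DECL,
   e.g. a COMPONENT_REF for "v.f" where "v" has type T, the base operand is
   rewritten from the bare decl "v" to the equivalent, type-preserving

       MEM_REF <T> (ADDR_EXPR <T *> (v), (T *) 0)

   i.e. roughly "MEM[(T *)&v].f" in dump syntax, with the volatility of the
   decl copied onto the new MEM_REF.  */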
2054
2055 /* Main entry point from the pass manager. */
2056
2057 void
2058 lto_output (void)
2059 {
2060 struct lto_out_decl_state *decl_state;
2061 #ifdef ENABLE_CHECKING
2062 bitmap output = lto_bitmap_alloc ();
2063 #endif
2064 int i, n_nodes;
2065 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2066
2067 /* Initialize the streamer. */
2068 lto_streamer_init ();
2069
2070 n_nodes = lto_symtab_encoder_size (encoder);
2071 /* Process only the functions with bodies. */
2072 for (i = 0; i < n_nodes; i++)
2073 {
2074 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2075 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2076 {
2077 if (lto_symtab_encoder_encode_body_p (encoder, node)
2078 && !node->alias)
2079 {
2080 #ifdef ENABLE_CHECKING
2081 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2082 bitmap_set_bit (output, DECL_UID (node->decl));
2083 #endif
2084 decl_state = lto_new_out_decl_state ();
2085 lto_push_out_decl_state (decl_state);
2086 if (gimple_has_body_p (node->decl) || !flag_wpa)
2087 output_function (node);
2088 else
2089 copy_function (node);
2090 gcc_assert (lto_get_out_decl_state () == decl_state);
2091 lto_pop_out_decl_state ();
2092 lto_record_function_out_decl_state (node->decl, decl_state);
2093 }
2094 }
2095 else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2096 {
2097 /* Wrap symbol references inside the ctor in a type
2098 preserving MEM_REF. */
2099 tree ctor = DECL_INITIAL (node->decl);
2100 if (ctor && !in_lto_p)
2101 walk_tree (&ctor, wrap_refs, NULL, NULL);
2102 }
2103 }
2104
2105 /* Emit the callgraph after emitting function bodies. This needs to
2106 be done now to make sure that all the statements in every function
2107 have been renumbered so that edges can be associated with call
2108 statements using the statement UIDs. */
2109 output_symtab ();
2110
2111 #ifdef ENABLE_CHECKING
2112 lto_bitmap_free (output);
2113 #endif
2114 }
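/* Editor's note on the body-emission choice in lto_output above: a node goes
   through output_function when its GIMPLE body is in memory or when this is
   not the WPA stage, and through copy_function when, at WPA time, the body
   was never materialized and can simply be copied byte-for-byte from the
   input LTO object.  (Editorial observation, not original commentary.)  */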
2115
2116 /* Write each node encoded by ENCODER to OB, as well as those reachable
2117 from it and required for a correct representation of its semantics.
2118 Each node in ENCODER must be a global declaration or a type. A node
2119 is written only once, even if it appears multiple times in the
2120 vector. Certain transitively-reachable nodes, such as those
2121 representing expressions, may be duplicated, but such nodes
2122 must not appear in ENCODER itself. */
2123
2124 static void
2125 write_global_stream (struct output_block *ob,
2126 struct lto_tree_ref_encoder *encoder)
2127 {
2128 tree t;
2129 size_t index;
2130 const size_t size = lto_tree_ref_encoder_size (encoder);
2131
2132 for (index = 0; index < size; index++)
2133 {
2134 t = lto_tree_ref_encoder_get_tree (encoder, index);
2135 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2136 stream_write_tree (ob, t, false);
2137 }
2138 }
2139
2140
2141 /* Write a sequence of indices into the globals vector corresponding
2142 to the trees in ENCODER. These are used by the reader to map the
2143 indices used to refer to global entities within function bodies to
2144 their referents. */
2145
2146 static void
2147 write_global_references (struct output_block *ob,
2148 struct lto_output_stream *ref_stream,
2149 struct lto_tree_ref_encoder *encoder)
2150 {
2151 tree t;
2152 uint32_t index;
2153 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2154
2155 /* Write size as 32-bit unsigned. */
2156 lto_output_data_stream (ref_stream, &size, sizeof (int32_t));
2157
2158 for (index = 0; index < size; index++)
2159 {
2160 uint32_t slot_num;
2161
2162 t = lto_tree_ref_encoder_get_tree (encoder, index);
2163 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2164 gcc_assert (slot_num != (unsigned)-1);
2165 lto_output_data_stream (ref_stream, &slot_num, sizeof slot_num);
2166 }
2167 }
2168
2169
2170 /* Write all the streams in an lto_out_decl_state STATE using
2171 output block OB. */
2172
2173 void
2174 lto_output_decl_state_streams (struct output_block *ob,
2175 struct lto_out_decl_state *state)
2176 {
2177 int i;
2178
2179 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2180 write_global_stream (ob, &state->streams[i]);
2181 }
2182
2183
2184 /* Write all the references in an lto_out_decl_state STATE using
2185 output block OB and output stream OUT_STREAM. */
2186
2187 void
2188 lto_output_decl_state_refs (struct output_block *ob,
2189 struct lto_output_stream *out_stream,
2190 struct lto_out_decl_state *state)
2191 {
2192 unsigned i;
2193 uint32_t ref;
2194 tree decl;
2195
2196 /* Write a reference to FUNCTION_DECL. If there is no function,
2197 write a reference to void_type_node. */
2198 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2199 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2200 gcc_assert (ref != (unsigned)-1);
2201 lto_output_data_stream (out_stream, &ref, sizeof (uint32_t));
2202
2203 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2204 write_global_references (ob, out_stream, &state->streams[i]);
2205 }
2206
2207
2208 /* Return the written size of STATE. */
2209
2210 static size_t
2211 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2212 {
2213 int i;
2214 size_t size;
2215
2216 size = sizeof (int32_t); /* fn_ref. */
2217 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2218 {
2219 size += sizeof (int32_t); /* vector size. */
2220 size += (lto_tree_ref_encoder_size (&state->streams[i])
2221 * sizeof (int32_t));
2222 }
2223 return size;
2224 }
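/* Editor's note: the record written by lto_output_decl_state_refs for one
   decl state therefore occupies exactly the number of bytes computed above:

       4                              bytes for the fn_decl reference
     + sum over the LTO_N_DECL_STREAMS streams of
       4 + 4 * lto_tree_ref_encoder_size (stream)

   For example, a state whose streams hold 3, 0 and 7 trees, with the
   remaining streams empty (purely hypothetical sizes), would take
   4 + 4 * LTO_N_DECL_STREAMS + (3 + 0 + 7) * 4 bytes.
   (Illustrative arithmetic, not original commentary.)  */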
2225
2226
2227 /* Write symbol T into STREAM using CACHE. SEEN specifies the symbols
2228 written so far. */
2229
2230 static void
2231 write_symbol (struct streamer_tree_cache_d *cache,
2232 struct lto_output_stream *stream,
2233 tree t, struct pointer_set_t *seen, bool alias)
2234 {
2235 const char *name;
2236 enum gcc_plugin_symbol_kind kind;
2237 enum gcc_plugin_symbol_visibility visibility;
2238 unsigned slot_num;
2239 uint64_t size;
2240 const char *comdat;
2241 unsigned char c;
2242
2243 /* None of the following kinds of symbols are needed in the
2244 symbol table. */
2245 if (!TREE_PUBLIC (t)
2246 || is_builtin_fn (t)
2247 || DECL_ABSTRACT (t)
2248 || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
2249 return;
2250 gcc_assert (TREE_CODE (t) != RESULT_DECL);
2251
2252 gcc_assert (TREE_CODE (t) == VAR_DECL
2253 || TREE_CODE (t) == FUNCTION_DECL);
2254
2255 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2256
2257 /* This behaves like assemble_name_raw in varasm.c, performing the
2258 same name manipulations that ASM_OUTPUT_LABELREF does. */
2259 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2260
2261 if (pointer_set_contains (seen, name))
2262 return;
2263 pointer_set_insert (seen, name);
2264
2265 streamer_tree_cache_lookup (cache, t, &slot_num);
2266 gcc_assert (slot_num != (unsigned)-1);
2267
2268 if (DECL_EXTERNAL (t))
2269 {
2270 if (DECL_WEAK (t))
2271 kind = GCCPK_WEAKUNDEF;
2272 else
2273 kind = GCCPK_UNDEF;
2274 }
2275 else
2276 {
2277 if (DECL_WEAK (t))
2278 kind = GCCPK_WEAKDEF;
2279 else if (DECL_COMMON (t))
2280 kind = GCCPK_COMMON;
2281 else
2282 kind = GCCPK_DEF;
2283
2284 /* When something is defined, it should have a node attached. */
2285 gcc_assert (alias || TREE_CODE (t) != VAR_DECL
2286 || varpool_get_node (t)->definition);
2287 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2288 || (cgraph_get_node (t)
2289 && cgraph_get_node (t)->definition));
2290 }
2291
2292 /* Imitate what default_elf_asm_output_external does.
2293 When a symbol is external, we need to output it with DEFAULT visibility
2294 when compiling with -fvisibility=default, but with HIDDEN visibility
2295 when the symbol has attribute (visibility("hidden")) specified.
2296 targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
2297 right. */
2298
2299 if (DECL_EXTERNAL (t)
2300 && !targetm.binds_local_p (t))
2301 visibility = GCCPV_DEFAULT;
2302 else
2303 switch (DECL_VISIBILITY (t))
2304 {
2305 case VISIBILITY_DEFAULT:
2306 visibility = GCCPV_DEFAULT;
2307 break;
2308 case VISIBILITY_PROTECTED:
2309 visibility = GCCPV_PROTECTED;
2310 break;
2311 case VISIBILITY_HIDDEN:
2312 visibility = GCCPV_HIDDEN;
2313 break;
2314 case VISIBILITY_INTERNAL:
2315 visibility = GCCPV_INTERNAL;
2316 break;
2317 }
2318
2319 if (kind == GCCPK_COMMON
2320 && DECL_SIZE_UNIT (t)
2321 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2322 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2323 else
2324 size = 0;
2325
2326 if (DECL_ONE_ONLY (t))
2327 comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
2328 else
2329 comdat = "";
2330
2331 lto_output_data_stream (stream, name, strlen (name) + 1);
2332 lto_output_data_stream (stream, comdat, strlen (comdat) + 1);
2333 c = (unsigned char) kind;
2334 lto_output_data_stream (stream, &c, 1);
2335 c = (unsigned char) visibility;
2336 lto_output_data_stream (stream, &c, 1);
2337 lto_output_data_stream (stream, &size, 8);
2338 lto_output_data_stream (stream, &slot_num, 4);
2339 }
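/* Editor's note: one plugin symbol-table entry, as emitted by write_symbol
   above, is laid out as

       name        NUL-terminated string
       comdat      NUL-terminated string ("" when not DECL_ONE_ONLY)
       kind        1 byte  (enum gcc_plugin_symbol_kind)
       visibility  1 byte  (enum gcc_plugin_symbol_visibility)
       size        8 bytes (only meaningful for GCCPK_COMMON)
       slot_num    4 bytes (index into the global tree cache)

   A minimal, self-contained sketch of writing the same layout to a plain
   FILE*; the function name and parameters are hypothetical and the snippet
   is kept out of the build with #if 0.  */
#if 0
#include <stdio.h>
#include <stdint.h>
#include <string.h>

static void
emit_symbol_entry (FILE *f, const char *name, const char *comdat,
		   unsigned char kind, unsigned char visibility,
		   uint64_t size, uint32_t slot_num)
{
  fwrite (name, 1, strlen (name) + 1, f);	/* Includes the trailing NUL.  */
  fwrite (comdat, 1, strlen (comdat) + 1, f);
  fwrite (&kind, 1, 1, f);
  fwrite (&visibility, 1, 1, f);
  fwrite (&size, 1, 8, f);			/* Raw bytes, host byte order.  */
  fwrite (&slot_num, 1, 4, f);
}
#endif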
2340
2341 /* Return true if NODE should appear in the plugin symbol table. */
2342
2343 bool
2344 output_symbol_p (symtab_node *node)
2345 {
2346 struct cgraph_node *cnode;
2347 if (!symtab_real_symbol_p (node))
2348 return false;
2349 /* We keep external functions in the symtab for the sake of inlining
2350 and devirtualization. We do not want to see them in the symbol table
2351 as references unless they are really used. */
2352 cnode = dyn_cast <cgraph_node *> (node);
2353 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2354 && cnode->callers)
2355 return true;
2356
2357 /* Ignore all references from external variables' initializers - they are
2358 not really part of the compilation unit until they are used by folding.
2359 Some symbols, like references to external construction vtables, cannot be
2360 referred to at all. We decide this in can_refer_decl_in_current_unit_p. */
2361 if (!node->definition || DECL_EXTERNAL (node->decl))
2362 {
2363 int i;
2364 struct ipa_ref *ref;
2365 for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list,
2366 i, ref); i++)
2367 {
2368 if (ref->use == IPA_REF_ALIAS)
2369 continue;
2370 if (is_a <cgraph_node *> (ref->referring))
2371 return true;
2372 if (!DECL_EXTERNAL (ref->referring->decl))
2373 return true;
2374 }
2375 return false;
2376 }
2377 return true;
2378 }
2379
2380
2381 /* Write an IL symbol table to OB, covering the symbols recorded in the
2382 symtab node encoder of OB's decl state. */
2383
2384 static void
2385 produce_symtab (struct output_block *ob)
2386 {
2387 struct streamer_tree_cache_d *cache = ob->writer_cache;
2388 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2389 struct pointer_set_t *seen;
2390 struct lto_output_stream stream;
2391 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2392 lto_symtab_encoder_iterator lsei;
2393
2394 lto_begin_section (section_name, false);
2395 free (section_name);
2396
2397 seen = pointer_set_create ();
2398 memset (&stream, 0, sizeof (stream));
2399
2400 /* Write the symbol table.
2401 First write everything defined and then all declarations.
2402 This is necessary to handle cases where we have duplicated symbols. */
2403 for (lsei = lsei_start (encoder);
2404 !lsei_end_p (lsei); lsei_next (&lsei))
2405 {
2406 symtab_node *node = lsei_node (lsei);
2407
2408 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2409 continue;
2410 write_symbol (cache, &stream, node->decl, seen, false);
2411 }
2412 for (lsei = lsei_start (encoder);
2413 !lsei_end_p (lsei); lsei_next (&lsei))
2414 {
2415 symtab_node *node = lsei_node (lsei);
2416
2417 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2418 continue;
2419 write_symbol (cache, &stream, node->decl, seen, false);
2420 }
2421
2422 lto_write_stream (&stream);
2423 pointer_set_destroy (seen);
2424
2425 lto_end_section ();
2426 }
2427
2428
2429 /* This pass is run after all of the functions are serialized and all
2430 of the IPA passes have written their serialized forms. This pass
2431 causes the vector of all of the global decls and types used from
2432 this file to be written into a section that can then be read back
2433 in to recover these on the other side. */
2434
2435 void
2436 produce_asm_for_decls (void)
2437 {
2438 struct lto_out_decl_state *out_state;
2439 struct lto_out_decl_state *fn_out_state;
2440 struct lto_decl_header header;
2441 char *section_name;
2442 struct output_block *ob;
2443 struct lto_output_stream *header_stream, *decl_state_stream;
2444 unsigned idx, num_fns;
2445 size_t decl_state_size;
2446 int32_t num_decl_states;
2447
2448 ob = create_output_block (LTO_section_decls);
2449 ob->global = true;
2450
2451 memset (&header, 0, sizeof (struct lto_decl_header));
2452
2453 section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
2454 lto_begin_section (section_name, !flag_wpa);
2455 free (section_name);
2456
2457 /* Make string 0 be a NULL string. */
2458 streamer_write_char_stream (ob->string_stream, 0);
2459
2460 gcc_assert (!alias_pairs);
2461
2462 /* Get rid of the global decl state hash tables to save some memory. */
2463 out_state = lto_get_out_decl_state ();
2464 for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
2465 if (out_state->streams[i].tree_hash_table)
2466 {
2467 delete out_state->streams[i].tree_hash_table;
2468 out_state->streams[i].tree_hash_table = NULL;
2469 }
2470
2471 /* Write the global symbols. */
2472 lto_output_decl_state_streams (ob, out_state);
2473 num_fns = lto_function_decl_states.length ();
2474 for (idx = 0; idx < num_fns; idx++)
2475 {
2476 fn_out_state =
2477 lto_function_decl_states[idx];
2478 lto_output_decl_state_streams (ob, fn_out_state);
2479 }
2480
2481 header.lto_header.major_version = LTO_major_version;
2482 header.lto_header.minor_version = LTO_minor_version;
2483
2484 /* Currently not used. This field would allow us to preallocate
2485 the globals vector, so that it need not be resized as it is extended. */
2486 header.num_nodes = -1;
2487
2488 /* Compute the total size of all decl out states. */
2489 decl_state_size = sizeof (int32_t);
2490 decl_state_size += lto_out_decl_state_written_size (out_state);
2491 for (idx = 0; idx < num_fns; idx++)
2492 {
2493 fn_out_state =
2494 lto_function_decl_states[idx];
2495 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
2496 }
2497 header.decl_state_size = decl_state_size;
2498
2499 header.main_size = ob->main_stream->total_size;
2500 header.string_size = ob->string_stream->total_size;
2501
2502 header_stream = XCNEW (struct lto_output_stream);
2503 lto_output_data_stream (header_stream, &header, sizeof header);
2504 lto_write_stream (header_stream);
2505 free (header_stream);
2506
2507 /* Write the main out-decl state, followed by out-decl states of
2508 functions. */
2509 decl_state_stream = XCNEW (struct lto_output_stream);
2510 num_decl_states = num_fns + 1;
2511 lto_output_data_stream (decl_state_stream, &num_decl_states,
2512 sizeof (num_decl_states));
2513 lto_output_decl_state_refs (ob, decl_state_stream, out_state);
2514 for (idx = 0; idx < num_fns; idx++)
2515 {
2516 fn_out_state =
2517 lto_function_decl_states[idx];
2518 lto_output_decl_state_refs (ob, decl_state_stream, fn_out_state);
2519 }
2520 lto_write_stream (decl_state_stream);
2521 free (decl_state_stream);
2522
2523 lto_write_stream (ob->main_stream);
2524 lto_write_stream (ob->string_stream);
2525
2526 lto_end_section ();
2527
2528 /* Write the symbol table. It is used by the linker to determine
2529 dependencies, and thus we can skip it for WPA. */
2530 if (!flag_wpa)
2531 produce_symtab (ob);
2532
2533 /* Write command line opts. */
2534 lto_write_options ();
2535
2536 /* Deallocate memory and clean up. */
2537 for (idx = 0; idx < num_fns; idx++)
2538 {
2539 fn_out_state =
2540 lto_function_decl_states[idx];
2541 lto_delete_out_decl_state (fn_out_state);
2542 }
2543 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
2544 lto_function_decl_states.release ();
2545 destroy_output_block (ob);
2546 }
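/* Editor's note: in summary, the LTO_section_decls section written by
   produce_asm_for_decls above consists of

       struct lto_decl_header        - versions, num_nodes (unused, -1),
                                       decl_state_size and the stream sizes
       int32_t num_decl_states       - number of decl-state records (num_fns + 1)
       decl-state records            - the global state, then one per function
       main stream                   - the serialized global trees
       string stream                 - the string table

   and is followed, outside this section, by the plugin symbol table (skipped
   for WPA) and the streamed command-line options.  (Editorial summary of the
   function above, not original commentary.)  */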