Change is-a.h to support typedefs of pointers
[gcc.git] / gcc / lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2014 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "stor-layout.h"
29 #include "stringpool.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "hashtab.h"
35 #include "basic-block.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-expr.h"
39 #include "is-a.h"
40 #include "gimple.h"
41 #include "gimple-iterator.h"
42 #include "gimple-ssa.h"
43 #include "tree-ssanames.h"
44 #include "tree-pass.h"
45 #include "function.h"
46 #include "diagnostic-core.h"
47 #include "except.h"
48 #include "lto-symtab.h"
49 #include "lto-streamer.h"
50 #include "data-streamer.h"
51 #include "gimple-streamer.h"
52 #include "tree-streamer.h"
53 #include "streamer-hooks.h"
54 #include "cfgloop.h"
55
56
57 static void lto_write_tree (struct output_block*, tree, bool);
58
59 /* Clear the line info stored in DATA_IN. */
60
61 static void
62 clear_line_info (struct output_block *ob)
63 {
64 ob->current_file = NULL;
65 ob->current_line = 0;
66 ob->current_col = 0;
67 }
68
69
70 /* Create the output block and return it. SECTION_TYPE is
71 LTO_section_function_body or LTO_static_initializer. */
72
73 struct output_block *
74 create_output_block (enum lto_section_type section_type)
75 {
76 struct output_block *ob = XCNEW (struct output_block);
77
78 ob->section_type = section_type;
79 ob->decl_state = lto_get_out_decl_state ();
80 ob->main_stream = XCNEW (struct lto_output_stream);
81 ob->string_stream = XCNEW (struct lto_output_stream);
82 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
83
84 if (section_type == LTO_section_function_body)
85 ob->cfg_stream = XCNEW (struct lto_output_stream);
86
87 clear_line_info (ob);
88
89 ob->string_hash_table.create (37);
90 gcc_obstack_init (&ob->obstack);
91
92 return ob;
93 }
94
95
96 /* Destroy the output block OB. */
97
98 void
99 destroy_output_block (struct output_block *ob)
100 {
101 enum lto_section_type section_type = ob->section_type;
102
103 ob->string_hash_table.dispose ();
104
105 free (ob->main_stream);
106 free (ob->string_stream);
107 if (section_type == LTO_section_function_body)
108 free (ob->cfg_stream);
109
110 streamer_tree_cache_delete (ob->writer_cache);
111 obstack_free (&ob->obstack, NULL);
112
113 free (ob);
114 }
115
116
117 /* Look up NODE in the type table and write the index for it to OB. */
118
119 static void
120 output_type_ref (struct output_block *ob, tree node)
121 {
122 streamer_write_record_start (ob, LTO_type_ref);
123 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
124 }
125
126
127 /* Return true if tree node T is written to various tables. For these
128 nodes, we sometimes want to write their phyiscal representation
129 (via lto_output_tree), and sometimes we need to emit an index
130 reference into a table (via lto_output_tree_ref). */
131
132 static bool
133 tree_is_indexable (tree t)
134 {
135 /* Parameters and return values of functions of variably modified types
136 must go to global stream, because they may be used in the type
137 definition. */
138 if (TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
139 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
140 else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
141 || TREE_CODE (t) == TYPE_DECL
142 || TREE_CODE (t) == CONST_DECL
143 || TREE_CODE (t) == NAMELIST_DECL)
144 && decl_function_context (t))
145 return false;
146 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
147 return false;
148 /* Variably modified types need to be streamed alongside function
149 bodies because they can refer to local entities. Together with
150 them we have to localize their members as well.
151 ??? In theory that includes non-FIELD_DECLs as well. */
152 else if (TYPE_P (t)
153 && variably_modified_type_p (t, NULL_TREE))
154 return false;
155 else if (TREE_CODE (t) == FIELD_DECL
156 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
157 return false;
158 else
159 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
160 }
161
162
163 /* Output info about new location into bitpack BP.
164 After outputting bitpack, lto_output_location_data has
165 to be done to output actual data. */
166
167 void
168 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
169 location_t loc)
170 {
171 expanded_location xloc;
172
173 loc = LOCATION_LOCUS (loc);
174 bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
175 if (loc == UNKNOWN_LOCATION)
176 return;
177
178 xloc = expand_location (loc);
179
180 bp_pack_value (bp, ob->current_file != xloc.file, 1);
181 bp_pack_value (bp, ob->current_line != xloc.line, 1);
182 bp_pack_value (bp, ob->current_col != xloc.column, 1);
183
184 if (ob->current_file != xloc.file)
185 bp_pack_var_len_unsigned (bp,
186 streamer_string_index (ob, xloc.file,
187 strlen (xloc.file) + 1,
188 true));
189 ob->current_file = xloc.file;
190
191 if (ob->current_line != xloc.line)
192 bp_pack_var_len_unsigned (bp, xloc.line);
193 ob->current_line = xloc.line;
194
195 if (ob->current_col != xloc.column)
196 bp_pack_var_len_unsigned (bp, xloc.column);
197 ob->current_col = xloc.column;
198 }
199
200
201 /* If EXPR is an indexable tree node, output a reference to it to
202 output block OB. Otherwise, output the physical representation of
203 EXPR to OB. */
204
205 static void
206 lto_output_tree_ref (struct output_block *ob, tree expr)
207 {
208 enum tree_code code;
209
210 if (TYPE_P (expr))
211 {
212 output_type_ref (ob, expr);
213 return;
214 }
215
216 code = TREE_CODE (expr);
217 switch (code)
218 {
219 case SSA_NAME:
220 streamer_write_record_start (ob, LTO_ssa_name_ref);
221 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
222 break;
223
224 case FIELD_DECL:
225 streamer_write_record_start (ob, LTO_field_decl_ref);
226 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
227 break;
228
229 case FUNCTION_DECL:
230 streamer_write_record_start (ob, LTO_function_decl_ref);
231 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
232 break;
233
234 case VAR_DECL:
235 case DEBUG_EXPR_DECL:
236 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
237 case PARM_DECL:
238 streamer_write_record_start (ob, LTO_global_decl_ref);
239 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
240 break;
241
242 case CONST_DECL:
243 streamer_write_record_start (ob, LTO_const_decl_ref);
244 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
245 break;
246
247 case IMPORTED_DECL:
248 gcc_assert (decl_function_context (expr) == NULL);
249 streamer_write_record_start (ob, LTO_imported_decl_ref);
250 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
251 break;
252
253 case TYPE_DECL:
254 streamer_write_record_start (ob, LTO_type_decl_ref);
255 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
256 break;
257
258 case NAMELIST_DECL:
259 streamer_write_record_start (ob, LTO_namelist_decl_ref);
260 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
261 break;
262
263 case NAMESPACE_DECL:
264 streamer_write_record_start (ob, LTO_namespace_decl_ref);
265 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
266 break;
267
268 case LABEL_DECL:
269 streamer_write_record_start (ob, LTO_label_decl_ref);
270 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
271 break;
272
273 case RESULT_DECL:
274 streamer_write_record_start (ob, LTO_result_decl_ref);
275 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
276 break;
277
278 case TRANSLATION_UNIT_DECL:
279 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
280 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
281 break;
282
283 default:
284 /* No other node is indexable, so it should have been handled by
285 lto_output_tree. */
286 gcc_unreachable ();
287 }
288 }
289
290
291 /* Return true if EXPR is a tree node that can be written to disk. */
292
293 static inline bool
294 lto_is_streamable (tree expr)
295 {
296 enum tree_code code = TREE_CODE (expr);
297
298 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
299 name version in lto_output_tree_ref (see output_ssa_names). */
300 return !is_lang_specific (expr)
301 && code != SSA_NAME
302 && code != CALL_EXPR
303 && code != LANG_TYPE
304 && code != MODIFY_EXPR
305 && code != INIT_EXPR
306 && code != TARGET_EXPR
307 && code != BIND_EXPR
308 && code != WITH_CLEANUP_EXPR
309 && code != STATEMENT_LIST
310 && (code == CASE_LABEL_EXPR
311 || code == DECL_EXPR
312 || TREE_CODE_CLASS (code) != tcc_statement);
313 }
314
315
316 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
317
318 static tree
319 get_symbol_initial_value (struct output_block *ob, tree expr)
320 {
321 gcc_checking_assert (DECL_P (expr)
322 && TREE_CODE (expr) != FUNCTION_DECL
323 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
324
325 /* Handle DECL_INITIAL for symbols. */
326 tree initial = DECL_INITIAL (expr);
327 if (TREE_CODE (expr) == VAR_DECL
328 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
329 && !DECL_IN_CONSTANT_POOL (expr)
330 && initial)
331 {
332 lto_symtab_encoder_t encoder;
333 varpool_node *vnode;
334
335 encoder = ob->decl_state->symtab_node_encoder;
336 vnode = varpool_get_node (expr);
337 if (!vnode
338 || !lto_symtab_encoder_encode_initializer_p (encoder,
339 vnode))
340 initial = error_mark_node;
341 }
342
343 return initial;
344 }
345
346
347 /* Write a physical representation of tree node EXPR to output block
348 OB. If REF_P is true, the leaves of EXPR are emitted as references
349 via lto_output_tree_ref. IX is the index into the streamer cache
350 where EXPR is stored. */
351
352 static void
353 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
354 {
355 /* Pack all the non-pointer fields in EXPR into a bitpack and write
356 the resulting bitpack. */
357 bitpack_d bp = bitpack_create (ob->main_stream);
358 streamer_pack_tree_bitfields (ob, &bp, expr);
359 streamer_write_bitpack (&bp);
360
361 /* Write all the pointer fields in EXPR. */
362 streamer_write_tree_body (ob, expr, ref_p);
363
364 /* Write any LTO-specific data to OB. */
365 if (DECL_P (expr)
366 && TREE_CODE (expr) != FUNCTION_DECL
367 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
368 {
369 /* Handle DECL_INITIAL for symbols. */
370 tree initial = get_symbol_initial_value (ob, expr);
371 stream_write_tree (ob, initial, ref_p);
372 }
373 }
374
375 /* Write a physical representation of tree node EXPR to output block
376 OB. If REF_P is true, the leaves of EXPR are emitted as references
377 via lto_output_tree_ref. IX is the index into the streamer cache
378 where EXPR is stored. */
379
380 static void
381 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
382 {
383 if (!lto_is_streamable (expr))
384 internal_error ("tree code %qs is not supported in LTO streams",
385 get_tree_code_name (TREE_CODE (expr)));
386
387 /* Write the header, containing everything needed to materialize
388 EXPR on the reading side. */
389 streamer_write_tree_header (ob, expr);
390
391 lto_write_tree_1 (ob, expr, ref_p);
392
393 /* Mark the end of EXPR. */
394 streamer_write_zero (ob);
395 }
396
397 /* Emit the physical representation of tree node EXPR to output block
398 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
399 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
400
/* HASH is EXPR's hash value, used as its key in the writer cache.
   EXPR must not already be in the cache (asserted below), and must
   not be an indexable node when THIS_REF_P is set.  */
401 static void
402 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
403 bool ref_p, bool this_ref_p)
404 {
405 unsigned ix;
406
407 gcc_checking_assert (expr != NULL_TREE
408 && !(this_ref_p && tree_is_indexable (expr)));
409
/* Record EXPR in the writer cache; this is its first streaming.  */
410 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
411 expr, hash, &ix);
412 gcc_assert (!exists_p);
413 if (streamer_handle_as_builtin_p (expr))
414 {
415 /* MD and NORMAL builtins do not need to be written out
416 completely as they are always instantiated by the
417 compiler on startup. The only builtins that need to
418 be written out are BUILT_IN_FRONTEND. For all other
419 builtins, we simply write the class and code. */
420 streamer_write_builtin (ob, expr);
421 }
422 else if (TREE_CODE (expr) == INTEGER_CST
423 && !TREE_OVERFLOW (expr))
424 {
425 /* Shared INTEGER_CST nodes are special because they need their
426 original type to be materialized by the reader (to implement
427 TYPE_CACHED_VALUES). */
428 streamer_write_integer_cst (ob, expr, ref_p);
429 }
430 else
431 {
432 /* This is the first time we see EXPR, write its fields
433 to OB. */
434 lto_write_tree (ob, expr, ref_p);
435 }
436 }
437
/* Per-tree DFS state for the Tarjan-style SCC discovery walk used
   before streaming: DFSNUM is the visit number, LOW the smallest
   DFS number reachable from this node.  */
438 struct sccs
439 {
440 unsigned int dfsnum;
441 unsigned int low;
442 };
443
/* An entry on the SCC stack: a tree together with its hash value.  */
444 struct scc_entry
445 {
446 tree t;
447 hashval_t hash;
448 };
449
/* Next DFS number to hand out during the SCC walk.  */
450 static unsigned int next_dfs_num;
/* Stack of trees (with their hashes) belonging to open SCCs.  */
451 static vec<scc_entry> sccstack;
/* Map from tree to its sccs state; states live on SCCSTATE_OBSTACK.  */
452 static struct pointer_map_t *sccstate;
453 static struct obstack sccstate_obstack;
454
/* Forward declaration; defined later in this file.  */
455 static void
456 DFS_write_tree (struct output_block *ob, sccs *from_state,
457 tree expr, bool ref_p, bool this_ref_p);
458
459 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
460 DFS recurse for all tree edges originating from it. */
461
/* NOTE(review): the order in which edges are followed here is part of
   the stream contract -- it presumably must mirror the order in which
   streamer_write_tree_body emits pointer fields; verify against
   tree-streamer-out.c before reordering anything.  */
462 static void
463 DFS_write_tree_body (struct output_block *ob,
464 tree expr, sccs *expr_state, bool ref_p)
465 {
466 #define DFS_follow_tree_edge(DEST) \
467 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
468
469 enum tree_code code;
470
471 code = TREE_CODE (expr);
472
473 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
474 {
475 if (TREE_CODE (expr) != IDENTIFIER_NODE)
476 DFS_follow_tree_edge (TREE_TYPE (expr));
477 }
478
479 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
480 {
481 for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
482 DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
483 }
484
485 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
486 {
487 DFS_follow_tree_edge (TREE_REALPART (expr));
488 DFS_follow_tree_edge (TREE_IMAGPART (expr));
489 }
490
491 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
492 {
493 /* Drop names that were created for anonymous entities. */
494 if (DECL_NAME (expr)
495 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
496 && ANON_AGGRNAME_P (DECL_NAME (expr)))
497 ;
498 else
499 DFS_follow_tree_edge (DECL_NAME (expr));
500 DFS_follow_tree_edge (DECL_CONTEXT (expr));
501 }
502
503 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
504 {
505 DFS_follow_tree_edge (DECL_SIZE (expr));
506 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
507
508 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
509 special handling in LTO, it must be handled by streamer hooks. */
510
511 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
512
513 /* Do not follow DECL_ABSTRACT_ORIGIN. We cannot handle debug information
514 for early inlining so drop it on the floor instead of ICEing in
515 dwarf2out.c. */
516
517 if ((TREE_CODE (expr) == VAR_DECL
518 || TREE_CODE (expr) == PARM_DECL)
519 && DECL_HAS_VALUE_EXPR_P (expr))
520 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
521 if (TREE_CODE (expr) == VAR_DECL)
522 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
523 }
524
525 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
526 {
527 if (TREE_CODE (expr) == TYPE_DECL)
528 DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
529 DFS_follow_tree_edge (DECL_VINDEX (expr));
530 }
531
532 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
533 {
534 /* Make sure we don't inadvertently set the assembler name. */
535 if (DECL_ASSEMBLER_NAME_SET_P (expr))
536 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
537 DFS_follow_tree_edge (DECL_SECTION_NAME (expr));
538 DFS_follow_tree_edge (DECL_COMDAT_GROUP (expr));
539 }
540
541 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
542 {
543 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
544 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
545 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
546 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
547 DFS_follow_tree_edge (DECL_FCONTEXT (expr));
548 }
549
550 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
551 {
552 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
553 /* Do not DECL_FUNCTION_SPECIFIC_TARGET. They will be regenerated. */
554 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
555 }
556
557 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
558 {
559 DFS_follow_tree_edge (TYPE_SIZE (expr));
560 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
561 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
562 DFS_follow_tree_edge (TYPE_NAME (expr));
563 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
564 reconstructed during fixup. */
565 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
566 during fixup. */
567 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
568 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
569 /* TYPE_CANONICAL is re-computed during type merging, so no need
570 to follow it here. */
571 DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
572 }
573
574 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
575 {
576 if (TREE_CODE (expr) == ENUMERAL_TYPE)
577 DFS_follow_tree_edge (TYPE_VALUES (expr));
578 else if (TREE_CODE (expr) == ARRAY_TYPE)
579 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
580 else if (RECORD_OR_UNION_TYPE_P (expr))
581 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
582 DFS_follow_tree_edge (t);
583 else if (TREE_CODE (expr) == FUNCTION_TYPE
584 || TREE_CODE (expr) == METHOD_TYPE)
585 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
586
/* NOTE(review): TYPE_MINVAL is skipped for pointer types -- presumably
   because that slot is overloaded for pointers; confirm against the
   TYPE_MINVAL definition in tree.h before changing this.  */
587 if (!POINTER_TYPE_P (expr))
588 DFS_follow_tree_edge (TYPE_MINVAL (expr));
589 DFS_follow_tree_edge (TYPE_MAXVAL (expr));
590 if (RECORD_OR_UNION_TYPE_P (expr))
591 DFS_follow_tree_edge (TYPE_BINFO (expr));
592 }
593
594 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
595 {
596 DFS_follow_tree_edge (TREE_PURPOSE (expr));
597 DFS_follow_tree_edge (TREE_VALUE (expr));
598 DFS_follow_tree_edge (TREE_CHAIN (expr));
599 }
600
601 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
602 {
603 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
604 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
605 }
606
607 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
608 {
609 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
610 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
611 DFS_follow_tree_edge (TREE_BLOCK (expr));
612 }
613
614 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
615 {
616 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
617 /* ??? FIXME. See also streamer_write_chain. */
618 if (!(VAR_OR_FUNCTION_DECL_P (t)
619 && DECL_EXTERNAL (t)))
620 DFS_follow_tree_edge (t);
621
622 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
623
624 /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
625 handle - those that represent inlined function scopes.
626 For the rest, drop them on the floor instead of ICEing
627 in dwarf2out.c. */
628 if (inlined_function_outer_scope_p (expr))
629 {
630 tree ultimate_origin = block_ultimate_origin (expr);
631 DFS_follow_tree_edge (ultimate_origin);
632 }
633 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
634 information for early inlined BLOCKs so drop it on the floor instead
635 of ICEing in dwarf2out.c. */
636
637 /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
638 streaming time. */
639
640 /* Do not output BLOCK_SUBBLOCKS. Instead on streaming-in this
641 list is re-constructed from BLOCK_SUPERCONTEXT. */
642 }
643
644 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
645 {
646 unsigned i;
647 tree t;
648
649 /* Note that the number of BINFO slots has already been emitted in
650 EXPR's header (see streamer_write_tree_header) because this length
651 is needed to build the empty BINFO node on the reader side. */
652 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
653 DFS_follow_tree_edge (t);
654 DFS_follow_tree_edge (BINFO_OFFSET (expr));
655 DFS_follow_tree_edge (BINFO_VTABLE (expr));
656 DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));
657
658 /* The number of BINFO_BASE_ACCESSES has already been emitted in
659 EXPR's bitfield section. */
660 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
661 DFS_follow_tree_edge (t);
662
663 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
664 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
665 }
666
667 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
668 {
669 unsigned i;
670 tree index, value;
671
672 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
673 {
674 DFS_follow_tree_edge (index);
675 DFS_follow_tree_edge (value);
676 }
677 }
678
679 if (code == OMP_CLAUSE)
680 {
681 int i;
682 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
683 DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
684 DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
685 }
686
687 #undef DFS_follow_tree_edge
688 }
689
690 /* Return a hash value for the tree T. */
691
692 static hashval_t
693 hash_tree (struct streamer_tree_cache_d *cache, tree t)
694 {
695 #define visit(SIBLING) \
696 do { \
697 unsigned ix; \
698 if (SIBLING && streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
699 v = iterative_hash_hashval_t (streamer_tree_cache_get_hash (cache, ix), v); \
700 } while (0)
701
702 /* Hash TS_BASE. */
703 enum tree_code code = TREE_CODE (t);
704 hashval_t v = iterative_hash_host_wide_int (code, 0);
705 if (!TYPE_P (t))
706 {
707 v = iterative_hash_host_wide_int (TREE_SIDE_EFFECTS (t)
708 | (TREE_CONSTANT (t) << 1)
709 | (TREE_READONLY (t) << 2)
710 | (TREE_PUBLIC (t) << 3), v);
711 }
712 v = iterative_hash_host_wide_int (TREE_ADDRESSABLE (t)
713 | (TREE_THIS_VOLATILE (t) << 1), v);
714 if (DECL_P (t))
715 v = iterative_hash_host_wide_int (DECL_UNSIGNED (t), v);
716 else if (TYPE_P (t))
717 v = iterative_hash_host_wide_int (TYPE_UNSIGNED (t), v);
718 if (TYPE_P (t))
719 v = iterative_hash_host_wide_int (TYPE_ARTIFICIAL (t), v);
720 else
721 v = iterative_hash_host_wide_int (TREE_NO_WARNING (t), v);
722 v = iterative_hash_host_wide_int (TREE_NOTHROW (t)
723 | (TREE_STATIC (t) << 1)
724 | (TREE_PROTECTED (t) << 2)
725 | (TREE_DEPRECATED (t) << 3), v);
726 if (code != TREE_BINFO)
727 v = iterative_hash_host_wide_int (TREE_PRIVATE (t), v);
728 if (TYPE_P (t))
729 v = iterative_hash_host_wide_int (TYPE_SATURATING (t)
730 | (TYPE_ADDR_SPACE (t) << 1), v);
731 else if (code == SSA_NAME)
732 v = iterative_hash_host_wide_int (SSA_NAME_IS_DEFAULT_DEF (t), v);
733
734 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
735 {
736 v = iterative_hash_host_wide_int (TREE_INT_CST_LOW (t), v);
737 v = iterative_hash_host_wide_int (TREE_INT_CST_HIGH (t), v);
738 }
739
740 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
741 {
742 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
743 v = iterative_hash_host_wide_int (r.cl, v);
744 v = iterative_hash_host_wide_int (r.decimal
745 | (r.sign << 1)
746 | (r.signalling << 2)
747 | (r.canonical << 3), v);
748 v = iterative_hash_host_wide_int (r.uexp, v);
749 for (unsigned i = 0; i < SIGSZ; ++i)
750 v = iterative_hash_host_wide_int (r.sig[i], v);
751 }
752
753 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
754 {
755 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
756 v = iterative_hash_host_wide_int (f.mode, v);
757 v = iterative_hash_host_wide_int (f.data.low, v);
758 v = iterative_hash_host_wide_int (f.data.high, v);
759 }
760
761 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
762 {
763 v = iterative_hash_host_wide_int (DECL_MODE (t), v);
764 v = iterative_hash_host_wide_int (DECL_NONLOCAL (t)
765 | (DECL_VIRTUAL_P (t) << 1)
766 | (DECL_IGNORED_P (t) << 2)
767 | (DECL_ABSTRACT (t) << 3)
768 | (DECL_ARTIFICIAL (t) << 4)
769 | (DECL_USER_ALIGN (t) << 5)
770 | (DECL_PRESERVE_P (t) << 6)
771 | (DECL_EXTERNAL (t) << 7)
772 | (DECL_GIMPLE_REG_P (t) << 8), v);
773 v = iterative_hash_host_wide_int (DECL_ALIGN (t), v);
774 if (code == LABEL_DECL)
775 {
776 v = iterative_hash_host_wide_int (EH_LANDING_PAD_NR (t), v);
777 v = iterative_hash_host_wide_int (LABEL_DECL_UID (t), v);
778 }
779 else if (code == FIELD_DECL)
780 {
781 v = iterative_hash_host_wide_int (DECL_PACKED (t)
782 | (DECL_NONADDRESSABLE_P (t) << 1),
783 v);
784 v = iterative_hash_host_wide_int (DECL_OFFSET_ALIGN (t), v);
785 }
786 else if (code == VAR_DECL)
787 {
788 v = iterative_hash_host_wide_int (DECL_HAS_DEBUG_EXPR_P (t)
789 | (DECL_NONLOCAL_FRAME (t) << 1),
790 v);
791 }
792 if (code == RESULT_DECL
793 || code == PARM_DECL
794 || code == VAR_DECL)
795 {
796 v = iterative_hash_host_wide_int (DECL_BY_REFERENCE (t), v);
797 if (code == VAR_DECL
798 || code == PARM_DECL)
799 v = iterative_hash_host_wide_int (DECL_HAS_VALUE_EXPR_P (t), v);
800 }
801 }
802
803 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
804 v = iterative_hash_host_wide_int (DECL_REGISTER (t), v);
805
806 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
807 {
808 v = iterative_hash_host_wide_int ((DECL_COMMON (t))
809 | (DECL_DLLIMPORT_P (t) << 1)
810 | (DECL_WEAK (t) << 2)
811 | (DECL_SEEN_IN_BIND_EXPR_P (t) << 3)
812 | (DECL_COMDAT (t) << 4)
813 | (DECL_VISIBILITY_SPECIFIED (t) << 6),
814 v);
815 v = iterative_hash_host_wide_int (DECL_VISIBILITY (t), v);
816 if (code == VAR_DECL)
817 {
818 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
819 v = iterative_hash_host_wide_int (DECL_HARD_REGISTER (t)
820 | (DECL_IN_CONSTANT_POOL (t) << 1),
821 v);
822 v = iterative_hash_host_wide_int (DECL_TLS_MODEL (t), v);
823 }
824 if (TREE_CODE (t) == FUNCTION_DECL)
825 v = iterative_hash_host_wide_int (DECL_FINAL_P (t)
826 | (DECL_CXX_CONSTRUCTOR_P (t) << 1)
827 | (DECL_CXX_DESTRUCTOR_P (t) << 2),
828 v);
829 if (VAR_OR_FUNCTION_DECL_P (t))
830 v = iterative_hash_host_wide_int (DECL_INIT_PRIORITY (t), v);
831 }
832
833 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
834 {
835 v = iterative_hash_host_wide_int (DECL_BUILT_IN_CLASS (t), v);
836 v = iterative_hash_host_wide_int (DECL_STATIC_CONSTRUCTOR (t)
837 | (DECL_STATIC_DESTRUCTOR (t) << 1)
838 | (DECL_UNINLINABLE (t) << 2)
839 | (DECL_POSSIBLY_INLINED (t) << 3)
840 | (DECL_IS_NOVOPS (t) << 4)
841 | (DECL_IS_RETURNS_TWICE (t) << 5)
842 | (DECL_IS_MALLOC (t) << 6)
843 | (DECL_IS_OPERATOR_NEW (t) << 7)
844 | (DECL_DECLARED_INLINE_P (t) << 8)
845 | (DECL_STATIC_CHAIN (t) << 9)
846 | (DECL_NO_INLINE_WARNING_P (t) << 10)
847 | (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t) << 11)
848 | (DECL_NO_LIMIT_STACK (t) << 12)
849 | (DECL_DISREGARD_INLINE_LIMITS (t) << 13)
850 | (DECL_PURE_P (t) << 14)
851 | (DECL_LOOPING_CONST_OR_PURE_P (t) << 15), v);
852 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
853 v = iterative_hash_host_wide_int (DECL_FUNCTION_CODE (t), v);
854 if (DECL_STATIC_DESTRUCTOR (t))
855 v = iterative_hash_host_wide_int (DECL_FINI_PRIORITY (t), v);
856 }
857
858 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
859 {
860 v = iterative_hash_host_wide_int (TYPE_MODE (t), v);
861 v = iterative_hash_host_wide_int (TYPE_STRING_FLAG (t)
862 | (TYPE_NO_FORCE_BLK (t) << 1)
863 | (TYPE_NEEDS_CONSTRUCTING (t) << 2)
864 | (TYPE_PACKED (t) << 3)
865 | (TYPE_RESTRICT (t) << 4)
866 | (TYPE_USER_ALIGN (t) << 5)
867 | (TYPE_READONLY (t) << 6), v);
868 if (RECORD_OR_UNION_TYPE_P (t))
869 {
870 v = iterative_hash_host_wide_int (TYPE_TRANSPARENT_AGGR (t)
871 | (TYPE_FINAL_P (t) << 1), v);
872 }
873 else if (code == ARRAY_TYPE)
874 v = iterative_hash_host_wide_int (TYPE_NONALIASED_COMPONENT (t), v);
875 v = iterative_hash_host_wide_int (TYPE_PRECISION (t), v);
876 v = iterative_hash_host_wide_int (TYPE_ALIGN (t), v);
877 v = iterative_hash_host_wide_int ((TYPE_ALIAS_SET (t) == 0
878 || (!in_lto_p
879 && get_alias_set (t) == 0))
880 ? 0 : -1, v);
881 }
882
883 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
884 v = iterative_hash (TRANSLATION_UNIT_LANGUAGE (t),
885 strlen (TRANSLATION_UNIT_LANGUAGE (t)), v);
886
887 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
888 gcc_unreachable ();
889
890 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
891 v = iterative_hash (t, sizeof (struct cl_optimization), v);
892
893 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
894 v = iterative_hash_host_wide_int (IDENTIFIER_HASH_VALUE (t), v);
895
896 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
897 v = iterative_hash (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t), v);
898
899 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
900 {
901 if (POINTER_TYPE_P (t))
902 {
903 /* For pointers factor in the pointed-to type recursively as
904 we cannot recurse through only pointers.
905 ??? We can generalize this by keeping track of the
906 in-SCC edges for each tree (or arbitrarily the first
907 such edge) and hashing that in in a second stage
908 (instead of the quadratic mixing of the SCC we do now). */
909 hashval_t x;
910 unsigned ix;
911 if (streamer_tree_cache_lookup (cache, TREE_TYPE (t), &ix))
912 x = streamer_tree_cache_get_hash (cache, ix);
913 else
914 x = hash_tree (cache, TREE_TYPE (t));
915 v = iterative_hash_hashval_t (x, v);
916 }
917 else if (code != IDENTIFIER_NODE)
918 visit (TREE_TYPE (t));
919 }
920
921 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
922 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
923 visit (VECTOR_CST_ELT (t, i));
924
925 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
926 {
927 visit (TREE_REALPART (t));
928 visit (TREE_IMAGPART (t));
929 }
930
931 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
932 {
933 /* Drop names that were created for anonymous entities. */
934 if (DECL_NAME (t)
935 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
936 && ANON_AGGRNAME_P (DECL_NAME (t)))
937 ;
938 else
939 visit (DECL_NAME (t));
940 if (DECL_FILE_SCOPE_P (t))
941 ;
942 else
943 visit (DECL_CONTEXT (t));
944 }
945
946 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
947 {
948 visit (DECL_SIZE (t));
949 visit (DECL_SIZE_UNIT (t));
950 visit (DECL_ATTRIBUTES (t));
951 if ((code == VAR_DECL
952 || code == PARM_DECL)
953 && DECL_HAS_VALUE_EXPR_P (t))
954 visit (DECL_VALUE_EXPR (t));
955 if (code == VAR_DECL
956 && DECL_HAS_DEBUG_EXPR_P (t))
957 visit (DECL_DEBUG_EXPR (t));
958 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
959 be able to call get_symbol_initial_value. */
960 }
961
962 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
963 {
964 if (code == TYPE_DECL)
965 visit (DECL_ORIGINAL_TYPE (t));
966 visit (DECL_VINDEX (t));
967 }
968
969 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
970 {
971 if (DECL_ASSEMBLER_NAME_SET_P (t))
972 visit (DECL_ASSEMBLER_NAME (t));
973 visit (DECL_SECTION_NAME (t));
974 visit (DECL_COMDAT_GROUP (t));
975 }
976
977 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
978 {
979 visit (DECL_FIELD_OFFSET (t));
980 visit (DECL_BIT_FIELD_TYPE (t));
981 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
982 visit (DECL_FIELD_BIT_OFFSET (t));
983 visit (DECL_FCONTEXT (t));
984 }
985
986 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
987 {
988 visit (DECL_FUNCTION_PERSONALITY (t));
989 /* Do not follow DECL_FUNCTION_SPECIFIC_TARGET. */
990 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
991 }
992
993 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
994 {
995 visit (TYPE_SIZE (t));
996 visit (TYPE_SIZE_UNIT (t));
997 visit (TYPE_ATTRIBUTES (t));
998 visit (TYPE_NAME (t));
999 visit (TYPE_MAIN_VARIANT (t));
1000 if (TYPE_FILE_SCOPE_P (t))
1001 ;
1002 else
1003 visit (TYPE_CONTEXT (t));
1004 visit (TYPE_STUB_DECL (t));
1005 }
1006
1007 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1008 {
1009 if (code == ENUMERAL_TYPE)
1010 visit (TYPE_VALUES (t));
1011 else if (code == ARRAY_TYPE)
1012 visit (TYPE_DOMAIN (t));
1013 else if (RECORD_OR_UNION_TYPE_P (t))
1014 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1015 visit (f);
1016 else if (code == FUNCTION_TYPE
1017 || code == METHOD_TYPE)
1018 visit (TYPE_ARG_TYPES (t));
1019 if (!POINTER_TYPE_P (t))
1020 visit (TYPE_MINVAL (t));
1021 visit (TYPE_MAXVAL (t));
1022 if (RECORD_OR_UNION_TYPE_P (t))
1023 visit (TYPE_BINFO (t));
1024 }
1025
1026 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1027 {
1028 visit (TREE_PURPOSE (t));
1029 visit (TREE_VALUE (t));
1030 visit (TREE_CHAIN (t));
1031 }
1032
1033 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1034 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1035 visit (TREE_VEC_ELT (t, i));
1036
1037 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1038 {
1039 v = iterative_hash_host_wide_int (TREE_OPERAND_LENGTH (t), v);
1040 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1041 visit (TREE_OPERAND (t, i));
1042 }
1043
1044 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1045 {
1046 unsigned i;
1047 tree b;
1048 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1049 visit (b);
1050 visit (BINFO_OFFSET (t));
1051 visit (BINFO_VTABLE (t));
1052 visit (BINFO_VPTR_FIELD (t));
1053 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1054 visit (b);
1055 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1056 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1057 }
1058
1059 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1060 {
1061 unsigned i;
1062 tree index, value;
1063 v = iterative_hash_host_wide_int (CONSTRUCTOR_NELTS (t), v);
1064 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1065 {
1066 visit (index);
1067 visit (value);
1068 }
1069 }
1070
1071 if (code == OMP_CLAUSE)
1072 {
1073 int i;
1074
1075 v = iterative_hash_host_wide_int (OMP_CLAUSE_CODE (t), v);
1076 switch (OMP_CLAUSE_CODE (t))
1077 {
1078 case OMP_CLAUSE_DEFAULT:
1079 v = iterative_hash_host_wide_int (OMP_CLAUSE_DEFAULT_KIND (t), v);
1080 break;
1081 case OMP_CLAUSE_SCHEDULE:
1082 v = iterative_hash_host_wide_int (OMP_CLAUSE_SCHEDULE_KIND (t), v);
1083 break;
1084 case OMP_CLAUSE_DEPEND:
1085 v = iterative_hash_host_wide_int (OMP_CLAUSE_DEPEND_KIND (t), v);
1086 break;
1087 case OMP_CLAUSE_MAP:
1088 v = iterative_hash_host_wide_int (OMP_CLAUSE_MAP_KIND (t), v);
1089 break;
1090 case OMP_CLAUSE_PROC_BIND:
1091 v = iterative_hash_host_wide_int (OMP_CLAUSE_PROC_BIND_KIND (t), v);
1092 break;
1093 case OMP_CLAUSE_REDUCTION:
1094 v = iterative_hash_host_wide_int (OMP_CLAUSE_REDUCTION_CODE (t), v);
1095 break;
1096 default:
1097 break;
1098 }
1099 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1100 visit (OMP_CLAUSE_OPERAND (t, i));
1101 visit (OMP_CLAUSE_CHAIN (t));
1102 }
1103
1104 return v;
1105
1106 #undef visit
1107 }
1108
1109 /* Compare two SCC entries by their hash value for qsorting them. */
1110
1111 static int
1112 scc_entry_compare (const void *p1_, const void *p2_)
1113 {
1114 const scc_entry *p1 = (const scc_entry *) p1_;
1115 const scc_entry *p2 = (const scc_entry *) p2_;
1116 if (p1->hash < p2->hash)
1117 return -1;
1118 else if (p1->hash > p2->hash)
1119 return 1;
1120 return 0;
1121 }
1122
/* Return a hash value for the SCC on the SCC stack from FIRST with
   size SIZE.  Per-member hashes are computed with hash_tree and then
   combined so the result does not depend on the order in which the
   SCC members were visited, making the hash stable across TUs. */

static hashval_t
hash_scc (struct streamer_tree_cache_d *cache, unsigned first, unsigned size)
{
  /* Compute hash values for the SCC members. */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash = hash_tree (cache, sccstack[first+i].t);

  /* A singleton SCC needs no order-independence fixup. */
  if (size == 1)
    return sccstack[first].hash;

  /* Sort the SCC of type, hash pairs so that when we mix in
     all members of the SCC the hash value becomes independent on
     the order we visited the SCC.  Disregard hashes equal to
     the hash of the tree we mix into because we cannot guarantee
     a stable sort for those across different TUs. */
  qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
  hashval_t *tem = XALLOCAVEC (hashval_t, size);
  for (unsigned i = 0; i < size; ++i)
    {
      hashval_t hash = sccstack[first+i].hash;
      hashval_t orig_hash = hash;
      unsigned j;
      /* Skip same hashes. */
      for (j = i + 1;
	   j < size && sccstack[first+j].hash == orig_hash; ++j)
	;
      /* Mix in the hashes of all other members, wrapping around the
	 sorted array; members whose hash equals our own are excluded
	 in both loops (see comment above qsort). */
      for (; j < size; ++j)
	hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
      for (j = 0; sccstack[first+j].hash != orig_hash; ++j)
	hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
      tem[i] = hash;
    }
  /* Install the order-independent per-member hashes and fold them
     into the overall SCC hash. */
  hashval_t scc_hash = 0;
  for (unsigned i = 0; i < size; ++i)
    {
      sccstack[first+i].hash = tem[i];
      scc_hash = iterative_hash_hashval_t (tem[i], scc_hash);
    }
  return scc_hash;
}
1166
/* DFS walk EXPR and stream SCCs of tree bodies if they are not
   already in the streamer cache.  Main routine called for
   each visit of EXPR.  This implements Tarjan-style SCC detection:
   FROM_STATE is the state of the DFS parent (NULL at the root),
   and each node records a DFS number and a low-link in its sccs
   state.  When a node closes an SCC (low == dfsnum), the SCC is
   popped off sccstack, hashed, and streamed as an LTO_tree_scc
   record.  REF_P / THIS_REF_P control whether leaves are emitted
   as references (see lto_output_tree). */

static void
DFS_write_tree (struct output_block *ob, sccs *from_state,
		tree expr, bool ref_p, bool this_ref_p)
{
  unsigned ix;
  sccs **slot;

  /* Handle special cases. */
  if (expr == NULL_TREE)
    return;

  /* Do not DFS walk into indexable trees. */
  if (this_ref_p && tree_is_indexable (expr))
    return;

  /* Check if we already streamed EXPR. */
  if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
    return;

  slot = (sccs **)pointer_map_insert (sccstate, expr);
  sccs *cstate = *slot;
  if (!cstate)
    {
      scc_entry e = { expr, 0 };
      /* Not yet visited.  DFS recurse and push it onto the stack. */
      *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
      sccstack.safe_push (e);
      cstate->dfsnum = next_dfs_num++;
      cstate->low = cstate->dfsnum;

      /* Builtins are streamed by reference only; INTEGER_CSTs without
	 overflow only need their type walked.  Everything else gets a
	 full body walk. */
      if (streamer_handle_as_builtin_p (expr))
	;
      else if (TREE_CODE (expr) == INTEGER_CST
	       && !TREE_OVERFLOW (expr))
	DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
      else
	{
	  DFS_write_tree_body (ob, expr, cstate, ref_p);

	  /* Walk any LTO-specific edges. */
	  if (DECL_P (expr)
	      && TREE_CODE (expr) != FUNCTION_DECL
	      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
	    {
	      /* Handle DECL_INITIAL for symbols. */
	      tree initial = get_symbol_initial_value (ob, expr);
	      DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
	    }
	}

      /* See if we found an SCC. */
      if (cstate->low == cstate->dfsnum)
	{
	  unsigned first, size;
	  tree x;

	  /* Pop the SCC and compute its size. */
	  first = sccstack.length ();
	  do
	    {
	      x = sccstack[--first].t;
	    }
	  while (x != expr);
	  size = sccstack.length () - first;

	  /* No need to compute hashes for LTRANS units, we don't perform
	     any merging there. */
	  hashval_t scc_hash = 0;
	  unsigned scc_entry_len = 0;
	  if (!flag_wpa)
	    {
	      scc_hash = hash_scc (ob->writer_cache, first, size);

	      /* Put the entries with the least number of collisions first.
		 Find the shortest run of equal hashes and rotate it to the
		 front; the reader uses these as merge candidates. */
	      unsigned entry_start = 0;
	      scc_entry_len = size + 1;
	      for (unsigned i = 0; i < size;)
		{
		  unsigned from = i;
		  for (i = i + 1; i < size
		       && (sccstack[first + i].hash
			   == sccstack[first + from].hash); ++i)
		    ;
		  if (i - from < scc_entry_len)
		    {
		      scc_entry_len = i - from;
		      entry_start = from;
		    }
		}
	      for (unsigned i = 0; i < scc_entry_len; ++i)
		{
		  scc_entry tem = sccstack[first + i];
		  sccstack[first + i] = sccstack[first + entry_start + i];
		  sccstack[first + entry_start + i] = tem;
		}
	    }

	  /* Write LTO_tree_scc. */
	  streamer_write_record_start (ob, LTO_tree_scc);
	  streamer_write_uhwi (ob, size);
	  streamer_write_uhwi (ob, scc_hash);

	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
	     All INTEGER_CSTs need to be handled this way as we need
	     their type to materialize them.  Also builtins are handled
	     this way.
	     ??? We still wrap these in LTO_tree_scc so at the
	     input side we can properly identify the tree we want
	     to ultimatively return. */
	  if (size == 1)
	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
	  else
	    {
	      /* Write the size of the SCC entry candidates. */
	      streamer_write_uhwi (ob, scc_entry_len);

	      /* Write all headers and populate the streamer cache. */
	      for (unsigned i = 0; i < size; ++i)
		{
		  hashval_t hash = sccstack[first+i].hash;
		  tree t = sccstack[first+i].t;
		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
							      t, hash, &ix);
		  gcc_assert (!exists_p);

		  if (!lto_is_streamable (t))
		    internal_error ("tree code %qs is not supported "
				    "in LTO streams",
				    get_tree_code_name (TREE_CODE (t)));

		  gcc_checking_assert (!streamer_handle_as_builtin_p (t));

		  /* Write the header, containing everything needed to
		     materialize EXPR on the reading side. */
		  streamer_write_tree_header (ob, t);
		}

	      /* Write the bitpacks and tree references. */
	      for (unsigned i = 0; i < size; ++i)
		{
		  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);

		  /* Mark the end of the tree. */
		  streamer_write_zero (ob);
		}
	    }

	  /* Finally truncate the vector. */
	  sccstack.truncate (first);

	  /* Propagate our low-link to the DFS parent before returning. */
	  if (from_state)
	    from_state->low = MIN (from_state->low, cstate->low);
	  return;
	}

      if (from_state)
	from_state->low = MIN (from_state->low, cstate->low);
    }
  /* Back-edge or cross-edge to a node still on the stack: update the
     parent's low-link with the target's DFS number. */
  gcc_checking_assert (from_state);
  if (cstate->dfsnum < from_state->dfsnum)
    from_state->low = MIN (cstate->dfsnum, from_state->low);
}
1333
1334
/* Emit the physical representation of tree node EXPR to output block
   OB.  If THIS_REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR. */

void
lto_output_tree (struct output_block *ob, tree expr,
		 bool ref_p, bool this_ref_p)
{
  unsigned ix;
  bool existed_p;

  /* NULL trees are streamed as a bare LTO_null record. */
  if (expr == NULL_TREE)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  /* Indexable trees (decls, types in the global cache) are streamed
     as references into per-section index tables instead of by body. */
  if (this_ref_p && tree_is_indexable (expr))
    {
      lto_output_tree_ref (ob, expr);
      return;
    }

  existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
  if (existed_p)
    {
      /* If a node has already been streamed out, make sure that
	 we don't write it more than once.  Otherwise, the reader
	 will instantiate two different nodes for the same object. */
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
  else
    {
      /* This is the first time we see EXPR, write all reachable
	 trees to OB. */
      static bool in_dfs_walk;

      /* Protect against recursion which means disconnect between
	 what tree edges we walk in the DFS walk and what edges
	 we stream out. */
      gcc_assert (!in_dfs_walk);

      /* Start the DFS walk.  Set up the SCC state (pointer map,
	 obstack and DFS numbering) used by DFS_write_tree and tear
	 it down again afterwards. */
      in_dfs_walk = true;
      sccstate = pointer_map_create ();
      gcc_obstack_init (&sccstate_obstack);
      next_dfs_num = 1;
      DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
      sccstack.release ();
      pointer_map_destroy (sccstate);
      obstack_free (&sccstate_obstack, NULL);
      in_dfs_walk = false;

      /* Finally append a reference to the tree we were writing.
	 ??? If expr ended up as a singleton we could have
	 inlined it here and avoid outputting a reference. */
      existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
      gcc_assert (existed_p);
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
}
1406
1407
1408 /* Output to OB a list of try/catch handlers starting with FIRST. */
1409
1410 static void
1411 output_eh_try_list (struct output_block *ob, eh_catch first)
1412 {
1413 eh_catch n;
1414
1415 for (n = first; n; n = n->next_catch)
1416 {
1417 streamer_write_record_start (ob, LTO_eh_catch);
1418 stream_write_tree (ob, n->type_list, true);
1419 stream_write_tree (ob, n->filter_list, true);
1420 stream_write_tree (ob, n->label, true);
1421 }
1422
1423 streamer_write_record_start (ob, LTO_null);
1424 }
1425
1426
1427 /* Output EH region R in function FN to OB. CURR_RN is the slot index
1428 that is being emitted in FN->EH->REGION_ARRAY. This is used to
1429 detect EH region sharing. */
1430
1431 static void
1432 output_eh_region (struct output_block *ob, eh_region r)
1433 {
1434 enum LTO_tags tag;
1435
1436 if (r == NULL)
1437 {
1438 streamer_write_record_start (ob, LTO_null);
1439 return;
1440 }
1441
1442 if (r->type == ERT_CLEANUP)
1443 tag = LTO_ert_cleanup;
1444 else if (r->type == ERT_TRY)
1445 tag = LTO_ert_try;
1446 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1447 tag = LTO_ert_allowed_exceptions;
1448 else if (r->type == ERT_MUST_NOT_THROW)
1449 tag = LTO_ert_must_not_throw;
1450 else
1451 gcc_unreachable ();
1452
1453 streamer_write_record_start (ob, tag);
1454 streamer_write_hwi (ob, r->index);
1455
1456 if (r->outer)
1457 streamer_write_hwi (ob, r->outer->index);
1458 else
1459 streamer_write_zero (ob);
1460
1461 if (r->inner)
1462 streamer_write_hwi (ob, r->inner->index);
1463 else
1464 streamer_write_zero (ob);
1465
1466 if (r->next_peer)
1467 streamer_write_hwi (ob, r->next_peer->index);
1468 else
1469 streamer_write_zero (ob);
1470
1471 if (r->type == ERT_TRY)
1472 {
1473 output_eh_try_list (ob, r->u.eh_try.first_catch);
1474 }
1475 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1476 {
1477 stream_write_tree (ob, r->u.allowed.type_list, true);
1478 stream_write_tree (ob, r->u.allowed.label, true);
1479 streamer_write_uhwi (ob, r->u.allowed.filter);
1480 }
1481 else if (r->type == ERT_MUST_NOT_THROW)
1482 {
1483 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1484 bitpack_d bp = bitpack_create (ob->main_stream);
1485 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1486 streamer_write_bitpack (&bp);
1487 }
1488
1489 if (r->landing_pads)
1490 streamer_write_hwi (ob, r->landing_pads->index);
1491 else
1492 streamer_write_zero (ob);
1493 }
1494
1495
1496 /* Output landing pad LP to OB. */
1497
1498 static void
1499 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1500 {
1501 if (lp == NULL)
1502 {
1503 streamer_write_record_start (ob, LTO_null);
1504 return;
1505 }
1506
1507 streamer_write_record_start (ob, LTO_eh_landing_pad);
1508 streamer_write_hwi (ob, lp->index);
1509 if (lp->next_lp)
1510 streamer_write_hwi (ob, lp->next_lp->index);
1511 else
1512 streamer_write_zero (ob);
1513
1514 if (lp->region)
1515 streamer_write_hwi (ob, lp->region->index);
1516 else
1517 streamer_write_zero (ob);
1518
1519 stream_write_tree (ob, lp->post_landing_pad, true);
1520 }
1521
1522
/* Output the existing eh_table of function FN to OB.  Emits the EH
   region tree root index, all regions, all landing pads, the runtime
   type table and the action-chain table, terminated by LTO_null. */

static void
output_eh_regions (struct output_block *ob, struct function *fn)
{
  if (fn->eh && fn->eh->region_tree)
    {
      unsigned i;
      eh_region eh;
      eh_landing_pad lp;
      tree ttype;

      streamer_write_record_start (ob, LTO_eh_table);

      /* Emit the index of the root of the EH region tree. */
      streamer_write_hwi (ob, fn->eh->region_tree->index);

      /* Emit all the EH regions in the region array. */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
	output_eh_region (ob, eh);

      /* Emit all landing pads. */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
	output_eh_lp (ob, lp);

      /* Emit all the runtime type data. */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
	stream_write_tree (ob, ttype, true);

      /* Emit the table of action chains.  The representation differs
	 for the ARM EABI unwinder (trees) vs. everything else (bytes). */
      if (targetm.arm_eabi_unwinder)
	{
	  tree t;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
	    stream_write_tree (ob, t, true);
	}
      else
	{
	  uchar c;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
	    streamer_write_char_stream (ob->main_stream, c);
	}
    }

  /* The LTO_null either terminates the record or indicates that there
     are no eh_records at all. */
  streamer_write_record_start (ob, LTO_null);
}
1576
1577
1578 /* Output all of the active ssa names to the ssa_names stream. */
1579
1580 static void
1581 output_ssa_names (struct output_block *ob, struct function *fn)
1582 {
1583 unsigned int i, len;
1584
1585 len = vec_safe_length (SSANAMES (fn));
1586 streamer_write_uhwi (ob, len);
1587
1588 for (i = 1; i < len; i++)
1589 {
1590 tree ptr = (*SSANAMES (fn))[i];
1591
1592 if (ptr == NULL_TREE
1593 || SSA_NAME_IN_FREE_LIST (ptr)
1594 || virtual_operand_p (ptr))
1595 continue;
1596
1597 streamer_write_uhwi (ob, i);
1598 streamer_write_char_stream (ob->main_stream,
1599 SSA_NAME_IS_DEFAULT_DEF (ptr));
1600 if (SSA_NAME_VAR (ptr))
1601 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1602 else
1603 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1604 stream_write_tree (ob, TREE_TYPE (ptr), true);
1605 }
1606
1607 streamer_write_zero (ob);
1608 }
1609
1610
1611 /* Output the cfg. */
1612
1613 static void
1614 output_cfg (struct output_block *ob, struct function *fn)
1615 {
1616 struct lto_output_stream *tmp_stream = ob->main_stream;
1617 basic_block bb;
1618
1619 ob->main_stream = ob->cfg_stream;
1620
1621 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1622 profile_status_for_fn (fn));
1623
1624 /* Output the number of the highest basic block. */
1625 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1626
1627 FOR_ALL_BB_FN (bb, fn)
1628 {
1629 edge_iterator ei;
1630 edge e;
1631
1632 streamer_write_hwi (ob, bb->index);
1633
1634 /* Output the successors and the edge flags. */
1635 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1636 FOR_EACH_EDGE (e, ei, bb->succs)
1637 {
1638 streamer_write_uhwi (ob, e->dest->index);
1639 streamer_write_hwi (ob, e->probability);
1640 streamer_write_gcov_count (ob, e->count);
1641 streamer_write_uhwi (ob, e->flags);
1642 }
1643 }
1644
1645 streamer_write_hwi (ob, -1);
1646
1647 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1648 while (bb->next_bb)
1649 {
1650 streamer_write_hwi (ob, bb->next_bb->index);
1651 bb = bb->next_bb;
1652 }
1653
1654 streamer_write_hwi (ob, -1);
1655
1656 /* ??? The cfgloop interface is tied to cfun. */
1657 gcc_assert (cfun == fn);
1658
1659 /* Output the number of loops. */
1660 streamer_write_uhwi (ob, number_of_loops (fn));
1661
1662 /* Output each loop, skipping the tree root which has number zero. */
1663 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1664 {
1665 struct loop *loop = get_loop (fn, i);
1666
1667 /* Write the index of the loop header. That's enough to rebuild
1668 the loop tree on the reader side. Stream -1 for an unused
1669 loop entry. */
1670 if (!loop)
1671 {
1672 streamer_write_hwi (ob, -1);
1673 continue;
1674 }
1675 else
1676 streamer_write_hwi (ob, loop->header->index);
1677
1678 /* Write everything copy_loop_info copies. */
1679 streamer_write_enum (ob->main_stream,
1680 loop_estimation, EST_LAST, loop->estimate_state);
1681 streamer_write_hwi (ob, loop->any_upper_bound);
1682 if (loop->any_upper_bound)
1683 {
1684 streamer_write_uhwi (ob, loop->nb_iterations_upper_bound.low);
1685 streamer_write_hwi (ob, loop->nb_iterations_upper_bound.high);
1686 }
1687 streamer_write_hwi (ob, loop->any_estimate);
1688 if (loop->any_estimate)
1689 {
1690 streamer_write_uhwi (ob, loop->nb_iterations_estimate.low);
1691 streamer_write_hwi (ob, loop->nb_iterations_estimate.high);
1692 }
1693
1694 /* Write OMP SIMD related info. */
1695 streamer_write_hwi (ob, loop->safelen);
1696 streamer_write_hwi (ob, loop->dont_vectorize);
1697 streamer_write_hwi (ob, loop->force_vectorize);
1698 stream_write_tree (ob, loop->simduid, true);
1699 }
1700
1701 ob->main_stream = tmp_stream;
1702 }
1703
1704
/* Create the header in the file using OB.  If the section type is for
   a function, set FN to the decl for that function.  Writes the
   section header followed by the CFG (function sections only), main
   and string streams, in that fixed order. */

void
produce_asm (struct output_block *ob, tree fn)
{
  enum lto_section_type section_type = ob->section_type;
  struct lto_function_header header;
  char *section_name;
  struct lto_output_stream *header_stream;

  /* Function body sections are named after the function's assembler
     name; other sections get a generic name. */
  if (section_type == LTO_section_function_body)
    {
      const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
      section_name = lto_get_section_name (section_type, name, NULL);
    }
  else
    section_name = lto_get_section_name (section_type, NULL, NULL);

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header is stream computed here. */
  memset (&header, 0, sizeof (struct lto_function_header));

  /* Write the header. */
  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  header.compressed_size = 0;

  /* Only function bodies carry a CFG stream. */
  if (section_type == LTO_section_function_body)
    header.cfg_size = ob->cfg_stream->total_size;
  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  header_stream = XCNEW (struct lto_output_stream);
  lto_output_data_stream (header_stream, &header, sizeof header);
  lto_write_stream (header_stream);
  free (header_stream);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  The stream order must match the sizes recorded
     in the header above. */
  if (section_type == LTO_section_function_body)
    lto_write_stream (ob->cfg_stream);
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();
}
1755
1756
/* Output the base body of struct function FN using output block OB.
   Streams the static chain, local decls, IL properties, a bitpack of
   function attributes and the start/end source locations.  The exact
   order and widths here define the wire format and must match the
   reader. */

static void
output_struct_function_base (struct output_block *ob, struct function *fn)
{
  struct bitpack_d bp;
  unsigned i;
  tree t;

  /* Output the static chain and non-local goto save area. */
  stream_write_tree (ob, fn->static_chain_decl, true);
  stream_write_tree (ob, fn->nonlocal_goto_save_area, true);

  /* Output all the local variables in the function. */
  streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
  FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
    stream_write_tree (ob, t, true);

  /* Output current IL state of the function. */
  streamer_write_uhwi (ob, fn->curr_properties);

  /* Write all the attributes for FN.  Bit order is significant. */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, fn->is_thunk, 1);
  bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
  bp_pack_value (&bp, fn->returns_pcc_struct, 1);
  bp_pack_value (&bp, fn->returns_struct, 1);
  bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
  bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
  bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
  bp_pack_value (&bp, fn->after_inlining, 1);
  bp_pack_value (&bp, fn->stdarg, 1);
  bp_pack_value (&bp, fn->has_nonlocal_label, 1);
  bp_pack_value (&bp, fn->calls_alloca, 1);
  bp_pack_value (&bp, fn->calls_setjmp, 1);
  bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
  bp_pack_value (&bp, fn->has_simduid_loops, 1);
  bp_pack_value (&bp, fn->va_list_fpr_size, 8);
  bp_pack_value (&bp, fn->va_list_gpr_size, 8);

  /* Output the function start and end loci. */
  stream_output_location (ob, &bp, fn->function_start_locus);
  stream_output_location (ob, &bp, fn->function_end_locus);

  streamer_write_bitpack (&bp);
}
1803
1804
/* Output the body of function NODE->DECL to its own
   LTO_section_function_body section.  Abstract functions (no gimple
   body) get only the decl parts needed for debug info. */

static void
output_function (struct cgraph_node *node)
{
  tree function;
  struct function *fn;
  basic_block bb;
  struct output_block *ob;

  function = node->decl;
  fn = DECL_STRUCT_FUNCTION (function);
  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->cgraph_node = node;

  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  /* Set current_function_decl and cfun. */
  push_cfun (fn);

  /* Make string 0 be a NULL string. */
  streamer_write_char_stream (ob->string_stream, 0);

  streamer_write_record_start (ob, LTO_function);

  /* Output decls for parameters and args. */
  stream_write_tree (ob, DECL_RESULT (function), true);
  streamer_write_chain (ob, DECL_ARGUMENTS (function), true);

  /* Output DECL_INITIAL for the function, which contains the tree of
     lexical scopes. */
  stream_write_tree (ob, DECL_INITIAL (function), true);

  /* We also stream abstract functions where we stream only stuff needed for
     debug info. */
  if (gimple_has_body_p (function))
    {
      /* Flag a streamed body for the reader. */
      streamer_write_uhwi (ob, 1);
      output_struct_function_base (ob, fn);

      /* Output all the SSA names used in the function. */
      output_ssa_names (ob, fn);

      /* Output any exception handling regions. */
      output_eh_regions (ob, fn);


      /* We will renumber the statements.  The code that does this uses
	 the same ordering that we use for serializing them so we can use
	 the same code on the other end and not have to write out the
	 statement numbers.  We do not assign UIDs to PHIs here because
	 virtual PHIs get re-computed on-the-fly which would make numbers
	 inconsistent. */
      set_gimple_stmt_max_uid (cfun, 0);
      FOR_ALL_BB_FN (bb, cfun)
	{
	  gimple_stmt_iterator gsi;
	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);

	      /* Virtual PHIs are not going to be streamed. */
	      if (!virtual_operand_p (gimple_phi_result (stmt)))
		gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}
      /* To avoid keeping duplicate gimple IDs in the statements, renumber
	 virtual phis now. */
      FOR_ALL_BB_FN (bb, cfun)
	{
	  gimple_stmt_iterator gsi;
	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      if (virtual_operand_p (gimple_phi_result (stmt)))
		gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}

      /* Output the code for the function. */
      FOR_ALL_BB_FN (bb, fn)
	output_bb (ob, bb, fn);

      /* The terminator for this function. */
      streamer_write_record_start (ob, LTO_null);

      output_cfg (ob, fn);

      pop_cfun ();
    }
  else
    /* No body: tell the reader so. */
    streamer_write_uhwi (ob, 0);

  /* Create a section to hold the pickled output of this function. */
  produce_asm (ob, function);

  destroy_output_block (ob);
}
1910
1911
/* Emit toplevel asms into an LTO_section_asm section.  Each asm node
   is streamed as its string plus its order; a NULL_TREE string
   terminates the list.  Does nothing when there are no asm nodes. */

void
lto_output_toplevel_asms (void)
{
  struct output_block *ob;
  struct asm_node *can;
  char *section_name;
  struct lto_output_stream *header_stream;
  struct lto_asm_header header;

  if (! asm_nodes)
    return;

  ob = create_output_block (LTO_section_asm);

  /* Make string 0 be a NULL string. */
  streamer_write_char_stream (ob->string_stream, 0);

  for (can = asm_nodes; can; can = can->next)
    {
      streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
      streamer_write_hwi (ob, can->order);
    }

  /* Terminate the asm list. */
  streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);

  section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header stream is computed here. */
  memset (&header, 0, sizeof (header));

  /* Write the header. */
  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  header_stream = XCNEW (struct lto_output_stream);
  lto_output_data_stream (header_stream, &header, sizeof (header));
  lto_write_stream (header_stream);
  free (header_stream);

  /* Put all of the gimple and the string table out the asm file as a
     block of text. */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  destroy_output_block (ob);
}
1967
1968
/* Copy the function body of NODE without deserializing: the section
   data read at WPA time is emitted verbatim into a new section, and
   the per-function decl streams are copied from the in-state to the
   out-state so indices stay valid. */

static void
copy_function (struct cgraph_node *node)
{
  tree function = node->decl;
  struct lto_file_decl_data *file_data = node->lto_file_data;
  struct lto_output_stream *output_stream = XCNEW (struct lto_output_stream);
  const char *data;
  size_t len;
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
  char *section_name =
    lto_get_section_name (LTO_section_function_body, name, NULL);
  size_t i, j;
  struct lto_in_decl_state *in_state;
  struct lto_out_decl_state *out_state = lto_get_out_decl_state ();

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* We may have renamed the declaration, e.g., a static function. */
  name = lto_get_decl_name_mapping (file_data, name);

  data = lto_get_section_data (file_data, LTO_section_function_body,
                               name, &len);
  gcc_assert (data);

  /* Do a bit copy of the function body. */
  lto_output_data_stream (output_stream, data, len);
  lto_write_stream (output_stream);

  /* Copy decls. */
  in_state =
    lto_get_function_in_decl_state (node->lto_file_data, function);
  gcc_assert (in_state);

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      size_t n = in_state->streams[i].size;
      tree *trees = in_state->streams[i].trees;
      struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);

      /* The out state must have the same indices as the in state.
	 So just copy the vector.  All the encoders in the in state
	 must be empty where we reach here. */
      gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
      encoder->trees.reserve_exact (n);
      for (j = 0; j < n; j++)
	encoder->trees.safe_push (trees[j]);
    }

  lto_free_section_data (file_data, LTO_section_function_body, name,
			 data, len);
  free (output_stream);
  lto_end_section ();
}
2025
2026 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2027
2028 static tree
2029 wrap_refs (tree *tp, int *ws, void *)
2030 {
2031 tree t = *tp;
2032 if (handled_component_p (t)
2033 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL)
2034 {
2035 tree decl = TREE_OPERAND (t, 0);
2036 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2037 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2038 build1 (ADDR_EXPR, ptrtype, decl),
2039 build_int_cst (ptrtype, 0));
2040 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2041 *ws = 0;
2042 }
2043 else if (TREE_CODE (t) == CONSTRUCTOR)
2044 ;
2045 else if (!EXPR_P (t))
2046 *ws = 0;
2047 return NULL_TREE;
2048 }
2049
2050 /* Main entry point from the pass manager. */
2051
void
lto_output (void)
{
  struct lto_out_decl_state *decl_state;
#ifdef ENABLE_CHECKING
  /* Set of DECL_UIDs whose bodies were already emitted; used only to
     assert below that no function body is streamed twice.  */
  bitmap output = lto_bitmap_alloc ();
#endif
  int i, n_nodes;
  lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* Initialize the streamer.  */
  lto_streamer_init ();

  n_nodes = lto_symtab_encoder_size (encoder);
  /* Process only the functions with bodies.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
	{
	  if (lto_symtab_encoder_encode_body_p (encoder, node)
	      && !node->alias)
	    {
#ifdef ENABLE_CHECKING
	      gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
	      bitmap_set_bit (output, DECL_UID (node->decl));
#endif
	      /* Each function body gets its own out-decl state; it is
		 recorded afterwards so produce_asm_for_decls can emit
		 its reference tables.  */
	      decl_state = lto_new_out_decl_state ();
	      lto_push_out_decl_state (decl_state);
	      /* If the body is available in GIMPLE (or we are not in
		 WPA mode), stream it out; otherwise bit-copy the
		 already-streamed section (see copy_function).  */
	      if (gimple_has_body_p (node->decl) || !flag_wpa)
		output_function (node);
	      else
		copy_function (node);
	      gcc_assert (lto_get_out_decl_state () == decl_state);
	      lto_pop_out_decl_state ();
	      lto_record_function_out_decl_state (node->decl, decl_state);
	    }
	}
      else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
	{
	  /* Wrap symbol references inside the ctor in a type
	     preserving MEM_REF.  Only needed when compiling from
	     source (!in_lto_p); LTO input already has them wrapped.  */
	  tree ctor = DECL_INITIAL (node->decl);
	  if (ctor && !in_lto_p)
	    walk_tree (&ctor, wrap_refs, NULL, NULL);
	}
    }

  /* Emit the callgraph after emitting function bodies.  This needs to
     be done now to make sure that all the statements in every function
     have been renumbered so that edges can be associated with call
     statements using the statement UIDs.  */
  output_symtab ();

#ifdef ENABLE_CHECKING
  lto_bitmap_free (output);
#endif
}
2110
2111 /* Write each node in encoded by ENCODER to OB, as well as those reachable
2112 from it and required for correct representation of its semantics.
2113 Each node in ENCODER must be a global declaration or a type. A node
2114 is written only once, even if it appears multiple times in the
2115 vector. Certain transitively-reachable nodes, such as those
2116 representing expressions, may be duplicated, but such nodes
2117 must not appear in ENCODER itself. */
2118
2119 static void
2120 write_global_stream (struct output_block *ob,
2121 struct lto_tree_ref_encoder *encoder)
2122 {
2123 tree t;
2124 size_t index;
2125 const size_t size = lto_tree_ref_encoder_size (encoder);
2126
2127 for (index = 0; index < size; index++)
2128 {
2129 t = lto_tree_ref_encoder_get_tree (encoder, index);
2130 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2131 stream_write_tree (ob, t, false);
2132 }
2133 }
2134
2135
2136 /* Write a sequence of indices into the globals vector corresponding
2137 to the trees in ENCODER. These are used by the reader to map the
2138 indices used to refer to global entities within function bodies to
2139 their referents. */
2140
2141 static void
2142 write_global_references (struct output_block *ob,
2143 struct lto_output_stream *ref_stream,
2144 struct lto_tree_ref_encoder *encoder)
2145 {
2146 tree t;
2147 uint32_t index;
2148 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2149
2150 /* Write size as 32-bit unsigned. */
2151 lto_output_data_stream (ref_stream, &size, sizeof (int32_t));
2152
2153 for (index = 0; index < size; index++)
2154 {
2155 uint32_t slot_num;
2156
2157 t = lto_tree_ref_encoder_get_tree (encoder, index);
2158 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2159 gcc_assert (slot_num != (unsigned)-1);
2160 lto_output_data_stream (ref_stream, &slot_num, sizeof slot_num);
2161 }
2162 }
2163
2164
2165 /* Write all the streams in an lto_out_decl_state STATE using
2166 output block OB and output stream OUT_STREAM. */
2167
2168 void
2169 lto_output_decl_state_streams (struct output_block *ob,
2170 struct lto_out_decl_state *state)
2171 {
2172 int i;
2173
2174 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2175 write_global_stream (ob, &state->streams[i]);
2176 }
2177
2178
2179 /* Write all the references in an lto_out_decl_state STATE using
2180 output block OB and output stream OUT_STREAM. */
2181
2182 void
2183 lto_output_decl_state_refs (struct output_block *ob,
2184 struct lto_output_stream *out_stream,
2185 struct lto_out_decl_state *state)
2186 {
2187 unsigned i;
2188 uint32_t ref;
2189 tree decl;
2190
2191 /* Write reference to FUNCTION_DECL. If there is not function,
2192 write reference to void_type_node. */
2193 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2194 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2195 gcc_assert (ref != (unsigned)-1);
2196 lto_output_data_stream (out_stream, &ref, sizeof (uint32_t));
2197
2198 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2199 write_global_references (ob, out_stream, &state->streams[i]);
2200 }
2201
2202
2203 /* Return the written size of STATE. */
2204
2205 static size_t
2206 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2207 {
2208 int i;
2209 size_t size;
2210
2211 size = sizeof (int32_t); /* fn_ref. */
2212 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2213 {
2214 size += sizeof (int32_t); /* vector size. */
2215 size += (lto_tree_ref_encoder_size (&state->streams[i])
2216 * sizeof (int32_t));
2217 }
2218 return size;
2219 }
2220
2221
2222 /* Write symbol T into STREAM in CACHE. SEEN specifies symbols we wrote
2223 so far. */
2224
static void
write_symbol (struct streamer_tree_cache_d *cache,
	      struct lto_output_stream *stream,
	      tree t, struct pointer_set_t *seen, bool alias)
{
  const char *name;
  enum gcc_plugin_symbol_kind kind;
  enum gcc_plugin_symbol_visibility visibility;
  unsigned slot_num;
  unsigned HOST_WIDEST_INT size;
  const char *comdat;
  unsigned char c;

  /* None of the following kinds of symbols are needed in the
     symbol table.  */
  if (!TREE_PUBLIC (t)
      || is_builtin_fn (t)
      || DECL_ABSTRACT (t)
      || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
    return;
  gcc_assert (TREE_CODE (t) != RESULT_DECL);

  gcc_assert (TREE_CODE (t) == VAR_DECL
	      || TREE_CODE (t) == FUNCTION_DECL);

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));

  /* This behaves like assemble_name_raw in varasm.c, performing the
     same name manipulations that ASM_OUTPUT_LABELREF does.  */
  name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));

  /* Emit each assembler name only once, even if several decls map to
     it.  */
  if (pointer_set_contains (seen, name))
    return;
  pointer_set_insert (seen, name);

  /* The decl must already be in the writer cache; its slot number
     links this symbol-table entry to the streamed tree.  */
  streamer_tree_cache_lookup (cache, t, &slot_num);
  gcc_assert (slot_num != (unsigned)-1);

  /* Classify the symbol for the linker plugin: undefined references
     vs. definitions, with weak/common refinements.  */
  if (DECL_EXTERNAL (t))
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKUNDEF;
      else
	kind = GCCPK_UNDEF;
    }
  else
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKDEF;
      else if (DECL_COMMON (t))
	kind = GCCPK_COMMON;
      else
	kind = GCCPK_DEF;

      /* When something is defined, it should have node attached.  */
      gcc_assert (alias || TREE_CODE (t) != VAR_DECL
		  || varpool_get_node (t)->definition);
      gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
		  || (cgraph_get_node (t)
		      && cgraph_get_node (t)->definition));
    }

  /* Imitate what default_elf_asm_output_external does.
     When a symbol is external, we need to output it with DEFAULT
     visibility when compiling with -fvisibility=default, but with
     HIDDEN visibility when the symbol has attribute
     (visibility("hidden")) specified.  targetm.binds_local_p checks
     DECL_VISIBILITY_SPECIFIED and gets this right.  */

  if (DECL_EXTERNAL (t)
      && !targetm.binds_local_p (t))
    visibility = GCCPV_DEFAULT;
  else
    switch (DECL_VISIBILITY (t))
      {
      case VISIBILITY_DEFAULT:
	visibility = GCCPV_DEFAULT;
	break;
      case VISIBILITY_PROTECTED:
	visibility = GCCPV_PROTECTED;
	break;
      case VISIBILITY_HIDDEN:
	visibility = GCCPV_HIDDEN;
	break;
      case VISIBILITY_INTERNAL:
	visibility = GCCPV_INTERNAL;
	break;
      }

  /* Only common symbols carry a size; it lets the plugin pick the
     largest common definition.  */
  if (kind == GCCPK_COMMON
      && DECL_SIZE_UNIT (t)
      && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
    size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
  else
    size = 0;

  if (DECL_ONE_ONLY (t))
    comdat = IDENTIFIER_POINTER (DECL_COMDAT_GROUP (t));
  else
    comdat = "";

  /* Emit the fixed-layout record the plugin reader expects:
     NUL-terminated name, NUL-terminated comdat group, one byte each
     for kind and visibility, 8 bytes of size and 4 bytes of slot
     number.  NOTE(review): the 8/4 literals assume
     sizeof (unsigned HOST_WIDEST_INT) >= 8 and 32-bit SLOT_NUM, and
     the bytes go out in host byte order — presumably the reader runs
     on the same host; confirm against lto-plugin.  */
  lto_output_data_stream (stream, name, strlen (name) + 1);
  lto_output_data_stream (stream, comdat, strlen (comdat) + 1);
  c = (unsigned char) kind;
  lto_output_data_stream (stream, &c, 1);
  c = (unsigned char) visibility;
  lto_output_data_stream (stream, &c, 1);
  lto_output_data_stream (stream, &size, 8);
  lto_output_data_stream (stream, &slot_num, 4);
}
2335
2336 /* Return true if NODE should appear in the plugin symbol table. */
2337
bool
output_symbol_p (symtab_node *node)
{
  struct cgraph_node *cnode;
  /* Only real symbols (not e.g. abstract or discarded nodes) can
     appear in the plugin symbol table.  */
  if (!symtab_real_symbol_p (node))
    return false;
  /* We keep external functions in symtab for sake of inlining
     and devirtualization.  We do not want to see them in symbol table as
     references unless they are really used.  */
  cnode = dyn_cast <cgraph_node *> (node);
  if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
      && cnode->callers)
    return true;

  /* Ignore all references from external vars initializers - they are not really
     part of the compilation unit until they are used by folding.  Some symbols,
     like references to external construction vtables, can not be referred to at
     all.  We decide this at can_refer_decl_in_current_unit_p.  */
  if (!node->definition || DECL_EXTERNAL (node->decl))
    {
      int i;
      struct ipa_ref *ref;
      /* An undefined symbol is worth listing only if some function, or
	 some non-external variable, actually refers to it (aliases do
	 not count as uses).  */
      for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list,
						  i, ref); i++)
	{
	  if (ref->use == IPA_REF_ALIAS)
	    continue;
	  if (is_a <cgraph_node *> (ref->referring))
	    return true;
	  if (!DECL_EXTERNAL (ref->referring->decl))
	    return true;
	}
      return false;
    }
  return true;
}
2374
2375
2376 /* Write an IL symbol table to OB.
2377 SET and VSET are cgraph/varpool node sets we are outputting. */
2378
static void
produce_symtab (struct output_block *ob)
{
  struct streamer_tree_cache_d *cache = ob->writer_cache;
  char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
  /* Set of assembler names already written; write_symbol uses it to
     skip duplicates.  */
  struct pointer_set_t *seen;
  struct lto_output_stream stream;
  lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
  lto_symtab_encoder_iterator lsei;

  lto_begin_section (section_name, false);
  free (section_name);

  seen = pointer_set_create ();
  memset (&stream, 0, sizeof (stream));

  /* Write the symbol table.
     First write everything defined and then all declarations.
     This is necessary to handle cases where we have duplicated symbols.  */
  /* Pass 1: defined symbols (non-DECL_EXTERNAL).  */
  for (lsei = lsei_start (encoder);
       !lsei_end_p (lsei); lsei_next (&lsei))
    {
      symtab_node *node = lsei_node (lsei);

      if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
	continue;
      write_symbol (cache, &stream, node->decl, seen, false);
    }
  /* Pass 2: external declarations.  A name already emitted as a
     definition in pass 1 is skipped via SEEN.  */
  for (lsei = lsei_start (encoder);
       !lsei_end_p (lsei); lsei_next (&lsei))
    {
      symtab_node *node = lsei_node (lsei);

      if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
	continue;
      write_symbol (cache, &stream, node->decl, seen, false);
    }

  lto_write_stream (&stream);
  pointer_set_destroy (seen);

  lto_end_section ();
}
2422
2423
2424 /* This pass is run after all of the functions are serialized and all
2425 of the IPA passes have written their serialized forms. This pass
2426 causes the vector of all of the global decls and types used from
2427 this file to be written in to a section that can then be read in to
2428 recover these on other side. */
2429
void
produce_asm_for_decls (void)
{
  struct lto_out_decl_state *out_state;
  struct lto_out_decl_state *fn_out_state;
  struct lto_decl_header header;
  char *section_name;
  struct output_block *ob;
  struct lto_output_stream *header_stream, *decl_state_stream;
  unsigned idx, num_fns;
  size_t decl_state_size;
  int32_t num_decl_states;

  ob = create_output_block (LTO_section_decls);
  /* Mark the block as streaming global (not per-function) trees.  */
  ob->global = true;

  memset (&header, 0, sizeof (struct lto_decl_header));

  section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* All aliases must have been resolved before this point.  */
  gcc_assert (!alias_pairs);

  /* Get rid of the global decl state hash tables to save some memory.
     The encoders' tree vectors remain valid and are all we need from
     here on.  */
  out_state = lto_get_out_decl_state ();
  for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
    if (out_state->streams[i].tree_hash_table)
      {
	delete out_state->streams[i].tree_hash_table;
	out_state->streams[i].tree_hash_table = NULL;
      }

  /* Write the global symbols.  First the global state's trees, then
     each function's out-decl state recorded by lto_output.  */
  lto_output_decl_state_streams (ob, out_state);
  num_fns = lto_function_decl_states.length ();
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_streams (ob, fn_out_state);
    }

  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  /* Currently not used.  This field would allow us to preallocate
     the globals vector, so that it need not be resized as it is extended.  */
  header.num_nodes = -1;

  /* Compute the total size of all decl out states.  One int32_t for
     the state count, plus the written size of each state.  */
  decl_state_size = sizeof (int32_t);
  decl_state_size += lto_out_decl_state_written_size (out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      decl_state_size += lto_out_decl_state_written_size (fn_out_state);
    }
  header.decl_state_size = decl_state_size;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  /* Section layout: header, decl-state references, main stream,
     string stream — the reader relies on this exact order.  */
  header_stream = XCNEW (struct lto_output_stream);
  lto_output_data_stream (header_stream, &header, sizeof header);
  lto_write_stream (header_stream);
  free (header_stream);

  /* Write the main out-decl state, followed by out-decl states of
     functions.  NUM_DECL_STATES counts the global state plus one per
     function.  */
  decl_state_stream = XCNEW (struct lto_output_stream);
  num_decl_states = num_fns + 1;
  lto_output_data_stream (decl_state_stream, &num_decl_states,
			  sizeof (num_decl_states));
  lto_output_decl_state_refs (ob, decl_state_stream, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_refs (ob, decl_state_stream, fn_out_state);
    }
  lto_write_stream (decl_state_stream);
  free (decl_state_stream);

  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  /* Write the symbol table.  It is used by linker to determine dependencies
     and thus we can skip it for WPA.  */
  if (!flag_wpa)
    produce_symtab (ob);

  /* Write command line opts.  */
  lto_write_options ();

  /* Deallocate memory and clean up.  */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_delete_out_decl_state (fn_out_state);
    }
  lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
  lto_function_decl_states.release ();
  destroy_output_block (ob);
}