Rename profile_status_for_function to profile_status_for_fn.
[gcc.git] / gcc / lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2013 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "stor-layout.h"
29 #include "stringpool.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "hashtab.h"
35 #include "basic-block.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-expr.h"
39 #include "is-a.h"
40 #include "gimple.h"
41 #include "gimple-iterator.h"
42 #include "gimple-ssa.h"
43 #include "tree-ssanames.h"
44 #include "tree-pass.h"
45 #include "function.h"
46 #include "diagnostic-core.h"
47 #include "except.h"
48 #include "lto-symtab.h"
49 #include "lto-streamer.h"
50 #include "data-streamer.h"
51 #include "gimple-streamer.h"
52 #include "tree-streamer.h"
53 #include "streamer-hooks.h"
54 #include "cfgloop.h"
55
56
57 static void lto_write_tree (struct output_block*, tree, bool);
58
59 /* Clear the line info stored in DATA_IN. */
60
61 static void
62 clear_line_info (struct output_block *ob)
63 {
64 ob->current_file = NULL;
65 ob->current_line = 0;
66 ob->current_col = 0;
67 }
68
69
70 /* Create the output block and return it. SECTION_TYPE is
71 LTO_section_function_body or LTO_static_initializer. */
72
73 struct output_block *
74 create_output_block (enum lto_section_type section_type)
75 {
76 struct output_block *ob = XCNEW (struct output_block);
77
78 ob->section_type = section_type;
79 ob->decl_state = lto_get_out_decl_state ();
80 ob->main_stream = XCNEW (struct lto_output_stream);
81 ob->string_stream = XCNEW (struct lto_output_stream);
82 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true);
83
84 if (section_type == LTO_section_function_body)
85 ob->cfg_stream = XCNEW (struct lto_output_stream);
86
87 clear_line_info (ob);
88
89 ob->string_hash_table.create (37);
90 gcc_obstack_init (&ob->obstack);
91
92 return ob;
93 }
94
95
96 /* Destroy the output block OB. */
97
98 void
99 destroy_output_block (struct output_block *ob)
100 {
101 enum lto_section_type section_type = ob->section_type;
102
103 ob->string_hash_table.dispose ();
104
105 free (ob->main_stream);
106 free (ob->string_stream);
107 if (section_type == LTO_section_function_body)
108 free (ob->cfg_stream);
109
110 streamer_tree_cache_delete (ob->writer_cache);
111 obstack_free (&ob->obstack, NULL);
112
113 free (ob);
114 }
115
116
/* Look up NODE in the type table and write the index for it to OB.
   Emits an LTO_type_ref record marker followed by NODE's index in
   OB's declaration-state type table.  */

static void
output_type_ref (struct output_block *ob, tree node)
{
  streamer_write_record_start (ob, LTO_type_ref);
  lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
}
125
126
127 /* Return true if tree node T is written to various tables. For these
128 nodes, we sometimes want to write their phyiscal representation
129 (via lto_output_tree), and sometimes we need to emit an index
130 reference into a table (via lto_output_tree_ref). */
131
132 static bool
133 tree_is_indexable (tree t)
134 {
135 /* Parameters and return values of functions of variably modified types
136 must go to global stream, because they may be used in the type
137 definition. */
138 if (TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
139 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
140 else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
141 || TREE_CODE (t) == TYPE_DECL
142 || TREE_CODE (t) == CONST_DECL)
143 && decl_function_context (t))
144 return false;
145 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
146 return false;
147 /* Variably modified types need to be streamed alongside function
148 bodies because they can refer to local entities. Together with
149 them we have to localize their members as well.
150 ??? In theory that includes non-FIELD_DECLs as well. */
151 else if (TYPE_P (t)
152 && variably_modified_type_p (t, NULL_TREE))
153 return false;
154 else if (TREE_CODE (t) == FIELD_DECL
155 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
156 return false;
157 else
158 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
159 }
160
161
162 /* Output info about new location into bitpack BP.
163 After outputting bitpack, lto_output_location_data has
164 to be done to output actual data. */
165
166 void
167 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
168 location_t loc)
169 {
170 expanded_location xloc;
171
172 loc = LOCATION_LOCUS (loc);
173 bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
174 if (loc == UNKNOWN_LOCATION)
175 return;
176
177 xloc = expand_location (loc);
178
179 bp_pack_value (bp, ob->current_file != xloc.file, 1);
180 bp_pack_value (bp, ob->current_line != xloc.line, 1);
181 bp_pack_value (bp, ob->current_col != xloc.column, 1);
182
183 if (ob->current_file != xloc.file)
184 bp_pack_var_len_unsigned (bp,
185 streamer_string_index (ob, xloc.file,
186 strlen (xloc.file) + 1,
187 true));
188 ob->current_file = xloc.file;
189
190 if (ob->current_line != xloc.line)
191 bp_pack_var_len_unsigned (bp, xloc.line);
192 ob->current_line = xloc.line;
193
194 if (ob->current_col != xloc.column)
195 bp_pack_var_len_unsigned (bp, xloc.column);
196 ob->current_col = xloc.column;
197 }
198
199
/* If EXPR is an indexable tree node, output a reference to it to
   output block OB.  Otherwise, output the physical representation of
   EXPR to OB.  Each reference is a record marker identifying the
   table, followed by EXPR's index in that table.  */

static void
lto_output_tree_ref (struct output_block *ob, tree expr)
{
  enum tree_code code;

  if (TYPE_P (expr))
    {
      output_type_ref (ob, expr);
      return;
    }

  code = TREE_CODE (expr);
  switch (code)
    {
    case SSA_NAME:
      /* SSA names are referenced by their version number, not by a
	 table index.  */
      streamer_write_record_start (ob, LTO_ssa_name_ref);
      streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
      break;

    case FIELD_DECL:
      streamer_write_record_start (ob, LTO_field_decl_ref);
      lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case FUNCTION_DECL:
      streamer_write_record_start (ob, LTO_function_decl_ref);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case VAR_DECL:
    case DEBUG_EXPR_DECL:
      gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
      /* Fall through.  VAR_DECLs and DEBUG_EXPR_DECLs share the
	 global decl index space with PARM_DECLs.  */
    case PARM_DECL:
      streamer_write_record_start (ob, LTO_global_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case CONST_DECL:
      streamer_write_record_start (ob, LTO_const_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case IMPORTED_DECL:
      gcc_assert (decl_function_context (expr) == NULL);
      streamer_write_record_start (ob, LTO_imported_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TYPE_DECL:
      streamer_write_record_start (ob, LTO_type_decl_ref);
      lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMELIST_DECL:
      {
	/* Namelists (Fortran) are streamed as their name plus the
	   list of associated variable decl indexes.  */
	unsigned i;
	tree value, tmp;

	streamer_write_record_start (ob, LTO_namelist_decl_ref);
	stream_write_tree (ob, DECL_NAME (expr), true);
	tmp = NAMELIST_DECL_ASSOCIATED_DECL (expr);
	gcc_assert (tmp != NULL_TREE);
	streamer_write_uhwi (ob, CONSTRUCTOR_ELTS (tmp)->length());
	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (tmp), i, value)
	  lto_output_var_decl_index (ob->decl_state, ob->main_stream, value);
	break;
      }

    case NAMESPACE_DECL:
      streamer_write_record_start (ob, LTO_namespace_decl_ref);
      lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case LABEL_DECL:
      streamer_write_record_start (ob, LTO_label_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case RESULT_DECL:
      streamer_write_record_start (ob, LTO_result_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TRANSLATION_UNIT_DECL:
      streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    default:
      /* No other node is indexable, so it should have been handled by
	 lto_output_tree.  */
      gcc_unreachable ();
    }
}
298
299
300 /* Return true if EXPR is a tree node that can be written to disk. */
301
302 static inline bool
303 lto_is_streamable (tree expr)
304 {
305 enum tree_code code = TREE_CODE (expr);
306
307 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
308 name version in lto_output_tree_ref (see output_ssa_names). */
309 return !is_lang_specific (expr)
310 && code != SSA_NAME
311 && code != CALL_EXPR
312 && code != LANG_TYPE
313 && code != MODIFY_EXPR
314 && code != INIT_EXPR
315 && code != TARGET_EXPR
316 && code != BIND_EXPR
317 && code != WITH_CLEANUP_EXPR
318 && code != STATEMENT_LIST
319 && (code == CASE_LABEL_EXPR
320 || code == DECL_EXPR
321 || TREE_CODE_CLASS (code) != tcc_statement);
322 }
323
324
325 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
326
327 static tree
328 get_symbol_initial_value (struct output_block *ob, tree expr)
329 {
330 gcc_checking_assert (DECL_P (expr)
331 && TREE_CODE (expr) != FUNCTION_DECL
332 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
333
334 /* Handle DECL_INITIAL for symbols. */
335 tree initial = DECL_INITIAL (expr);
336 if (TREE_CODE (expr) == VAR_DECL
337 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
338 && !DECL_IN_CONSTANT_POOL (expr)
339 && initial)
340 {
341 lto_symtab_encoder_t encoder;
342 varpool_node *vnode;
343
344 encoder = ob->decl_state->symtab_node_encoder;
345 vnode = varpool_get_node (expr);
346 if (!vnode
347 || !lto_symtab_encoder_encode_initializer_p (encoder,
348 vnode))
349 initial = error_mark_node;
350 }
351
352 return initial;
353 }
354
355
356 /* Write a physical representation of tree node EXPR to output block
357 OB. If REF_P is true, the leaves of EXPR are emitted as references
358 via lto_output_tree_ref. IX is the index into the streamer cache
359 where EXPR is stored. */
360
361 static void
362 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
363 {
364 /* Pack all the non-pointer fields in EXPR into a bitpack and write
365 the resulting bitpack. */
366 bitpack_d bp = bitpack_create (ob->main_stream);
367 streamer_pack_tree_bitfields (ob, &bp, expr);
368 streamer_write_bitpack (&bp);
369
370 /* Write all the pointer fields in EXPR. */
371 streamer_write_tree_body (ob, expr, ref_p);
372
373 /* Write any LTO-specific data to OB. */
374 if (DECL_P (expr)
375 && TREE_CODE (expr) != FUNCTION_DECL
376 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
377 {
378 /* Handle DECL_INITIAL for symbols. */
379 tree initial = get_symbol_initial_value (ob, expr);
380 stream_write_tree (ob, initial, ref_p);
381 }
382 }
383
384 /* Write a physical representation of tree node EXPR to output block
385 OB. If REF_P is true, the leaves of EXPR are emitted as references
386 via lto_output_tree_ref. IX is the index into the streamer cache
387 where EXPR is stored. */
388
389 static void
390 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
391 {
392 if (!lto_is_streamable (expr))
393 internal_error ("tree code %qs is not supported in LTO streams",
394 get_tree_code_name (TREE_CODE (expr)));
395
396 /* Write the header, containing everything needed to materialize
397 EXPR on the reading side. */
398 streamer_write_tree_header (ob, expr);
399
400 lto_write_tree_1 (ob, expr, ref_p);
401
402 /* Mark the end of EXPR. */
403 streamer_write_zero (ob);
404 }
405
406 /* Emit the physical representation of tree node EXPR to output block
407 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
408 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
409
410 static void
411 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
412 bool ref_p, bool this_ref_p)
413 {
414 unsigned ix;
415
416 gcc_checking_assert (expr != NULL_TREE
417 && !(this_ref_p && tree_is_indexable (expr)));
418
419 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
420 expr, hash, &ix);
421 gcc_assert (!exists_p);
422 if (streamer_handle_as_builtin_p (expr))
423 {
424 /* MD and NORMAL builtins do not need to be written out
425 completely as they are always instantiated by the
426 compiler on startup. The only builtins that need to
427 be written out are BUILT_IN_FRONTEND. For all other
428 builtins, we simply write the class and code. */
429 streamer_write_builtin (ob, expr);
430 }
431 else if (TREE_CODE (expr) == INTEGER_CST
432 && !TREE_OVERFLOW (expr))
433 {
434 /* Shared INTEGER_CST nodes are special because they need their
435 original type to be materialized by the reader (to implement
436 TYPE_CACHED_VALUES). */
437 streamer_write_integer_cst (ob, expr, ref_p);
438 }
439 else
440 {
441 /* This is the first time we see EXPR, write its fields
442 to OB. */
443 lto_write_tree (ob, expr, ref_p);
444 }
445 }
446
/* Per-node DFS state for SCC discovery: the node's DFS discovery
   number and the smallest discovery number reachable from it
   (the "low-link" value).  */
struct sccs
{
  unsigned int dfsnum;
  unsigned int low;
};

/* A tree together with its streaming hash, as kept on the SCC
   stack.  */
struct scc_entry
{
  tree t;
  hashval_t hash;
};

/* Next DFS discovery number to hand out.  */
static unsigned int next_dfs_num;
/* Stack of entries belonging to SCCs still being discovered.  */
static vec<scc_entry> sccstack;
/* Map from tree node to its sccs state.  */
static struct pointer_map_t *sccstate;
/* Obstack backing the per-node SCC state (presumably; allocation is
   not visible in this chunk -- see DFS_write_tree).  */
static struct obstack sccstate_obstack;

static void
DFS_write_tree (struct output_block *ob, sccs *from_state,
		tree expr, bool ref_p, bool this_ref_p);
467
/* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
   DFS recurse for all tree edges originating from it.

   NOTE(review): the set and order of edges followed here must mirror
   the pointer fields written by streamer_write_tree_body (called from
   lto_write_tree_1) -- keep the two in sync when changing either.  */

static void
DFS_write_tree_body (struct output_block *ob,
		     tree expr, sccs *expr_state, bool ref_p)
{
#define DFS_follow_tree_edge(DEST) \
  DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)

  enum tree_code code;

  code = TREE_CODE (expr);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (TREE_CODE (expr) != IDENTIFIER_NODE)
	DFS_follow_tree_edge (TREE_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
	DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      DFS_follow_tree_edge (TREE_REALPART (expr));
      DFS_follow_tree_edge (TREE_IMAGPART (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (expr)
	  && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
	  && ANON_AGGRNAME_P (DECL_NAME (expr)))
	;
      else
	DFS_follow_tree_edge (DECL_NAME (expr));
      DFS_follow_tree_edge (DECL_CONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      DFS_follow_tree_edge (DECL_SIZE (expr));
      DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));

      /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
	 special handling in LTO, it must be handled by streamer hooks.  */

      DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));

      /* Do not follow DECL_ABSTRACT_ORIGIN.  We cannot handle debug
	 information for early inlining so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      if ((TREE_CODE (expr) == VAR_DECL
	   || TREE_CODE (expr) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (expr))
	DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
      if (TREE_CODE (expr) == VAR_DECL)
	DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (TREE_CODE (expr) == TYPE_DECL)
	DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
      DFS_follow_tree_edge (DECL_VINDEX (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* Make sure we don't inadvertently set the assembler name.  */
      if (DECL_ASSEMBLER_NAME_SET_P (expr))
	DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
      DFS_follow_tree_edge (DECL_SECTION_NAME (expr));
      DFS_follow_tree_edge (DECL_COMDAT_GROUP (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
      DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
      DFS_follow_tree_edge (DECL_FCONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      DFS_follow_tree_edge (TYPE_SIZE (expr));
      DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
      DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
      DFS_follow_tree_edge (TYPE_NAME (expr));
      /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
	 reconstructed during fixup.  */
      /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
	 during fixup.  */
      DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
      DFS_follow_tree_edge (TYPE_CONTEXT (expr));
      /* TYPE_CANONICAL is re-computed during type merging, so no need
	 to follow it here.  */
      DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	DFS_follow_tree_edge (TYPE_VALUES (expr));
      else if (TREE_CODE (expr) == ARRAY_TYPE)
	DFS_follow_tree_edge (TYPE_DOMAIN (expr));
      else if (RECORD_OR_UNION_TYPE_P (expr))
	for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
	  DFS_follow_tree_edge (t);
      else if (TREE_CODE (expr) == FUNCTION_TYPE
	       || TREE_CODE (expr) == METHOD_TYPE)
	DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));

      if (!POINTER_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_MINVAL (expr));
      DFS_follow_tree_edge (TYPE_MAXVAL (expr));
      if (RECORD_OR_UNION_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_BINFO (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      DFS_follow_tree_edge (TREE_PURPOSE (expr));
      DFS_follow_tree_edge (TREE_VALUE (expr));
      DFS_follow_tree_edge (TREE_CHAIN (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    {
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_OPERAND (expr, i));
      DFS_follow_tree_edge (TREE_BLOCK (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    {
      for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
	/* ??? FIXME.  See also streamer_write_chain.  */
	if (!(VAR_OR_FUNCTION_DECL_P (t)
	      && DECL_EXTERNAL (t)))
	  DFS_follow_tree_edge (t);

      DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));

      /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
	 handle - those that represent inlined function scopes.
	 For the rest, drop them on the floor instead of ICEing
	 in dwarf2out.c.  */
      if (inlined_function_outer_scope_p (expr))
	{
	  tree ultimate_origin = block_ultimate_origin (expr);
	  DFS_follow_tree_edge (ultimate_origin);
	}
      /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
	 information for early inlined BLOCKs so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
	 streaming time.  */

      /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
	 list is re-constructed from BLOCK_SUPERCONTEXT.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree t;

      /* Note that the number of BINFO slots has already been emitted in
	 EXPR's header (see streamer_write_tree_header) because this length
	 is needed to build the empty BINFO node on the reader side.  */
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
	DFS_follow_tree_edge (t);
      DFS_follow_tree_edge (BINFO_OFFSET (expr));
      DFS_follow_tree_edge (BINFO_VTABLE (expr));
      DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));

      /* The number of BINFO_BASE_ACCESSES has already been emitted in
	 EXPR's bitfield section.  */
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
	DFS_follow_tree_edge (t);

      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
	{
	  DFS_follow_tree_edge (index);
	  DFS_follow_tree_edge (value);
	}
    }

  if (code == OMP_CLAUSE)
    {
      int i;
      for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
	DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
      DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
    }

#undef DFS_follow_tree_edge
}
698
699 /* Return a hash value for the tree T. */
700
701 static hashval_t
702 hash_tree (struct streamer_tree_cache_d *cache, tree t)
703 {
704 #define visit(SIBLING) \
705 do { \
706 unsigned ix; \
707 if (SIBLING && streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
708 v = iterative_hash_hashval_t (streamer_tree_cache_get_hash (cache, ix), v); \
709 } while (0)
710
711 /* Hash TS_BASE. */
712 enum tree_code code = TREE_CODE (t);
713 hashval_t v = iterative_hash_host_wide_int (code, 0);
714 if (!TYPE_P (t))
715 {
716 v = iterative_hash_host_wide_int (TREE_SIDE_EFFECTS (t)
717 | (TREE_CONSTANT (t) << 1)
718 | (TREE_READONLY (t) << 2)
719 | (TREE_PUBLIC (t) << 3), v);
720 }
721 v = iterative_hash_host_wide_int (TREE_ADDRESSABLE (t)
722 | (TREE_THIS_VOLATILE (t) << 1), v);
723 if (DECL_P (t))
724 v = iterative_hash_host_wide_int (DECL_UNSIGNED (t), v);
725 else if (TYPE_P (t))
726 v = iterative_hash_host_wide_int (TYPE_UNSIGNED (t), v);
727 if (TYPE_P (t))
728 v = iterative_hash_host_wide_int (TYPE_ARTIFICIAL (t), v);
729 else
730 v = iterative_hash_host_wide_int (TREE_NO_WARNING (t), v);
731 v = iterative_hash_host_wide_int (TREE_NOTHROW (t)
732 | (TREE_STATIC (t) << 1)
733 | (TREE_PROTECTED (t) << 2)
734 | (TREE_DEPRECATED (t) << 3), v);
735 if (code != TREE_BINFO)
736 v = iterative_hash_host_wide_int (TREE_PRIVATE (t), v);
737 if (TYPE_P (t))
738 v = iterative_hash_host_wide_int (TYPE_SATURATING (t)
739 | (TYPE_ADDR_SPACE (t) << 1), v);
740 else if (code == SSA_NAME)
741 v = iterative_hash_host_wide_int (SSA_NAME_IS_DEFAULT_DEF (t), v);
742
743 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
744 {
745 v = iterative_hash_host_wide_int (TREE_INT_CST_LOW (t), v);
746 v = iterative_hash_host_wide_int (TREE_INT_CST_HIGH (t), v);
747 }
748
749 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
750 {
751 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
752 v = iterative_hash_host_wide_int (r.cl, v);
753 v = iterative_hash_host_wide_int (r.decimal
754 | (r.sign << 1)
755 | (r.signalling << 2)
756 | (r.canonical << 3), v);
757 v = iterative_hash_host_wide_int (r.uexp, v);
758 for (unsigned i = 0; i < SIGSZ; ++i)
759 v = iterative_hash_host_wide_int (r.sig[i], v);
760 }
761
762 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
763 {
764 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
765 v = iterative_hash_host_wide_int (f.mode, v);
766 v = iterative_hash_host_wide_int (f.data.low, v);
767 v = iterative_hash_host_wide_int (f.data.high, v);
768 }
769
770 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
771 {
772 v = iterative_hash_host_wide_int (DECL_MODE (t), v);
773 v = iterative_hash_host_wide_int (DECL_NONLOCAL (t)
774 | (DECL_VIRTUAL_P (t) << 1)
775 | (DECL_IGNORED_P (t) << 2)
776 | (DECL_ABSTRACT (t) << 3)
777 | (DECL_ARTIFICIAL (t) << 4)
778 | (DECL_USER_ALIGN (t) << 5)
779 | (DECL_PRESERVE_P (t) << 6)
780 | (DECL_EXTERNAL (t) << 7)
781 | (DECL_GIMPLE_REG_P (t) << 8), v);
782 v = iterative_hash_host_wide_int (DECL_ALIGN (t), v);
783 if (code == LABEL_DECL)
784 {
785 v = iterative_hash_host_wide_int (EH_LANDING_PAD_NR (t), v);
786 v = iterative_hash_host_wide_int (LABEL_DECL_UID (t), v);
787 }
788 else if (code == FIELD_DECL)
789 {
790 v = iterative_hash_host_wide_int (DECL_PACKED (t)
791 | (DECL_NONADDRESSABLE_P (t) << 1),
792 v);
793 v = iterative_hash_host_wide_int (DECL_OFFSET_ALIGN (t), v);
794 }
795 else if (code == VAR_DECL)
796 {
797 v = iterative_hash_host_wide_int (DECL_HAS_DEBUG_EXPR_P (t)
798 | (DECL_NONLOCAL_FRAME (t) << 1),
799 v);
800 }
801 if (code == RESULT_DECL
802 || code == PARM_DECL
803 || code == VAR_DECL)
804 {
805 v = iterative_hash_host_wide_int (DECL_BY_REFERENCE (t), v);
806 if (code == VAR_DECL
807 || code == PARM_DECL)
808 v = iterative_hash_host_wide_int (DECL_HAS_VALUE_EXPR_P (t), v);
809 }
810 }
811
812 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
813 v = iterative_hash_host_wide_int (DECL_REGISTER (t), v);
814
815 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
816 {
817 v = iterative_hash_host_wide_int ((DECL_COMMON (t))
818 | (DECL_DLLIMPORT_P (t) << 1)
819 | (DECL_WEAK (t) << 2)
820 | (DECL_SEEN_IN_BIND_EXPR_P (t) << 3)
821 | (DECL_COMDAT (t) << 4)
822 | (DECL_VISIBILITY_SPECIFIED (t) << 6),
823 v);
824 v = iterative_hash_host_wide_int (DECL_VISIBILITY (t), v);
825 if (code == VAR_DECL)
826 {
827 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
828 v = iterative_hash_host_wide_int (DECL_HARD_REGISTER (t)
829 | (DECL_IN_CONSTANT_POOL (t) << 1),
830 v);
831 v = iterative_hash_host_wide_int (DECL_TLS_MODEL (t), v);
832 }
833 if (TREE_CODE (t) == FUNCTION_DECL)
834 v = iterative_hash_host_wide_int (DECL_FINAL_P (t)
835 | (DECL_CXX_CONSTRUCTOR_P (t) << 1)
836 | (DECL_CXX_DESTRUCTOR_P (t) << 2),
837 v);
838 if (VAR_OR_FUNCTION_DECL_P (t))
839 v = iterative_hash_host_wide_int (DECL_INIT_PRIORITY (t), v);
840 }
841
842 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
843 {
844 v = iterative_hash_host_wide_int (DECL_BUILT_IN_CLASS (t), v);
845 v = iterative_hash_host_wide_int (DECL_STATIC_CONSTRUCTOR (t)
846 | (DECL_STATIC_DESTRUCTOR (t) << 1)
847 | (DECL_UNINLINABLE (t) << 2)
848 | (DECL_POSSIBLY_INLINED (t) << 3)
849 | (DECL_IS_NOVOPS (t) << 4)
850 | (DECL_IS_RETURNS_TWICE (t) << 5)
851 | (DECL_IS_MALLOC (t) << 6)
852 | (DECL_IS_OPERATOR_NEW (t) << 7)
853 | (DECL_DECLARED_INLINE_P (t) << 8)
854 | (DECL_STATIC_CHAIN (t) << 9)
855 | (DECL_NO_INLINE_WARNING_P (t) << 10)
856 | (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t) << 11)
857 | (DECL_NO_LIMIT_STACK (t) << 12)
858 | (DECL_DISREGARD_INLINE_LIMITS (t) << 13)
859 | (DECL_PURE_P (t) << 14)
860 | (DECL_LOOPING_CONST_OR_PURE_P (t) << 15), v);
861 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
862 v = iterative_hash_host_wide_int (DECL_FUNCTION_CODE (t), v);
863 if (DECL_STATIC_DESTRUCTOR (t))
864 v = iterative_hash_host_wide_int (DECL_FINI_PRIORITY (t), v);
865 }
866
867 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
868 {
869 v = iterative_hash_host_wide_int (TYPE_MODE (t), v);
870 v = iterative_hash_host_wide_int (TYPE_STRING_FLAG (t)
871 | (TYPE_NO_FORCE_BLK (t) << 1)
872 | (TYPE_NEEDS_CONSTRUCTING (t) << 2)
873 | (TYPE_PACKED (t) << 3)
874 | (TYPE_RESTRICT (t) << 4)
875 | (TYPE_USER_ALIGN (t) << 5)
876 | (TYPE_READONLY (t) << 6), v);
877 if (RECORD_OR_UNION_TYPE_P (t))
878 {
879 v = iterative_hash_host_wide_int (TYPE_TRANSPARENT_AGGR (t)
880 | (TYPE_FINAL_P (t) << 1), v);
881 }
882 else if (code == ARRAY_TYPE)
883 v = iterative_hash_host_wide_int (TYPE_NONALIASED_COMPONENT (t), v);
884 v = iterative_hash_host_wide_int (TYPE_PRECISION (t), v);
885 v = iterative_hash_host_wide_int (TYPE_ALIGN (t), v);
886 v = iterative_hash_host_wide_int ((TYPE_ALIAS_SET (t) == 0
887 || (!in_lto_p
888 && get_alias_set (t) == 0))
889 ? 0 : -1, v);
890 }
891
892 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
893 v = iterative_hash (TRANSLATION_UNIT_LANGUAGE (t),
894 strlen (TRANSLATION_UNIT_LANGUAGE (t)), v);
895
896 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
897 v = iterative_hash (t, sizeof (struct cl_target_option), v);
898
899 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
900 v = iterative_hash (t, sizeof (struct cl_optimization), v);
901
902 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
903 v = iterative_hash_host_wide_int (IDENTIFIER_HASH_VALUE (t), v);
904
905 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
906 v = iterative_hash (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t), v);
907
908 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
909 {
910 if (POINTER_TYPE_P (t))
911 {
912 /* For pointers factor in the pointed-to type recursively as
913 we cannot recurse through only pointers.
914 ??? We can generalize this by keeping track of the
915 in-SCC edges for each tree (or arbitrarily the first
916 such edge) and hashing that in in a second stage
917 (instead of the quadratic mixing of the SCC we do now). */
918 hashval_t x;
919 unsigned ix;
920 if (streamer_tree_cache_lookup (cache, TREE_TYPE (t), &ix))
921 x = streamer_tree_cache_get_hash (cache, ix);
922 else
923 x = hash_tree (cache, TREE_TYPE (t));
924 v = iterative_hash_hashval_t (x, v);
925 }
926 else if (code != IDENTIFIER_NODE)
927 visit (TREE_TYPE (t));
928 }
929
930 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
931 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
932 visit (VECTOR_CST_ELT (t, i));
933
934 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
935 {
936 visit (TREE_REALPART (t));
937 visit (TREE_IMAGPART (t));
938 }
939
940 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
941 {
942 /* Drop names that were created for anonymous entities. */
943 if (DECL_NAME (t)
944 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
945 && ANON_AGGRNAME_P (DECL_NAME (t)))
946 ;
947 else
948 visit (DECL_NAME (t));
949 if (DECL_FILE_SCOPE_P (t))
950 ;
951 else
952 visit (DECL_CONTEXT (t));
953 }
954
955 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
956 {
957 visit (DECL_SIZE (t));
958 visit (DECL_SIZE_UNIT (t));
959 visit (DECL_ATTRIBUTES (t));
960 if ((code == VAR_DECL
961 || code == PARM_DECL)
962 && DECL_HAS_VALUE_EXPR_P (t))
963 visit (DECL_VALUE_EXPR (t));
964 if (code == VAR_DECL
965 && DECL_HAS_DEBUG_EXPR_P (t))
966 visit (DECL_DEBUG_EXPR (t));
967 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
968 be able to call get_symbol_initial_value. */
969 }
970
971 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
972 {
973 if (code == TYPE_DECL)
974 visit (DECL_ORIGINAL_TYPE (t));
975 visit (DECL_VINDEX (t));
976 }
977
978 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
979 {
980 if (DECL_ASSEMBLER_NAME_SET_P (t))
981 visit (DECL_ASSEMBLER_NAME (t));
982 visit (DECL_SECTION_NAME (t));
983 visit (DECL_COMDAT_GROUP (t));
984 }
985
986 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
987 {
988 visit (DECL_FIELD_OFFSET (t));
989 visit (DECL_BIT_FIELD_TYPE (t));
990 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
991 visit (DECL_FIELD_BIT_OFFSET (t));
992 visit (DECL_FCONTEXT (t));
993 }
994
995 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
996 {
997 visit (DECL_FUNCTION_PERSONALITY (t));
998 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
999 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1000 }
1001
1002 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1003 {
1004 visit (TYPE_SIZE (t));
1005 visit (TYPE_SIZE_UNIT (t));
1006 visit (TYPE_ATTRIBUTES (t));
1007 visit (TYPE_NAME (t));
1008 visit (TYPE_MAIN_VARIANT (t));
1009 if (TYPE_FILE_SCOPE_P (t))
1010 ;
1011 else
1012 visit (TYPE_CONTEXT (t));
1013 visit (TYPE_STUB_DECL (t));
1014 }
1015
1016 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1017 {
1018 if (code == ENUMERAL_TYPE)
1019 visit (TYPE_VALUES (t));
1020 else if (code == ARRAY_TYPE)
1021 visit (TYPE_DOMAIN (t));
1022 else if (RECORD_OR_UNION_TYPE_P (t))
1023 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1024 visit (f);
1025 else if (code == FUNCTION_TYPE
1026 || code == METHOD_TYPE)
1027 visit (TYPE_ARG_TYPES (t));
1028 if (!POINTER_TYPE_P (t))
1029 visit (TYPE_MINVAL (t));
1030 visit (TYPE_MAXVAL (t));
1031 if (RECORD_OR_UNION_TYPE_P (t))
1032 visit (TYPE_BINFO (t));
1033 }
1034
1035 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1036 {
1037 visit (TREE_PURPOSE (t));
1038 visit (TREE_VALUE (t));
1039 visit (TREE_CHAIN (t));
1040 }
1041
1042 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1043 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1044 visit (TREE_VEC_ELT (t, i));
1045
1046 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1047 {
1048 v = iterative_hash_host_wide_int (TREE_OPERAND_LENGTH (t), v);
1049 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1050 visit (TREE_OPERAND (t, i));
1051 }
1052
1053 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1054 {
1055 unsigned i;
1056 tree b;
1057 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1058 visit (b);
1059 visit (BINFO_OFFSET (t));
1060 visit (BINFO_VTABLE (t));
1061 visit (BINFO_VPTR_FIELD (t));
1062 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1063 visit (b);
1064 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1065 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1066 }
1067
1068 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1069 {
1070 unsigned i;
1071 tree index, value;
1072 v = iterative_hash_host_wide_int (CONSTRUCTOR_NELTS (t), v);
1073 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1074 {
1075 visit (index);
1076 visit (value);
1077 }
1078 }
1079
1080 if (code == OMP_CLAUSE)
1081 {
1082 int i;
1083
1084 v = iterative_hash_host_wide_int (OMP_CLAUSE_CODE (t), v);
1085 switch (OMP_CLAUSE_CODE (t))
1086 {
1087 case OMP_CLAUSE_DEFAULT:
1088 v = iterative_hash_host_wide_int (OMP_CLAUSE_DEFAULT_KIND (t), v);
1089 break;
1090 case OMP_CLAUSE_SCHEDULE:
1091 v = iterative_hash_host_wide_int (OMP_CLAUSE_SCHEDULE_KIND (t), v);
1092 break;
1093 case OMP_CLAUSE_DEPEND:
1094 v = iterative_hash_host_wide_int (OMP_CLAUSE_DEPEND_KIND (t), v);
1095 break;
1096 case OMP_CLAUSE_MAP:
1097 v = iterative_hash_host_wide_int (OMP_CLAUSE_MAP_KIND (t), v);
1098 break;
1099 case OMP_CLAUSE_PROC_BIND:
1100 v = iterative_hash_host_wide_int (OMP_CLAUSE_PROC_BIND_KIND (t), v);
1101 break;
1102 case OMP_CLAUSE_REDUCTION:
1103 v = iterative_hash_host_wide_int (OMP_CLAUSE_REDUCTION_CODE (t), v);
1104 break;
1105 default:
1106 break;
1107 }
1108 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1109 visit (OMP_CLAUSE_OPERAND (t, i));
1110 visit (OMP_CLAUSE_CHAIN (t));
1111 }
1112
1113 return v;
1114
1115 #undef visit
1116 }
1117
1118 /* Compare two SCC entries by their hash value for qsorting them. */
1119
1120 static int
1121 scc_entry_compare (const void *p1_, const void *p2_)
1122 {
1123 const scc_entry *p1 = (const scc_entry *) p1_;
1124 const scc_entry *p2 = (const scc_entry *) p2_;
1125 if (p1->hash < p2->hash)
1126 return -1;
1127 else if (p1->hash > p2->hash)
1128 return 1;
1129 return 0;
1130 }
1131
/* Return a hash value for the SCC on the SCC stack from FIRST with
   size SIZE.  The result is independent of the order in which the
   SCC members were visited, so that structurally identical SCCs
   streamed from different TUs hash the same.  */

static hashval_t
hash_scc (struct streamer_tree_cache_d *cache, unsigned first, unsigned size)
{
  /* Compute hash values for the SCC members.  */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash = hash_tree (cache, sccstack[first+i].t);

  /* A singleton SCC needs no order-independence treatment.  */
  if (size == 1)
    return sccstack[first].hash;

  /* Sort the SCC of type, hash pairs so that when we mix in
     all members of the SCC the hash value becomes independent on
     the order we visited the SCC.  Disregard hashes equal to
     the hash of the tree we mix into because we cannot guarantee
     a stable sort for those across different TUs.  */
  qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
  hashval_t *tem = XALLOCAVEC (hashval_t, size);
  for (unsigned i = 0; i < size; ++i)
    {
      hashval_t hash = sccstack[first+i].hash;
      hashval_t orig_hash = hash;
      unsigned j;
      /* Skip same hashes.  */
      for (j = i + 1;
	   j < size && sccstack[first+j].hash == orig_hash; ++j)
	;
      /* Mix in all other members' hashes, first those after the run
	 of equal hashes, then wrapping around from the beginning up
	 to (but excluding) entries equal to our own hash.  */
      for (; j < size; ++j)
	hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
      for (j = 0; sccstack[first+j].hash != orig_hash; ++j)
	hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
      tem[i] = hash;
    }
  /* Install the mixed per-member hashes and fold them into the
     overall SCC hash.  */
  hashval_t scc_hash = 0;
  for (unsigned i = 0; i < size; ++i)
    {
      sccstack[first+i].hash = tem[i];
      scc_hash = iterative_hash_hashval_t (tem[i], scc_hash);
    }
  return scc_hash;
}
1175
/* DFS walk EXPR and stream SCCs of tree bodies if they are not
   already in the streamer cache.  Main routine called for
   each visit of EXPR.  This is a Tarjan-style SCC walk: FROM_STATE
   is the state of the DFS parent (NULL at the root), and low-link
   values propagate back up through it.  REF_P / THIS_REF_P control
   whether siblings / EXPR itself are emitted as references.  */

static void
DFS_write_tree (struct output_block *ob, sccs *from_state,
		tree expr, bool ref_p, bool this_ref_p)
{
  unsigned ix;
  sccs **slot;

  /* Handle special cases.  */
  if (expr == NULL_TREE)
    return;

  /* Do not DFS walk into indexable trees.  */
  if (this_ref_p && tree_is_indexable (expr))
    return;

  /* Check if we already streamed EXPR.  */
  if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
    return;

  slot = (sccs **)pointer_map_insert (sccstate, expr);
  sccs *cstate = *slot;
  if (!cstate)
    {
      scc_entry e = { expr, 0 };
      /* Not yet visited.  DFS recurse and push it onto the stack.  */
      *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
      sccstack.safe_push (e);
      cstate->dfsnum = next_dfs_num++;
      cstate->low = cstate->dfsnum;

      /* Builtins are streamed by reference, INTEGER_CSTs only need
	 their type walked; everything else gets a full body walk.  */
      if (streamer_handle_as_builtin_p (expr))
	;
      else if (TREE_CODE (expr) == INTEGER_CST
	       && !TREE_OVERFLOW (expr))
	DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
      else
	{
	  DFS_write_tree_body (ob, expr, cstate, ref_p);

	  /* Walk any LTO-specific edges.  */
	  if (DECL_P (expr)
	      && TREE_CODE (expr) != FUNCTION_DECL
	      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
	    {
	      /* Handle DECL_INITIAL for symbols.  */
	      tree initial = get_symbol_initial_value (ob, expr);
	      DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
	    }
	}

      /* See if we found an SCC.  EXPR is an SCC root when its
	 low-link still equals its own DFS number.  */
      if (cstate->low == cstate->dfsnum)
	{
	  unsigned first, size;
	  tree x;

	  /* Pop the SCC and compute its size.  */
	  first = sccstack.length ();
	  do
	    {
	      x = sccstack[--first].t;
	    }
	  while (x != expr);
	  size = sccstack.length () - first;

	  /* No need to compute hashes for LTRANS units, we don't perform
	     any merging there.  */
	  hashval_t scc_hash = 0;
	  unsigned scc_entry_len = 0;
	  if (!flag_wpa)
	    {
	      scc_hash = hash_scc (ob->writer_cache, first, size);

	      /* Put the entries with the least number of collisions first.
		 Find the shortest run of equal hashes and rotate it to
		 the front so the reader has fewer merge candidates.  */
	      unsigned entry_start = 0;
	      scc_entry_len = size + 1;
	      for (unsigned i = 0; i < size;)
		{
		  unsigned from = i;
		  for (i = i + 1; i < size
		       && (sccstack[first + i].hash
			   == sccstack[first + from].hash); ++i)
		    ;
		  if (i - from < scc_entry_len)
		    {
		      scc_entry_len = i - from;
		      entry_start = from;
		    }
		}
	      for (unsigned i = 0; i < scc_entry_len; ++i)
		{
		  scc_entry tem = sccstack[first + i];
		  sccstack[first + i] = sccstack[first + entry_start + i];
		  sccstack[first + entry_start + i] = tem;
		}
	    }

	  /* Write LTO_tree_scc.  */
	  streamer_write_record_start (ob, LTO_tree_scc);
	  streamer_write_uhwi (ob, size);
	  streamer_write_uhwi (ob, scc_hash);

	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
	     All INTEGER_CSTs need to be handled this way as we need
	     their type to materialize them.  Also builtins are handled
	     this way.
	     ??? We still wrap these in LTO_tree_scc so at the
	     input side we can properly identify the tree we want
	     to ultimatively return.  */
	  size_t old_len = ob->writer_cache->nodes.length ();
	  if (size == 1)
	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
	  else
	    {
	      /* Write the size of the SCC entry candidates.  */
	      streamer_write_uhwi (ob, scc_entry_len);

	      /* Write all headers and populate the streamer cache.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  hashval_t hash = sccstack[first+i].hash;
		  tree t = sccstack[first+i].t;
		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
							      t, hash, &ix);
		  gcc_assert (!exists_p);

		  if (!lto_is_streamable (t))
		    internal_error ("tree code %qs is not supported "
				    "in LTO streams",
				    get_tree_code_name (TREE_CODE (t)));

		  gcc_checking_assert (!streamer_handle_as_builtin_p (t));

		  /* Write the header, containing everything needed to
		     materialize EXPR on the reading side.  */
		  streamer_write_tree_header (ob, t);
		}

	      /* Write the bitpacks and tree references.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);

		  /* Mark the end of the tree.  */
		  streamer_write_zero (ob);
		}
	    }
	  gcc_assert (old_len + size == ob->writer_cache->nodes.length ());

	  /* Finally truncate the vector.  */
	  sccstack.truncate (first);

	  /* Propagate our low-link to the DFS parent before returning.  */
	  if (from_state)
	    from_state->low = MIN (from_state->low, cstate->low);
	  return;
	}

      if (from_state)
	from_state->low = MIN (from_state->low, cstate->low);
    }
  /* EXPR was already on the stack (a back or cross edge); update the
     parent's low-link with our DFS number if it is smaller.  */
  gcc_checking_assert (from_state);
  if (cstate->dfsnum < from_state->dfsnum)
    from_state->low = MIN (cstate->dfsnum, from_state->low);
}
1344
1345
/* Emit the physical representation of tree node EXPR to output block
   OB.  If THIS_REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.  */

void
lto_output_tree (struct output_block *ob, tree expr,
		 bool ref_p, bool this_ref_p)
{
  unsigned ix;
  bool existed_p;

  /* NULL is streamed as an explicit LTO_null record.  */
  if (expr == NULL_TREE)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  /* Indexable trees are emitted as references into the decl/type
     streams rather than inline.  */
  if (this_ref_p && tree_is_indexable (expr))
    {
      lto_output_tree_ref (ob, expr);
      return;
    }

  existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
  if (existed_p)
    {
      /* If a node has already been streamed out, make sure that
	 we don't write it more than once.  Otherwise, the reader
	 will instantiate two different nodes for the same object.  */
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
  else
    {
      /* This is the first time we see EXPR, write all reachable
	 trees to OB.  */
      static bool in_dfs_walk;

      /* Protect against recursion which means disconnect between
	 what tree edges we walk in the DFS walk and what edges
	 we stream out.  */
      gcc_assert (!in_dfs_walk);

      /* Start the DFS walk.  Set up the global SCC state used by
	 DFS_write_tree and tear it down again afterwards.  */
      in_dfs_walk = true;
      sccstate = pointer_map_create ();
      gcc_obstack_init (&sccstate_obstack);
      next_dfs_num = 1;
      DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
      sccstack.release ();
      pointer_map_destroy (sccstate);
      obstack_free (&sccstate_obstack, NULL);
      in_dfs_walk = false;

      /* Finally append a reference to the tree we were writing.
	 ??? If expr ended up as a singleton we could have
	 inlined it here and avoid outputting a reference.  */
      existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
      gcc_assert (existed_p);
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
}
1417
1418
1419 /* Output to OB a list of try/catch handlers starting with FIRST. */
1420
1421 static void
1422 output_eh_try_list (struct output_block *ob, eh_catch first)
1423 {
1424 eh_catch n;
1425
1426 for (n = first; n; n = n->next_catch)
1427 {
1428 streamer_write_record_start (ob, LTO_eh_catch);
1429 stream_write_tree (ob, n->type_list, true);
1430 stream_write_tree (ob, n->filter_list, true);
1431 stream_write_tree (ob, n->label, true);
1432 }
1433
1434 streamer_write_record_start (ob, LTO_null);
1435 }
1436
1437
1438 /* Output EH region R in function FN to OB. CURR_RN is the slot index
1439 that is being emitted in FN->EH->REGION_ARRAY. This is used to
1440 detect EH region sharing. */
1441
1442 static void
1443 output_eh_region (struct output_block *ob, eh_region r)
1444 {
1445 enum LTO_tags tag;
1446
1447 if (r == NULL)
1448 {
1449 streamer_write_record_start (ob, LTO_null);
1450 return;
1451 }
1452
1453 if (r->type == ERT_CLEANUP)
1454 tag = LTO_ert_cleanup;
1455 else if (r->type == ERT_TRY)
1456 tag = LTO_ert_try;
1457 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1458 tag = LTO_ert_allowed_exceptions;
1459 else if (r->type == ERT_MUST_NOT_THROW)
1460 tag = LTO_ert_must_not_throw;
1461 else
1462 gcc_unreachable ();
1463
1464 streamer_write_record_start (ob, tag);
1465 streamer_write_hwi (ob, r->index);
1466
1467 if (r->outer)
1468 streamer_write_hwi (ob, r->outer->index);
1469 else
1470 streamer_write_zero (ob);
1471
1472 if (r->inner)
1473 streamer_write_hwi (ob, r->inner->index);
1474 else
1475 streamer_write_zero (ob);
1476
1477 if (r->next_peer)
1478 streamer_write_hwi (ob, r->next_peer->index);
1479 else
1480 streamer_write_zero (ob);
1481
1482 if (r->type == ERT_TRY)
1483 {
1484 output_eh_try_list (ob, r->u.eh_try.first_catch);
1485 }
1486 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1487 {
1488 stream_write_tree (ob, r->u.allowed.type_list, true);
1489 stream_write_tree (ob, r->u.allowed.label, true);
1490 streamer_write_uhwi (ob, r->u.allowed.filter);
1491 }
1492 else if (r->type == ERT_MUST_NOT_THROW)
1493 {
1494 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1495 bitpack_d bp = bitpack_create (ob->main_stream);
1496 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1497 streamer_write_bitpack (&bp);
1498 }
1499
1500 if (r->landing_pads)
1501 streamer_write_hwi (ob, r->landing_pads->index);
1502 else
1503 streamer_write_zero (ob);
1504 }
1505
1506
/* Output landing pad LP to OB.  A NULL landing pad is emitted as a
   single LTO_null record.  */

static void
output_eh_lp (struct output_block *ob, eh_landing_pad lp)
{
  if (lp == NULL)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  streamer_write_record_start (ob, LTO_eh_landing_pad);
  streamer_write_hwi (ob, lp->index);
  /* Emit the index of the next landing pad in the chain, or zero
     when this is the last one.  */
  if (lp->next_lp)
    streamer_write_hwi (ob, lp->next_lp->index);
  else
    streamer_write_zero (ob);

  /* Emit the index of the owning EH region, or zero if none.  */
  if (lp->region)
    streamer_write_hwi (ob, lp->region->index);
  else
    streamer_write_zero (ob);

  stream_write_tree (ob, lp->post_landing_pad, true);
}
1532
1533
/* Output the existing eh_table of function FN to OB.  If FN has no
   EH regions at all, only a single LTO_null record is emitted.  */

static void
output_eh_regions (struct output_block *ob, struct function *fn)
{
  if (fn->eh && fn->eh->region_tree)
    {
      unsigned i;
      eh_region eh;
      eh_landing_pad lp;
      tree ttype;

      streamer_write_record_start (ob, LTO_eh_table);

      /* Emit the index of the root of the EH region tree.  */
      streamer_write_hwi (ob, fn->eh->region_tree->index);

      /* Emit all the EH regions in the region array.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
	output_eh_region (ob, eh);

      /* Emit all landing pads.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
	output_eh_lp (ob, lp);

      /* Emit all the runtime type data.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
	stream_write_tree (ob, ttype, true);

      /* Emit the table of action chains.  The representation depends
	 on whether the target uses the ARM EABI unwinder (trees) or
	 the generic scheme (raw bytes).  */
      if (targetm.arm_eabi_unwinder)
	{
	  tree t;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
	    stream_write_tree (ob, t, true);
	}
      else
	{
	  uchar c;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
	    streamer_write_char_stream (ob->main_stream, c);
	}
    }

  /* The LTO_null either terminates the record or indicates that there
     are no eh_records at all.  */
  streamer_write_record_start (ob, LTO_null);
}
1587
1588
1589 /* Output all of the active ssa names to the ssa_names stream. */
1590
1591 static void
1592 output_ssa_names (struct output_block *ob, struct function *fn)
1593 {
1594 unsigned int i, len;
1595
1596 len = vec_safe_length (SSANAMES (fn));
1597 streamer_write_uhwi (ob, len);
1598
1599 for (i = 1; i < len; i++)
1600 {
1601 tree ptr = (*SSANAMES (fn))[i];
1602
1603 if (ptr == NULL_TREE
1604 || SSA_NAME_IN_FREE_LIST (ptr)
1605 || virtual_operand_p (ptr))
1606 continue;
1607
1608 streamer_write_uhwi (ob, i);
1609 streamer_write_char_stream (ob->main_stream,
1610 SSA_NAME_IS_DEFAULT_DEF (ptr));
1611 if (SSA_NAME_VAR (ptr))
1612 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1613 else
1614 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1615 stream_write_tree (ob, TREE_TYPE (ptr), true);
1616 }
1617
1618 streamer_write_zero (ob);
1619 }
1620
1621
1622 /* Output the cfg. */
1623
1624 static void
1625 output_cfg (struct output_block *ob, struct function *fn)
1626 {
1627 struct lto_output_stream *tmp_stream = ob->main_stream;
1628 basic_block bb;
1629
1630 ob->main_stream = ob->cfg_stream;
1631
1632 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1633 profile_status_for_fn (fn));
1634
1635 /* Output the number of the highest basic block. */
1636 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1637
1638 FOR_ALL_BB_FN (bb, fn)
1639 {
1640 edge_iterator ei;
1641 edge e;
1642
1643 streamer_write_hwi (ob, bb->index);
1644
1645 /* Output the successors and the edge flags. */
1646 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1647 FOR_EACH_EDGE (e, ei, bb->succs)
1648 {
1649 streamer_write_uhwi (ob, e->dest->index);
1650 streamer_write_hwi (ob, e->probability);
1651 streamer_write_gcov_count (ob, e->count);
1652 streamer_write_uhwi (ob, e->flags);
1653 }
1654 }
1655
1656 streamer_write_hwi (ob, -1);
1657
1658 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1659 while (bb->next_bb)
1660 {
1661 streamer_write_hwi (ob, bb->next_bb->index);
1662 bb = bb->next_bb;
1663 }
1664
1665 streamer_write_hwi (ob, -1);
1666
1667 /* ??? The cfgloop interface is tied to cfun. */
1668 gcc_assert (cfun == fn);
1669
1670 /* Output the number of loops. */
1671 streamer_write_uhwi (ob, number_of_loops (fn));
1672
1673 /* Output each loop, skipping the tree root which has number zero. */
1674 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1675 {
1676 struct loop *loop = get_loop (fn, i);
1677
1678 /* Write the index of the loop header. That's enough to rebuild
1679 the loop tree on the reader side. Stream -1 for an unused
1680 loop entry. */
1681 if (!loop)
1682 {
1683 streamer_write_hwi (ob, -1);
1684 continue;
1685 }
1686 else
1687 streamer_write_hwi (ob, loop->header->index);
1688
1689 /* Write everything copy_loop_info copies. */
1690 streamer_write_enum (ob->main_stream,
1691 loop_estimation, EST_LAST, loop->estimate_state);
1692 streamer_write_hwi (ob, loop->any_upper_bound);
1693 if (loop->any_upper_bound)
1694 {
1695 streamer_write_uhwi (ob, loop->nb_iterations_upper_bound.low);
1696 streamer_write_hwi (ob, loop->nb_iterations_upper_bound.high);
1697 }
1698 streamer_write_hwi (ob, loop->any_estimate);
1699 if (loop->any_estimate)
1700 {
1701 streamer_write_uhwi (ob, loop->nb_iterations_estimate.low);
1702 streamer_write_hwi (ob, loop->nb_iterations_estimate.high);
1703 }
1704
1705 /* Write OMP SIMD related info. */
1706 streamer_write_hwi (ob, loop->safelen);
1707 streamer_write_hwi (ob, loop->force_vect);
1708 stream_write_tree (ob, loop->simduid, true);
1709 }
1710
1711 ob->main_stream = tmp_stream;
1712 }
1713
1714
1715 /* Create the header in the file using OB. If the section type is for
1716 a function, set FN to the decl for that function. */
1717
1718 void
1719 produce_asm (struct output_block *ob, tree fn)
1720 {
1721 enum lto_section_type section_type = ob->section_type;
1722 struct lto_function_header header;
1723 char *section_name;
1724 struct lto_output_stream *header_stream;
1725
1726 if (section_type == LTO_section_function_body)
1727 {
1728 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1729 section_name = lto_get_section_name (section_type, name, NULL);
1730 }
1731 else
1732 section_name = lto_get_section_name (section_type, NULL, NULL);
1733
1734 lto_begin_section (section_name, !flag_wpa);
1735 free (section_name);
1736
1737 /* The entire header is stream computed here. */
1738 memset (&header, 0, sizeof (struct lto_function_header));
1739
1740 /* Write the header. */
1741 header.lto_header.major_version = LTO_major_version;
1742 header.lto_header.minor_version = LTO_minor_version;
1743
1744 header.compressed_size = 0;
1745
1746 if (section_type == LTO_section_function_body)
1747 header.cfg_size = ob->cfg_stream->total_size;
1748 header.main_size = ob->main_stream->total_size;
1749 header.string_size = ob->string_stream->total_size;
1750
1751 header_stream = XCNEW (struct lto_output_stream);
1752 lto_output_data_stream (header_stream, &header, sizeof header);
1753 lto_write_stream (header_stream);
1754 free (header_stream);
1755
1756 /* Put all of the gimple and the string table out the asm file as a
1757 block of text. */
1758 if (section_type == LTO_section_function_body)
1759 lto_write_stream (ob->cfg_stream);
1760 lto_write_stream (ob->main_stream);
1761 lto_write_stream (ob->string_stream);
1762
1763 lto_end_section ();
1764 }
1765
1766
/* Output the base body of struct function FN using output block OB.
   NOTE(review): the order of the streamed fields and bitpacked flags
   below must stay in sync with the reader side — do not reorder.  */

static void
output_struct_function_base (struct output_block *ob, struct function *fn)
{
  struct bitpack_d bp;
  unsigned i;
  tree t;

  /* Output the static chain and non-local goto save area.  */
  stream_write_tree (ob, fn->static_chain_decl, true);
  stream_write_tree (ob, fn->nonlocal_goto_save_area, true);

  /* Output all the local variables in the function.  */
  streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
  FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
    stream_write_tree (ob, t, true);

  /* Output current IL state of the function.  */
  streamer_write_uhwi (ob, fn->curr_properties);

  /* Write all the attributes for FN as a single bitpack.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, fn->is_thunk, 1);
  bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
  bp_pack_value (&bp, fn->returns_pcc_struct, 1);
  bp_pack_value (&bp, fn->returns_struct, 1);
  bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
  bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
  bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
  bp_pack_value (&bp, fn->after_inlining, 1);
  bp_pack_value (&bp, fn->stdarg, 1);
  bp_pack_value (&bp, fn->has_nonlocal_label, 1);
  bp_pack_value (&bp, fn->calls_alloca, 1);
  bp_pack_value (&bp, fn->calls_setjmp, 1);
  bp_pack_value (&bp, fn->has_force_vect_loops, 1);
  bp_pack_value (&bp, fn->has_simduid_loops, 1);
  bp_pack_value (&bp, fn->va_list_fpr_size, 8);
  bp_pack_value (&bp, fn->va_list_gpr_size, 8);

  /* Output the function start and end loci.  */
  stream_output_location (ob, &bp, fn->function_start_locus);
  stream_output_location (ob, &bp, fn->function_end_locus);

  streamer_write_bitpack (&bp);
}
1813
1814
/* Output the body of function NODE->DECL to its own LTO section.
   For functions with a GIMPLE body this streams the function base,
   SSA names, EH regions, all basic blocks and the CFG; for abstract
   functions only the declaration parts needed for debug info are
   written.  */

static void
output_function (struct cgraph_node *node)
{
  tree function;
  struct function *fn;
  basic_block bb;
  struct output_block *ob;

  function = node->decl;
  fn = DECL_STRUCT_FUNCTION (function);
  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->cgraph_node = node;

  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  /* Set current_function_decl and cfun.  */
  push_cfun (fn);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  streamer_write_record_start (ob, LTO_function);

  /* Output decls for parameters and args.  */
  stream_write_tree (ob, DECL_RESULT (function), true);
  streamer_write_chain (ob, DECL_ARGUMENTS (function), true);

  /* Output DECL_INITIAL for the function, which contains the tree of
     lexical scopes.  */
  stream_write_tree (ob, DECL_INITIAL (function), true);

  /* We also stream abstract functions where we stream only stuff needed for
     debug info.  */
  if (gimple_has_body_p (function))
    {
      /* Flag for the reader: a body follows.  */
      streamer_write_uhwi (ob, 1);
      output_struct_function_base (ob, fn);

      /* Output all the SSA names used in the function.  */
      output_ssa_names (ob, fn);

      /* Output any exception handling regions.  */
      output_eh_regions (ob, fn);


      /* We will renumber the statements.  The code that does this uses
	 the same ordering that we use for serializing them so we can use
	 the same code on the other end and not have to write out the
	 statement numbers.  We do not assign UIDs to PHIs here because
	 virtual PHIs get re-computed on-the-fly which would make numbers
	 inconsistent.  */
      set_gimple_stmt_max_uid (cfun, 0);
      FOR_ALL_BB (bb)
	{
	  gimple_stmt_iterator gsi;
	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);

	      /* Virtual PHIs are not going to be streamed.  */
	      if (!virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}
      /* To avoid keeping duplicate gimple IDs in the statements, renumber
	 virtual phis now.  */
      FOR_ALL_BB (bb)
	{
	  gimple_stmt_iterator gsi;
	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      if (virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}

      /* Output the code for the function.  */
      FOR_ALL_BB_FN (bb, fn)
	output_bb (ob, bb, fn);

      /* The terminator for this function.  */
      streamer_write_record_start (ob, LTO_null);

      output_cfg (ob, fn);

      pop_cfun ();
   }
  else
    streamer_write_uhwi (ob, 0);

  /* Create a section to hold the pickled output of this function.  */
  produce_asm (ob, function);

  destroy_output_block (ob);
}
1920
1921
1922 /* Emit toplevel asms. */
1923
1924 void
1925 lto_output_toplevel_asms (void)
1926 {
1927 struct output_block *ob;
1928 struct asm_node *can;
1929 char *section_name;
1930 struct lto_output_stream *header_stream;
1931 struct lto_asm_header header;
1932
1933 if (! asm_nodes)
1934 return;
1935
1936 ob = create_output_block (LTO_section_asm);
1937
1938 /* Make string 0 be a NULL string. */
1939 streamer_write_char_stream (ob->string_stream, 0);
1940
1941 for (can = asm_nodes; can; can = can->next)
1942 {
1943 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
1944 streamer_write_hwi (ob, can->order);
1945 }
1946
1947 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
1948
1949 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
1950 lto_begin_section (section_name, !flag_wpa);
1951 free (section_name);
1952
1953 /* The entire header stream is computed here. */
1954 memset (&header, 0, sizeof (header));
1955
1956 /* Write the header. */
1957 header.lto_header.major_version = LTO_major_version;
1958 header.lto_header.minor_version = LTO_minor_version;
1959
1960 header.main_size = ob->main_stream->total_size;
1961 header.string_size = ob->string_stream->total_size;
1962
1963 header_stream = XCNEW (struct lto_output_stream);
1964 lto_output_data_stream (header_stream, &header, sizeof (header));
1965 lto_write_stream (header_stream);
1966 free (header_stream);
1967
1968 /* Put all of the gimple and the string table out the asm file as a
1969 block of text. */
1970 lto_write_stream (ob->main_stream);
1971 lto_write_stream (ob->string_stream);
1972
1973 lto_end_section ();
1974
1975 destroy_output_block (ob);
1976 }
1977
1978
/* Copy the function body of NODE without deserializing: the pickled
   bytes of its body section are copied verbatim from the input file
   to the output, and the per-function decl-state index vectors are
   duplicated so references inside the copied bytes stay valid.  */

static void
copy_function (struct cgraph_node *node)
{
  tree function = node->decl;
  struct lto_file_decl_data *file_data = node->lto_file_data;
  struct lto_output_stream *output_stream = XCNEW (struct lto_output_stream);
  const char *data;
  size_t len;
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
  char *section_name =
    lto_get_section_name (LTO_section_function_body, name, NULL);
  size_t i, j;
  struct lto_in_decl_state *in_state;
  struct lto_out_decl_state *out_state = lto_get_out_decl_state ();

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* We may have renamed the declaration, e.g., a static function.  */
  name = lto_get_decl_name_mapping (file_data, name);

  data = lto_get_section_data (file_data, LTO_section_function_body,
                               name, &len);
  gcc_assert (data);

  /* Do a bit copy of the function body.  */
  lto_output_data_stream (output_stream, data, len);
  lto_write_stream (output_stream);

  /* Copy decls.  */
  in_state =
    lto_get_function_in_decl_state (node->lto_file_data, function);
  gcc_assert (in_state);

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      size_t n = in_state->streams[i].size;
      tree *trees = in_state->streams[i].trees;
      struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);

      /* The out state must have the same indices and the in state.
	 So just copy the vector.  All the encoders in the in state
	 must be empty where we reach here.  */
      gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
      encoder->trees.reserve_exact (n);
      for (j = 0; j < n; j++)
	encoder->trees.safe_push (trees[j]);
    }

  lto_free_section_data (file_data, LTO_section_function_body, name,
			 data, len);
  free (output_stream);
  lto_end_section ();
}
2035
2036
/* Main entry point from the pass manager.  Stream out the body of every
   function recorded in the symtab encoder, then the symbol table itself.  */

void
lto_output (void)
{
  struct lto_out_decl_state *decl_state;
#ifdef ENABLE_CHECKING
  /* Tracks DECL_UIDs already emitted so we can assert each body is
     written at most once.  */
  bitmap output = lto_bitmap_alloc ();
#endif
  int i, n_nodes;
  lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* Initialize the streamer. */
  lto_streamer_init ();

  n_nodes = lto_symtab_encoder_size (encoder);
  /* Process only the functions with bodies.  Aliases carry no body of
     their own and are skipped.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *node = dyn_cast <cgraph_node> (snode);
      if (node
	  && lto_symtab_encoder_encode_body_p (encoder, node)
	  && !node->alias)
	{
#ifdef ENABLE_CHECKING
	  gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
	  bitmap_set_bit (output, DECL_UID (node->decl));
#endif
	  /* Each function is serialized under its own out-decl-state so
	     its decl references are numbered independently.  */
	  decl_state = lto_new_out_decl_state ();
	  lto_push_out_decl_state (decl_state);
	  if (gimple_has_body_p (node->decl) || !flag_wpa)
	    output_function (node);
	  else
	    /* At WPA time a body that was never read in is copied from
	       the input file byte-for-byte.  */
	    copy_function (node);
	  gcc_assert (lto_get_out_decl_state () == decl_state);
	  lto_pop_out_decl_state ();
	  lto_record_function_out_decl_state (node->decl, decl_state);
	}
    }

  /* Emit the callgraph after emitting function bodies.  This needs to
     be done now to make sure that all the statements in every function
     have been renumbered so that edges can be associated with call
     statements using the statement UIDs.  */
  output_symtab ();

#ifdef ENABLE_CHECKING
  lto_bitmap_free (output);
#endif
}
2088
2089 /* Write each node in encoded by ENCODER to OB, as well as those reachable
2090 from it and required for correct representation of its semantics.
2091 Each node in ENCODER must be a global declaration or a type. A node
2092 is written only once, even if it appears multiple times in the
2093 vector. Certain transitively-reachable nodes, such as those
2094 representing expressions, may be duplicated, but such nodes
2095 must not appear in ENCODER itself. */
2096
2097 static void
2098 write_global_stream (struct output_block *ob,
2099 struct lto_tree_ref_encoder *encoder)
2100 {
2101 tree t;
2102 size_t index;
2103 const size_t size = lto_tree_ref_encoder_size (encoder);
2104
2105 for (index = 0; index < size; index++)
2106 {
2107 t = lto_tree_ref_encoder_get_tree (encoder, index);
2108 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2109 stream_write_tree (ob, t, false);
2110 }
2111 }
2112
2113
2114 /* Write a sequence of indices into the globals vector corresponding
2115 to the trees in ENCODER. These are used by the reader to map the
2116 indices used to refer to global entities within function bodies to
2117 their referents. */
2118
2119 static void
2120 write_global_references (struct output_block *ob,
2121 struct lto_output_stream *ref_stream,
2122 struct lto_tree_ref_encoder *encoder)
2123 {
2124 tree t;
2125 uint32_t index;
2126 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2127
2128 /* Write size as 32-bit unsigned. */
2129 lto_output_data_stream (ref_stream, &size, sizeof (int32_t));
2130
2131 for (index = 0; index < size; index++)
2132 {
2133 uint32_t slot_num;
2134
2135 t = lto_tree_ref_encoder_get_tree (encoder, index);
2136 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2137 gcc_assert (slot_num != (unsigned)-1);
2138 lto_output_data_stream (ref_stream, &slot_num, sizeof slot_num);
2139 }
2140 }
2141
2142
2143 /* Write all the streams in an lto_out_decl_state STATE using
2144 output block OB and output stream OUT_STREAM. */
2145
2146 void
2147 lto_output_decl_state_streams (struct output_block *ob,
2148 struct lto_out_decl_state *state)
2149 {
2150 int i;
2151
2152 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2153 write_global_stream (ob, &state->streams[i]);
2154 }
2155
2156
2157 /* Write all the references in an lto_out_decl_state STATE using
2158 output block OB and output stream OUT_STREAM. */
2159
2160 void
2161 lto_output_decl_state_refs (struct output_block *ob,
2162 struct lto_output_stream *out_stream,
2163 struct lto_out_decl_state *state)
2164 {
2165 unsigned i;
2166 uint32_t ref;
2167 tree decl;
2168
2169 /* Write reference to FUNCTION_DECL. If there is not function,
2170 write reference to void_type_node. */
2171 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2172 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2173 gcc_assert (ref != (unsigned)-1);
2174 lto_output_data_stream (out_stream, &ref, sizeof (uint32_t));
2175
2176 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2177 write_global_references (ob, out_stream, &state->streams[i]);
2178 }
2179
2180
2181 /* Return the written size of STATE. */
2182
2183 static size_t
2184 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2185 {
2186 int i;
2187 size_t size;
2188
2189 size = sizeof (int32_t); /* fn_ref. */
2190 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2191 {
2192 size += sizeof (int32_t); /* vector size. */
2193 size += (lto_tree_ref_encoder_size (&state->streams[i])
2194 * sizeof (int32_t));
2195 }
2196 return size;
2197 }
2198
2199
/* Write symbol T into STREAM in CACHE.  SEEN specifies symbols we wrote
   so far, keyed by mangled assembler name; a name already in SEEN is
   not written again.  ALIAS is true when T is emitted on behalf of an
   alias, which relaxes the defined-symbol checking asserts below.
   The record layout written here is: NUL-terminated name, NUL-terminated
   comdat string, 1 byte of kind, 1 byte of visibility, 8 bytes of size,
   4 bytes of writer-cache slot number (the GCCPK_*/GCCPV_* values come
   from the linker-plugin interface).  */

static void
write_symbol (struct streamer_tree_cache_d *cache,
	      struct lto_output_stream *stream,
	      tree t, struct pointer_set_t *seen, bool alias)
{
  const char *name;
  enum gcc_plugin_symbol_kind kind;
  enum gcc_plugin_symbol_visibility visibility;
  unsigned slot_num;
  unsigned HOST_WIDEST_INT size;
  const char *comdat;
  unsigned char c;

  /* None of the following kinds of symbols are needed in the
     symbol table. */
  if (!TREE_PUBLIC (t)
      || is_builtin_fn (t)
      || DECL_ABSTRACT (t)
      || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
    return;
  gcc_assert (TREE_CODE (t) != RESULT_DECL);

  gcc_assert (TREE_CODE (t) == VAR_DECL
	      || TREE_CODE (t) == FUNCTION_DECL);

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));

  /* This behaves like assemble_name_raw in varasm.c, performing the
     same name manipulations that ASM_OUTPUT_LABELREF does. */
  name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));

  /* Emit each assembler name at most once.  */
  if (pointer_set_contains (seen, name))
    return;
  pointer_set_insert (seen, name);

  streamer_tree_cache_lookup (cache, t, &slot_num);
  gcc_assert (slot_num != (unsigned)-1);

  /* Classify T into the plugin symbol kinds.  */
  if (DECL_EXTERNAL (t))
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKUNDEF;
      else
	kind = GCCPK_UNDEF;
    }
  else
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKDEF;
      else if (DECL_COMMON (t))
	kind = GCCPK_COMMON;
      else
	kind = GCCPK_DEF;

      /* When something is defined, it should have node attached. */
      gcc_assert (alias || TREE_CODE (t) != VAR_DECL
		  || varpool_get_node (t)->definition);
      gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
		  || (cgraph_get_node (t)
		      && cgraph_get_node (t)->definition));
    }

  /* Imitate what default_elf_asm_output_external do.
     When symbol is external, we need to output it with DEFAULT visibility
     when compiling with -fvisibility=default, while with HIDDEN visibility
     when symbol has attribute (visibility("hidden")) specified.
     targetm.binds_local_p check DECL_VISIBILITY_SPECIFIED and gets this
     right. */

  if (DECL_EXTERNAL (t)
      && !targetm.binds_local_p (t))
    visibility = GCCPV_DEFAULT;
  else
    switch (DECL_VISIBILITY (t))
      {
      case VISIBILITY_DEFAULT:
	visibility = GCCPV_DEFAULT;
	break;
      case VISIBILITY_PROTECTED:
	visibility = GCCPV_PROTECTED;
	break;
      case VISIBILITY_HIDDEN:
	visibility = GCCPV_HIDDEN;
	break;
      case VISIBILITY_INTERNAL:
	visibility = GCCPV_INTERNAL;
	break;
      }

  /* Only common symbols carry a size; everything else reports zero.  */
  if (kind == GCCPK_COMMON
      && DECL_SIZE_UNIT (t)
      && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
    size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
  else
    size = 0;

  if (DECL_ONE_ONLY (t))
    comdat = IDENTIFIER_POINTER (DECL_COMDAT_GROUP (t));
  else
    comdat = "";

  /* Emit the record in the fixed layout described above.  */
  lto_output_data_stream (stream, name, strlen (name) + 1);
  lto_output_data_stream (stream, comdat, strlen (comdat) + 1);
  c = (unsigned char) kind;
  lto_output_data_stream (stream, &c, 1);
  c = (unsigned char) visibility;
  lto_output_data_stream (stream, &c, 1);
  lto_output_data_stream (stream, &size, 8);
  lto_output_data_stream (stream, &slot_num, 4);
}
2313
2314 /* Return true if NODE should appear in the plugin symbol table. */
2315
2316 bool
2317 output_symbol_p (symtab_node *node)
2318 {
2319 struct cgraph_node *cnode;
2320 if (!symtab_real_symbol_p (node))
2321 return false;
2322 /* We keep external functions in symtab for sake of inlining
2323 and devirtualization. We do not want to see them in symbol table as
2324 references unless they are really used. */
2325 cnode = dyn_cast <cgraph_node> (node);
2326 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2327 && cnode->callers)
2328 return true;
2329
2330 /* Ignore all references from external vars initializers - they are not really
2331 part of the compilation unit until they are used by folding. Some symbols,
2332 like references to external construction vtables can not be referred to at all.
2333 We decide this at can_refer_decl_in_current_unit_p. */
2334 if (!node->definition || DECL_EXTERNAL (node->decl))
2335 {
2336 int i;
2337 struct ipa_ref *ref;
2338 for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list,
2339 i, ref); i++)
2340 {
2341 if (ref->use == IPA_REF_ALIAS)
2342 continue;
2343 if (is_a <cgraph_node> (ref->referring))
2344 return true;
2345 if (!DECL_EXTERNAL (ref->referring->decl))
2346 return true;
2347 }
2348 return false;
2349 }
2350 return true;
2351 }
2352
2353
2354 /* Write an IL symbol table to OB.
2355 SET and VSET are cgraph/varpool node sets we are outputting. */
2356
2357 static void
2358 produce_symtab (struct output_block *ob)
2359 {
2360 struct streamer_tree_cache_d *cache = ob->writer_cache;
2361 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2362 struct pointer_set_t *seen;
2363 struct lto_output_stream stream;
2364 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2365 lto_symtab_encoder_iterator lsei;
2366
2367 lto_begin_section (section_name, false);
2368 free (section_name);
2369
2370 seen = pointer_set_create ();
2371 memset (&stream, 0, sizeof (stream));
2372
2373 /* Write the symbol table.
2374 First write everything defined and then all declarations.
2375 This is necessary to handle cases where we have duplicated symbols. */
2376 for (lsei = lsei_start (encoder);
2377 !lsei_end_p (lsei); lsei_next (&lsei))
2378 {
2379 symtab_node *node = lsei_node (lsei);
2380
2381 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2382 continue;
2383 write_symbol (cache, &stream, node->decl, seen, false);
2384 }
2385 for (lsei = lsei_start (encoder);
2386 !lsei_end_p (lsei); lsei_next (&lsei))
2387 {
2388 symtab_node *node = lsei_node (lsei);
2389
2390 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2391 continue;
2392 write_symbol (cache, &stream, node->decl, seen, false);
2393 }
2394
2395 lto_write_stream (&stream);
2396 pointer_set_destroy (seen);
2397
2398 lto_end_section ();
2399 }
2400
2401
/* This pass is run after all of the functions are serialized and all
   of the IPA passes have written their serialized forms.  This pass
   causes the vector of all of the global decls and types used from
   this file to be written in to a section that can then be read in to
   recover these on other side.
   Section layout, in order: lto_decl_header, the per-state reference
   vectors (count first), the main tree stream, and the string table.  */

void
produce_asm_for_decls (void)
{
  struct lto_out_decl_state *out_state;
  struct lto_out_decl_state *fn_out_state;
  struct lto_decl_header header;
  char *section_name;
  struct output_block *ob;
  struct lto_output_stream *header_stream, *decl_state_stream;
  unsigned idx, num_fns;
  size_t decl_state_size;
  int32_t num_decl_states;

  ob = create_output_block (LTO_section_decls);
  ob->global = true;

  memset (&header, 0, sizeof (struct lto_decl_header));

  section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* Make string 0 be a NULL string. */
  streamer_write_char_stream (ob->string_stream, 0);

  gcc_assert (!alias_pairs);

  /* Write the global symbols: first the trees of the file-level state,
     then those of each per-function state, into OB's main stream.  */
  out_state = lto_get_out_decl_state ();
  num_fns = lto_function_decl_states.length ();
  lto_output_decl_state_streams (ob, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_streams (ob, fn_out_state);
    }

  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  /* Currently not used.  This field would allow us to preallocate
     the globals vector, so that it need not be resized as it is extended. */
  header.num_nodes = -1;

  /* Compute the total size of all decl out states. */
  decl_state_size = sizeof (int32_t);
  decl_state_size += lto_out_decl_state_written_size (out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      decl_state_size += lto_out_decl_state_written_size (fn_out_state);
    }
  header.decl_state_size = decl_state_size;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  /* The header goes out first so the reader knows the sizes of the
     pieces that follow.  */
  header_stream = XCNEW (struct lto_output_stream);
  lto_output_data_stream (header_stream, &header, sizeof header);
  lto_write_stream (header_stream);
  free (header_stream);

  /* Write the main out-decl state, followed by out-decl states of
     functions. */
  decl_state_stream = XCNEW (struct lto_output_stream);
  num_decl_states = num_fns + 1;
  lto_output_data_stream (decl_state_stream, &num_decl_states,
			  sizeof (num_decl_states));
  lto_output_decl_state_refs (ob, decl_state_stream, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_refs (ob, decl_state_stream, fn_out_state);
    }
  lto_write_stream (decl_state_stream);
  free (decl_state_stream);

  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  /* Write the symbol table.  It is used by linker to determine dependencies
     and thus we can skip it for WPA. */
  if (!flag_wpa)
    produce_symtab (ob);

  /* Write command line opts. */
  lto_write_options ();

  /* Deallocate memory and clean up. */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_delete_out_decl_state (fn_out_state);
    }
  lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
  lto_function_decl_states.release ();
  destroy_output_block (ob);
}