re PR lto/59326 (FAIL: gcc.dg/vect/vect-simd-clone-*.c)
[gcc.git] / gcc / lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2013 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "stor-layout.h"
29 #include "stringpool.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "hashtab.h"
35 #include "basic-block.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-expr.h"
39 #include "is-a.h"
40 #include "gimple.h"
41 #include "gimple-iterator.h"
42 #include "gimple-ssa.h"
43 #include "tree-ssanames.h"
44 #include "tree-pass.h"
45 #include "function.h"
46 #include "diagnostic-core.h"
47 #include "except.h"
48 #include "lto-symtab.h"
49 #include "lto-streamer.h"
50 #include "data-streamer.h"
51 #include "gimple-streamer.h"
52 #include "tree-streamer.h"
53 #include "streamer-hooks.h"
54 #include "cfgloop.h"
55
56
57 /* Clear the line info stored in OB. */
58
59 static void
60 clear_line_info (struct output_block *ob)
61 {
62 ob->current_file = NULL;
63 ob->current_line = 0;
64 ob->current_col = 0;
65 }
66
67
68 /* Create the output block and return it. SECTION_TYPE is
69 LTO_section_function_body or LTO_section_static_initializer. */
70
71 struct output_block *
72 create_output_block (enum lto_section_type section_type)
73 {
74 struct output_block *ob = XCNEW (struct output_block);
75
76 ob->section_type = section_type;
77 ob->decl_state = lto_get_out_decl_state ();
78 ob->main_stream = XCNEW (struct lto_output_stream);
79 ob->string_stream = XCNEW (struct lto_output_stream);
80 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true);
81
82 if (section_type == LTO_section_function_body)
83 ob->cfg_stream = XCNEW (struct lto_output_stream);
84
85 clear_line_info (ob);
86
87 ob->string_hash_table.create (37);
88 gcc_obstack_init (&ob->obstack);
89
90 return ob;
91 }
92
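/* A typical life cycle of an output block, as used by output_function
   below (a sketch only):

     struct output_block *ob = create_output_block (LTO_section_function_body);
     ... write records with streamer_write_* and stream_write_tree ...
     produce_asm (ob, fndecl);
     destroy_output_block (ob);  */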
93
94 /* Destroy the output block OB. */
95
96 void
97 destroy_output_block (struct output_block *ob)
98 {
99 enum lto_section_type section_type = ob->section_type;
100
101 ob->string_hash_table.dispose ();
102
103 free (ob->main_stream);
104 free (ob->string_stream);
105 if (section_type == LTO_section_function_body)
106 free (ob->cfg_stream);
107
108 streamer_tree_cache_delete (ob->writer_cache);
109 obstack_free (&ob->obstack, NULL);
110
111 free (ob);
112 }
113
114
115 /* Look up NODE in the type table and write the index for it to OB. */
116
117 static void
118 output_type_ref (struct output_block *ob, tree node)
119 {
120 streamer_write_record_start (ob, LTO_type_ref);
121 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
122 }
123
124
125 /* Return true if tree node T is written to various tables. For these
126 nodes, we sometimes want to write their physical representation
127 (via lto_output_tree), and sometimes we need to emit an index
128 reference into a table (via lto_output_tree_ref). */
129
130 static bool
131 tree_is_indexable (tree t)
132 {
133 /* Parameters and return values of functions of variably modified types
134 must go to the global stream, because they may be used in the type
135 definition. */
136 if (TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
137 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
138 else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
139 || TREE_CODE (t) == TYPE_DECL
140 || TREE_CODE (t) == CONST_DECL)
141 && decl_function_context (t))
142 return false;
143 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
144 return false;
145 /* Variably modified types need to be streamed alongside function
146 bodies because they can refer to local entities. Together with
147 them we have to localize their members as well.
148 ??? In theory that includes non-FIELD_DECLs as well. */
149 else if (TYPE_P (t)
150 && variably_modified_type_p (t, NULL_TREE))
151 return false;
152 else if (TREE_CODE (t) == FIELD_DECL
153 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
154 return false;
155 else
156 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
157 }
158
159
160 /* Output info about location LOC into bitpack BP.  Only the fields
161 that changed since the last location streamed to OB are written,
162 and OB's current file, line and column are updated to LOC. */
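/* Roughly, the bits packed below are:
     1 bit            LOC == UNKNOWN_LOCATION (if set, nothing follows)
     3 bits           whether file/line/column changed since the last location
     var-len unsigned string-table index of the file name, if it changed
     var-len unsigned line number, if it changed
     var-len unsigned column number, if it changed  */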
163
164 void
165 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
166 location_t loc)
167 {
168 expanded_location xloc;
169
170 loc = LOCATION_LOCUS (loc);
171 bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
172 if (loc == UNKNOWN_LOCATION)
173 return;
174
175 xloc = expand_location (loc);
176
177 bp_pack_value (bp, ob->current_file != xloc.file, 1);
178 bp_pack_value (bp, ob->current_line != xloc.line, 1);
179 bp_pack_value (bp, ob->current_col != xloc.column, 1);
180
181 if (ob->current_file != xloc.file)
182 bp_pack_var_len_unsigned (bp,
183 streamer_string_index (ob, xloc.file,
184 strlen (xloc.file) + 1,
185 true));
186 ob->current_file = xloc.file;
187
188 if (ob->current_line != xloc.line)
189 bp_pack_var_len_unsigned (bp, xloc.line);
190 ob->current_line = xloc.line;
191
192 if (ob->current_col != xloc.column)
193 bp_pack_var_len_unsigned (bp, xloc.column);
194 ob->current_col = xloc.column;
195 }
196
197
198 /* If EXPR is an indexable tree node, output a reference to it to
199 output block OB. Otherwise, output the physical representation of
200 EXPR to OB. */
201
202 static void
203 lto_output_tree_ref (struct output_block *ob, tree expr)
204 {
205 enum tree_code code;
206
207 if (TYPE_P (expr))
208 {
209 output_type_ref (ob, expr);
210 return;
211 }
212
213 code = TREE_CODE (expr);
214 switch (code)
215 {
216 case SSA_NAME:
217 streamer_write_record_start (ob, LTO_ssa_name_ref);
218 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
219 break;
220
221 case FIELD_DECL:
222 streamer_write_record_start (ob, LTO_field_decl_ref);
223 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
224 break;
225
226 case FUNCTION_DECL:
227 streamer_write_record_start (ob, LTO_function_decl_ref);
228 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
229 break;
230
231 case VAR_DECL:
232 case DEBUG_EXPR_DECL:
233 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
234 case PARM_DECL:
235 streamer_write_record_start (ob, LTO_global_decl_ref);
236 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
237 break;
238
239 case CONST_DECL:
240 streamer_write_record_start (ob, LTO_const_decl_ref);
241 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
242 break;
243
244 case IMPORTED_DECL:
245 gcc_assert (decl_function_context (expr) == NULL);
246 streamer_write_record_start (ob, LTO_imported_decl_ref);
247 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
248 break;
249
250 case TYPE_DECL:
251 streamer_write_record_start (ob, LTO_type_decl_ref);
252 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
253 break;
254
255 case NAMESPACE_DECL:
256 streamer_write_record_start (ob, LTO_namespace_decl_ref);
257 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
258 break;
259
260 case LABEL_DECL:
261 streamer_write_record_start (ob, LTO_label_decl_ref);
262 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
263 break;
264
265 case RESULT_DECL:
266 streamer_write_record_start (ob, LTO_result_decl_ref);
267 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
268 break;
269
270 case TRANSLATION_UNIT_DECL:
271 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
272 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
273 break;
274
275 default:
276 /* No other node is indexable, so it should have been handled by
277 lto_output_tree. */
278 gcc_unreachable ();
279 }
280 }
281
282
283 /* Return true if EXPR is a tree node that can be written to disk. */
284
285 static inline bool
286 lto_is_streamable (tree expr)
287 {
288 enum tree_code code = TREE_CODE (expr);
289
290 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
291 name version in lto_output_tree_ref (see output_ssa_names). */
292 return !is_lang_specific (expr)
293 && code != SSA_NAME
294 && code != CALL_EXPR
295 && code != LANG_TYPE
296 && code != MODIFY_EXPR
297 && code != INIT_EXPR
298 && code != TARGET_EXPR
299 && code != BIND_EXPR
300 && code != WITH_CLEANUP_EXPR
301 && code != STATEMENT_LIST
302 && (code == CASE_LABEL_EXPR
303 || code == DECL_EXPR
304 || TREE_CODE_CLASS (code) != tcc_statement);
305 }
306
307
308 /* For EXPR, look up and return what we want to stream to OB as its DECL_INITIAL. */
309
310 static tree
311 get_symbol_initial_value (struct output_block *ob, tree expr)
312 {
313 gcc_checking_assert (DECL_P (expr)
314 && TREE_CODE (expr) != FUNCTION_DECL
315 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
316
317 /* Handle DECL_INITIAL for symbols. */
318 tree initial = DECL_INITIAL (expr);
319 if (TREE_CODE (expr) == VAR_DECL
320 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
321 && !DECL_IN_CONSTANT_POOL (expr)
322 && initial)
323 {
324 lto_symtab_encoder_t encoder;
325 struct varpool_node *vnode;
326
327 encoder = ob->decl_state->symtab_node_encoder;
328 vnode = varpool_get_node (expr);
329 if (!vnode
330 || !lto_symtab_encoder_encode_initializer_p (encoder,
331 vnode))
332 initial = error_mark_node;
333 }
334
335 return initial;
336 }
337
338
339 /* Write a physical representation of tree node EXPR to output block
340 OB. If REF_P is true, the leaves of EXPR are emitted as references
341 via lto_output_tree_ref. */
343
344 static void
345 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
346 {
347 /* Pack all the non-pointer fields in EXPR into a bitpack and write
348 the resulting bitpack. */
349 bitpack_d bp = bitpack_create (ob->main_stream);
350 streamer_pack_tree_bitfields (ob, &bp, expr);
351 streamer_write_bitpack (&bp);
352
353 /* Write all the pointer fields in EXPR. */
354 streamer_write_tree_body (ob, expr, ref_p);
355
356 /* Write any LTO-specific data to OB. */
357 if (DECL_P (expr)
358 && TREE_CODE (expr) != FUNCTION_DECL
359 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
360 {
361 /* Handle DECL_INITIAL for symbols. */
362 tree initial = get_symbol_initial_value (ob, expr);
363 stream_write_tree (ob, initial, ref_p);
364 }
365 }
366
367 /* Write a physical representation of tree node EXPR to output block
368 OB. If REF_P is true, the leaves of EXPR are emitted as references
369 via lto_output_tree_ref. */
371
372 static void
373 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
374 {
375 if (!lto_is_streamable (expr))
376 internal_error ("tree code %qs is not supported in LTO streams",
377 get_tree_code_name (TREE_CODE (expr)));
378
379 /* Write the header, containing everything needed to materialize
380 EXPR on the reading side. */
381 streamer_write_tree_header (ob, expr);
382
383 lto_write_tree_1 (ob, expr, ref_p);
384
385 /* Mark the end of EXPR. */
386 streamer_write_zero (ob);
387 }
388
389 /* Emit the physical representation of tree node EXPR to output block
390 OB, entering it into the writer cache with hash value HASH.  If REF_P
391 is true, the trees reachable from EXPR are emitted as references via
lto_output_tree_ref.  THIS_REF_P must be false here if EXPR is indexable. */
392
393 static void
394 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
395 bool ref_p, bool this_ref_p)
396 {
397 unsigned ix;
398
399 gcc_checking_assert (expr != NULL_TREE
400 && !(this_ref_p && tree_is_indexable (expr)));
401
402 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
403 expr, hash, &ix);
404 gcc_assert (!exists_p);
405 if (streamer_handle_as_builtin_p (expr))
406 {
407 /* MD and NORMAL builtins do not need to be written out
408 completely as they are always instantiated by the
409 compiler on startup. The only builtins that need to
410 be written out are BUILT_IN_FRONTEND. For all other
411 builtins, we simply write the class and code. */
412 streamer_write_builtin (ob, expr);
413 }
414 else if (TREE_CODE (expr) == INTEGER_CST
415 && !TREE_OVERFLOW (expr))
416 {
417 /* Shared INTEGER_CST nodes are special because they need their
418 original type to be materialized by the reader (to implement
419 TYPE_CACHED_VALUES). */
420 streamer_write_integer_cst (ob, expr, ref_p);
421 }
422 else
423 {
424 /* This is the first time we see EXPR, write its fields
425 to OB. */
426 lto_write_tree (ob, expr, ref_p);
427 }
428 }
429
430 struct sccs
431 {
432 unsigned int dfsnum;
433 unsigned int low;
434 };
435
436 struct scc_entry
437 {
438 tree t;
439 hashval_t hash;
440 };
441
442 static unsigned int next_dfs_num;
443 static vec<scc_entry> sccstack;
444 static struct pointer_map_t *sccstate;
445 static struct obstack sccstate_obstack;
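/* The above implement a Tarjan-style DFS over the tree graph:
   DFS_write_tree pushes not-yet-streamed trees onto SCCSTACK, assigns
   increasing DFSNUMs and propagates LOW links through the per-tree
   state kept in SCCSTATE.  When a tree's LOW equals its DFSNUM, a
   complete SCC sits on top of the stack and is streamed as one
   LTO_tree_scc record.  */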
446
447 static void
448 DFS_write_tree (struct output_block *ob, sccs *from_state,
449 tree expr, bool ref_p, bool this_ref_p);
450
451 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
452 DFS recurse for all tree edges originating from it. */
453
454 static void
455 DFS_write_tree_body (struct output_block *ob,
456 tree expr, sccs *expr_state, bool ref_p)
457 {
458 #define DFS_follow_tree_edge(DEST) \
459 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
460
461 enum tree_code code;
462
463 code = TREE_CODE (expr);
464
465 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
466 {
467 if (TREE_CODE (expr) != IDENTIFIER_NODE)
468 DFS_follow_tree_edge (TREE_TYPE (expr));
469 }
470
471 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
472 {
473 for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
474 DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
475 }
476
477 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
478 {
479 DFS_follow_tree_edge (TREE_REALPART (expr));
480 DFS_follow_tree_edge (TREE_IMAGPART (expr));
481 }
482
483 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
484 {
485 /* Drop names that were created for anonymous entities. */
486 if (DECL_NAME (expr)
487 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
488 && ANON_AGGRNAME_P (DECL_NAME (expr)))
489 ;
490 else
491 DFS_follow_tree_edge (DECL_NAME (expr));
492 DFS_follow_tree_edge (DECL_CONTEXT (expr));
493 }
494
495 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
496 {
497 DFS_follow_tree_edge (DECL_SIZE (expr));
498 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
499
500 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
501 special handling in LTO, it must be handled by streamer hooks. */
502
503 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
504
505 /* Do not follow DECL_ABSTRACT_ORIGIN. We cannot handle debug information
506 for early inlining so drop it on the floor instead of ICEing in
507 dwarf2out.c. */
508
509 if ((TREE_CODE (expr) == VAR_DECL
510 || TREE_CODE (expr) == PARM_DECL)
511 && DECL_HAS_VALUE_EXPR_P (expr))
512 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
513 if (TREE_CODE (expr) == VAR_DECL)
514 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
515 }
516
517 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
518 {
519 if (TREE_CODE (expr) == TYPE_DECL)
520 DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
521 DFS_follow_tree_edge (DECL_VINDEX (expr));
522 }
523
524 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
525 {
526 /* Make sure we don't inadvertently set the assembler name. */
527 if (DECL_ASSEMBLER_NAME_SET_P (expr))
528 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
529 DFS_follow_tree_edge (DECL_SECTION_NAME (expr));
530 DFS_follow_tree_edge (DECL_COMDAT_GROUP (expr));
531 }
532
533 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
534 {
535 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
536 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
537 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
538 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
539 DFS_follow_tree_edge (DECL_FCONTEXT (expr));
540 }
541
542 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
543 {
544 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
545 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
546 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
547 }
548
549 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
550 {
551 DFS_follow_tree_edge (TYPE_SIZE (expr));
552 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
553 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
554 DFS_follow_tree_edge (TYPE_NAME (expr));
555 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
556 reconstructed during fixup. */
557 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
558 during fixup. */
559 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
560 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
561 /* TYPE_CANONICAL is re-computed during type merging, so no need
562 to follow it here. */
563 DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
564 }
565
566 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
567 {
568 if (TREE_CODE (expr) == ENUMERAL_TYPE)
569 DFS_follow_tree_edge (TYPE_VALUES (expr));
570 else if (TREE_CODE (expr) == ARRAY_TYPE)
571 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
572 else if (RECORD_OR_UNION_TYPE_P (expr))
573 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
574 DFS_follow_tree_edge (t);
575 else if (TREE_CODE (expr) == FUNCTION_TYPE
576 || TREE_CODE (expr) == METHOD_TYPE)
577 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
578
579 if (!POINTER_TYPE_P (expr))
580 DFS_follow_tree_edge (TYPE_MINVAL (expr));
581 DFS_follow_tree_edge (TYPE_MAXVAL (expr));
582 if (RECORD_OR_UNION_TYPE_P (expr))
583 DFS_follow_tree_edge (TYPE_BINFO (expr));
584 }
585
586 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
587 {
588 DFS_follow_tree_edge (TREE_PURPOSE (expr));
589 DFS_follow_tree_edge (TREE_VALUE (expr));
590 DFS_follow_tree_edge (TREE_CHAIN (expr));
591 }
592
593 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
594 {
595 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
596 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
597 }
598
599 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
600 {
601 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
602 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
603 DFS_follow_tree_edge (TREE_BLOCK (expr));
604 }
605
606 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
607 {
608 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
609 /* ??? FIXME. See also streamer_write_chain. */
610 if (!(VAR_OR_FUNCTION_DECL_P (t)
611 && DECL_EXTERNAL (t)))
612 DFS_follow_tree_edge (t);
613
614 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
615
616 /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
617 handle - those that represent inlined function scopes.
618 For the rest, drop them on the floor instead of ICEing
619 in dwarf2out.c. */
620 if (inlined_function_outer_scope_p (expr))
621 {
622 tree ultimate_origin = block_ultimate_origin (expr);
623 DFS_follow_tree_edge (ultimate_origin);
624 }
625 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
626 information for early inlined BLOCKs so drop it on the floor instead
627 of ICEing in dwarf2out.c. */
628
629 /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN are not live at LTO
630 streaming time. */
631
632 /* Do not output BLOCK_SUBBLOCKS. Instead, on stream-in this
633 list is re-constructed from BLOCK_SUPERCONTEXT. */
634 }
635
636 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
637 {
638 unsigned i;
639 tree t;
640
641 /* Note that the number of BINFO slots has already been emitted in
642 EXPR's header (see streamer_write_tree_header) because this length
643 is needed to build the empty BINFO node on the reader side. */
644 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
645 DFS_follow_tree_edge (t);
646 DFS_follow_tree_edge (BINFO_OFFSET (expr));
647 DFS_follow_tree_edge (BINFO_VTABLE (expr));
648 DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));
649
650 /* The number of BINFO_BASE_ACCESSES has already been emitted in
651 EXPR's bitfield section. */
652 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
653 DFS_follow_tree_edge (t);
654
655 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
656 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
657 }
658
659 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
660 {
661 unsigned i;
662 tree index, value;
663
664 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
665 {
666 DFS_follow_tree_edge (index);
667 DFS_follow_tree_edge (value);
668 }
669 }
670
671 if (code == OMP_CLAUSE)
672 {
673 int i;
674 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
675 DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
676 DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
677 }
678
679 #undef DFS_follow_tree_edge
680 }
681
682 /* Return a hash value for the tree T. */
683
684 static hashval_t
685 hash_tree (struct streamer_tree_cache_d *cache, tree t)
686 {
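/* Mix in the hash of SIBLING, but only if it is already in the streamer
   cache, i.e. if it lies outside the SCC currently being hashed; hashes
   of in-SCC members are mixed in later by hash_scc.  */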
687 #define visit(SIBLING) \
688 do { \
689 unsigned ix; \
690 if (SIBLING && streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
691 v = iterative_hash_hashval_t (streamer_tree_cache_get_hash (cache, ix), v); \
692 } while (0)
693
694 /* Hash TS_BASE. */
695 enum tree_code code = TREE_CODE (t);
696 hashval_t v = iterative_hash_host_wide_int (code, 0);
697 if (!TYPE_P (t))
698 {
699 v = iterative_hash_host_wide_int (TREE_SIDE_EFFECTS (t)
700 | (TREE_CONSTANT (t) << 1)
701 | (TREE_READONLY (t) << 2)
702 | (TREE_PUBLIC (t) << 3), v);
703 }
704 v = iterative_hash_host_wide_int (TREE_ADDRESSABLE (t)
705 | (TREE_THIS_VOLATILE (t) << 1), v);
706 if (DECL_P (t))
707 v = iterative_hash_host_wide_int (DECL_UNSIGNED (t), v);
708 else if (TYPE_P (t))
709 v = iterative_hash_host_wide_int (TYPE_UNSIGNED (t), v);
710 if (TYPE_P (t))
711 v = iterative_hash_host_wide_int (TYPE_ARTIFICIAL (t), v);
712 else
713 v = iterative_hash_host_wide_int (TREE_NO_WARNING (t), v);
714 v = iterative_hash_host_wide_int (TREE_NOTHROW (t)
715 | (TREE_STATIC (t) << 1)
716 | (TREE_PROTECTED (t) << 2)
717 | (TREE_DEPRECATED (t) << 3), v);
718 if (code != TREE_BINFO)
719 v = iterative_hash_host_wide_int (TREE_PRIVATE (t), v);
720 if (TYPE_P (t))
721 v = iterative_hash_host_wide_int (TYPE_SATURATING (t)
722 | (TYPE_ADDR_SPACE (t) << 1), v);
723 else if (code == SSA_NAME)
724 v = iterative_hash_host_wide_int (SSA_NAME_IS_DEFAULT_DEF (t), v);
725
726 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
727 {
728 v = iterative_hash_host_wide_int (TREE_INT_CST_LOW (t), v);
729 v = iterative_hash_host_wide_int (TREE_INT_CST_HIGH (t), v);
730 }
731
732 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
733 {
734 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
735 v = iterative_hash_host_wide_int (r.cl, v);
736 v = iterative_hash_host_wide_int (r.decimal
737 | (r.sign << 1)
738 | (r.signalling << 2)
739 | (r.canonical << 3), v);
740 v = iterative_hash_host_wide_int (r.uexp, v);
741 for (unsigned i = 0; i < SIGSZ; ++i)
742 v = iterative_hash_host_wide_int (r.sig[i], v);
743 }
744
745 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
746 {
747 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
748 v = iterative_hash_host_wide_int (f.mode, v);
749 v = iterative_hash_host_wide_int (f.data.low, v);
750 v = iterative_hash_host_wide_int (f.data.high, v);
751 }
752
753 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
754 {
755 v = iterative_hash_host_wide_int (DECL_MODE (t), v);
756 v = iterative_hash_host_wide_int (DECL_NONLOCAL (t)
757 | (DECL_VIRTUAL_P (t) << 1)
758 | (DECL_IGNORED_P (t) << 2)
759 | (DECL_ABSTRACT (t) << 3)
760 | (DECL_ARTIFICIAL (t) << 4)
761 | (DECL_USER_ALIGN (t) << 5)
762 | (DECL_PRESERVE_P (t) << 6)
763 | (DECL_EXTERNAL (t) << 7)
764 | (DECL_GIMPLE_REG_P (t) << 8), v);
765 v = iterative_hash_host_wide_int (DECL_ALIGN (t), v);
766 if (code == LABEL_DECL)
767 {
768 v = iterative_hash_host_wide_int (EH_LANDING_PAD_NR (t), v);
769 v = iterative_hash_host_wide_int (LABEL_DECL_UID (t), v);
770 }
771 else if (code == FIELD_DECL)
772 {
773 v = iterative_hash_host_wide_int (DECL_PACKED (t)
774 | (DECL_NONADDRESSABLE_P (t) << 1),
775 v);
776 v = iterative_hash_host_wide_int (DECL_OFFSET_ALIGN (t), v);
777 }
778 else if (code == VAR_DECL)
779 {
780 v = iterative_hash_host_wide_int (DECL_HAS_DEBUG_EXPR_P (t)
781 | (DECL_NONLOCAL_FRAME (t) << 1),
782 v);
783 }
784 if (code == RESULT_DECL
785 || code == PARM_DECL
786 || code == VAR_DECL)
787 {
788 v = iterative_hash_host_wide_int (DECL_BY_REFERENCE (t), v);
789 if (code == VAR_DECL
790 || code == PARM_DECL)
791 v = iterative_hash_host_wide_int (DECL_HAS_VALUE_EXPR_P (t), v);
792 }
793 }
794
795 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
796 v = iterative_hash_host_wide_int (DECL_REGISTER (t), v);
797
798 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
799 {
800 v = iterative_hash_host_wide_int ((DECL_COMMON (t))
801 | (DECL_DLLIMPORT_P (t) << 1)
802 | (DECL_WEAK (t) << 2)
803 | (DECL_SEEN_IN_BIND_EXPR_P (t) << 3)
804 | (DECL_COMDAT (t) << 4)
805 | (DECL_VISIBILITY_SPECIFIED (t) << 6),
806 v);
807 v = iterative_hash_host_wide_int (DECL_VISIBILITY (t), v);
808 if (code == VAR_DECL)
809 {
810 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
811 v = iterative_hash_host_wide_int (DECL_HARD_REGISTER (t)
812 | (DECL_IN_CONSTANT_POOL (t) << 1),
813 v);
814 v = iterative_hash_host_wide_int (DECL_TLS_MODEL (t), v);
815 }
816 if (TREE_CODE (t) == FUNCTION_DECL)
817 v = iterative_hash_host_wide_int (DECL_FINAL_P (t)
818 | (DECL_CXX_CONSTRUCTOR_P (t) << 1)
819 | (DECL_CXX_DESTRUCTOR_P (t) << 2),
820 v);
821 if (VAR_OR_FUNCTION_DECL_P (t))
822 v = iterative_hash_host_wide_int (DECL_INIT_PRIORITY (t), v);
823 }
824
825 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
826 {
827 v = iterative_hash_host_wide_int (DECL_BUILT_IN_CLASS (t), v);
828 v = iterative_hash_host_wide_int (DECL_STATIC_CONSTRUCTOR (t)
829 | (DECL_STATIC_DESTRUCTOR (t) << 1)
830 | (DECL_UNINLINABLE (t) << 2)
831 | (DECL_POSSIBLY_INLINED (t) << 3)
832 | (DECL_IS_NOVOPS (t) << 4)
833 | (DECL_IS_RETURNS_TWICE (t) << 5)
834 | (DECL_IS_MALLOC (t) << 6)
835 | (DECL_IS_OPERATOR_NEW (t) << 7)
836 | (DECL_DECLARED_INLINE_P (t) << 8)
837 | (DECL_STATIC_CHAIN (t) << 9)
838 | (DECL_NO_INLINE_WARNING_P (t) << 10)
839 | (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t) << 11)
840 | (DECL_NO_LIMIT_STACK (t) << 12)
841 | (DECL_DISREGARD_INLINE_LIMITS (t) << 13)
842 | (DECL_PURE_P (t) << 14)
843 | (DECL_LOOPING_CONST_OR_PURE_P (t) << 15), v);
844 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
845 v = iterative_hash_host_wide_int (DECL_FUNCTION_CODE (t), v);
846 if (DECL_STATIC_DESTRUCTOR (t))
847 v = iterative_hash_host_wide_int (DECL_FINI_PRIORITY (t), v);
848 }
849
850 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
851 {
852 v = iterative_hash_host_wide_int (TYPE_MODE (t), v);
853 v = iterative_hash_host_wide_int (TYPE_STRING_FLAG (t)
854 | (TYPE_NO_FORCE_BLK (t) << 1)
855 | (TYPE_NEEDS_CONSTRUCTING (t) << 2)
856 | (TYPE_PACKED (t) << 3)
857 | (TYPE_RESTRICT (t) << 4)
858 | (TYPE_USER_ALIGN (t) << 5)
859 | (TYPE_READONLY (t) << 6), v);
860 if (RECORD_OR_UNION_TYPE_P (t))
861 {
862 v = iterative_hash_host_wide_int (TYPE_TRANSPARENT_AGGR (t)
863 | (TYPE_FINAL_P (t) << 1), v);
864 }
865 else if (code == ARRAY_TYPE)
866 v = iterative_hash_host_wide_int (TYPE_NONALIASED_COMPONENT (t), v);
867 v = iterative_hash_host_wide_int (TYPE_PRECISION (t), v);
868 v = iterative_hash_host_wide_int (TYPE_ALIGN (t), v);
869 v = iterative_hash_host_wide_int ((TYPE_ALIAS_SET (t) == 0
870 || (!in_lto_p
871 && get_alias_set (t) == 0))
872 ? 0 : -1, v);
873 }
874
875 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
876 v = iterative_hash (TRANSLATION_UNIT_LANGUAGE (t),
877 strlen (TRANSLATION_UNIT_LANGUAGE (t)), v);
878
879 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
880 v = iterative_hash (t, sizeof (struct cl_target_option), v);
881
882 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
883 v = iterative_hash (t, sizeof (struct cl_optimization), v);
884
885 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
886 v = iterative_hash_host_wide_int (IDENTIFIER_HASH_VALUE (t), v);
887
888 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
889 v = iterative_hash (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t), v);
890
891 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
892 {
893 if (POINTER_TYPE_P (t))
894 {
895 /* For pointers factor in the pointed-to type recursively as
896 we cannot recurse through only pointers.
897 ??? We can generalize this by keeping track of the
898 in-SCC edges for each tree (or arbitrarily the first
899 such edge) and hashing that in in a second stage
900 (instead of the quadratic mixing of the SCC we do now). */
901 hashval_t x;
902 unsigned ix;
903 if (streamer_tree_cache_lookup (cache, TREE_TYPE (t), &ix))
904 x = streamer_tree_cache_get_hash (cache, ix);
905 else
906 x = hash_tree (cache, TREE_TYPE (t));
907 v = iterative_hash_hashval_t (x, v);
908 }
909 else if (code != IDENTIFIER_NODE)
910 visit (TREE_TYPE (t));
911 }
912
913 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
914 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
915 visit (VECTOR_CST_ELT (t, i));
916
917 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
918 {
919 visit (TREE_REALPART (t));
920 visit (TREE_IMAGPART (t));
921 }
922
923 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
924 {
925 /* Drop names that were created for anonymous entities. */
926 if (DECL_NAME (t)
927 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
928 && ANON_AGGRNAME_P (DECL_NAME (t)))
929 ;
930 else
931 visit (DECL_NAME (t));
932 if (DECL_FILE_SCOPE_P (t))
933 ;
934 else
935 visit (DECL_CONTEXT (t));
936 }
937
938 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
939 {
940 visit (DECL_SIZE (t));
941 visit (DECL_SIZE_UNIT (t));
942 visit (DECL_ATTRIBUTES (t));
943 if ((code == VAR_DECL
944 || code == PARM_DECL)
945 && DECL_HAS_VALUE_EXPR_P (t))
946 visit (DECL_VALUE_EXPR (t));
947 if (code == VAR_DECL
948 && DECL_HAS_DEBUG_EXPR_P (t))
949 visit (DECL_DEBUG_EXPR (t));
950 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
951 be able to call get_symbol_initial_value. */
952 }
953
954 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
955 {
956 if (code == TYPE_DECL)
957 visit (DECL_ORIGINAL_TYPE (t));
958 visit (DECL_VINDEX (t));
959 }
960
961 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
962 {
963 if (DECL_ASSEMBLER_NAME_SET_P (t))
964 visit (DECL_ASSEMBLER_NAME (t));
965 visit (DECL_SECTION_NAME (t));
966 visit (DECL_COMDAT_GROUP (t));
967 }
968
969 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
970 {
971 visit (DECL_FIELD_OFFSET (t));
972 visit (DECL_BIT_FIELD_TYPE (t));
973 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
974 visit (DECL_FIELD_BIT_OFFSET (t));
975 visit (DECL_FCONTEXT (t));
976 }
977
978 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
979 {
980 visit (DECL_FUNCTION_PERSONALITY (t));
981 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
982 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
983 }
984
985 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
986 {
987 visit (TYPE_SIZE (t));
988 visit (TYPE_SIZE_UNIT (t));
989 visit (TYPE_ATTRIBUTES (t));
990 visit (TYPE_NAME (t));
991 visit (TYPE_MAIN_VARIANT (t));
992 if (TYPE_FILE_SCOPE_P (t))
993 ;
994 else
995 visit (TYPE_CONTEXT (t));
996 visit (TYPE_STUB_DECL (t));
997 }
998
999 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1000 {
1001 if (code == ENUMERAL_TYPE)
1002 visit (TYPE_VALUES (t));
1003 else if (code == ARRAY_TYPE)
1004 visit (TYPE_DOMAIN (t));
1005 else if (RECORD_OR_UNION_TYPE_P (t))
1006 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1007 visit (f);
1008 else if (code == FUNCTION_TYPE
1009 || code == METHOD_TYPE)
1010 visit (TYPE_ARG_TYPES (t));
1011 if (!POINTER_TYPE_P (t))
1012 visit (TYPE_MINVAL (t));
1013 visit (TYPE_MAXVAL (t));
1014 if (RECORD_OR_UNION_TYPE_P (t))
1015 visit (TYPE_BINFO (t));
1016 }
1017
1018 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1019 {
1020 visit (TREE_PURPOSE (t));
1021 visit (TREE_VALUE (t));
1022 visit (TREE_CHAIN (t));
1023 }
1024
1025 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1026 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1027 visit (TREE_VEC_ELT (t, i));
1028
1029 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1030 {
1031 v = iterative_hash_host_wide_int (TREE_OPERAND_LENGTH (t), v);
1032 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1033 visit (TREE_OPERAND (t, i));
1034 }
1035
1036 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1037 {
1038 unsigned i;
1039 tree b;
1040 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1041 visit (b);
1042 visit (BINFO_OFFSET (t));
1043 visit (BINFO_VTABLE (t));
1044 visit (BINFO_VPTR_FIELD (t));
1045 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1046 visit (b);
1047 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1048 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1049 }
1050
1051 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1052 {
1053 unsigned i;
1054 tree index, value;
1055 v = iterative_hash_host_wide_int (CONSTRUCTOR_NELTS (t), v);
1056 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1057 {
1058 visit (index);
1059 visit (value);
1060 }
1061 }
1062
1063 if (code == OMP_CLAUSE)
1064 {
1065 int i;
1066
1067 v = iterative_hash_host_wide_int (OMP_CLAUSE_CODE (t), v);
1068 switch (OMP_CLAUSE_CODE (t))
1069 {
1070 case OMP_CLAUSE_DEFAULT:
1071 v = iterative_hash_host_wide_int (OMP_CLAUSE_DEFAULT_KIND (t), v);
1072 break;
1073 case OMP_CLAUSE_SCHEDULE:
1074 v = iterative_hash_host_wide_int (OMP_CLAUSE_SCHEDULE_KIND (t), v);
1075 break;
1076 case OMP_CLAUSE_DEPEND:
1077 v = iterative_hash_host_wide_int (OMP_CLAUSE_DEPEND_KIND (t), v);
1078 break;
1079 case OMP_CLAUSE_MAP:
1080 v = iterative_hash_host_wide_int (OMP_CLAUSE_MAP_KIND (t), v);
1081 break;
1082 case OMP_CLAUSE_PROC_BIND:
1083 v = iterative_hash_host_wide_int (OMP_CLAUSE_PROC_BIND_KIND (t), v);
1084 break;
1085 case OMP_CLAUSE_REDUCTION:
1086 v = iterative_hash_host_wide_int (OMP_CLAUSE_REDUCTION_CODE (t), v);
1087 break;
1088 default:
1089 break;
1090 }
1091 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1092 visit (OMP_CLAUSE_OPERAND (t, i));
1093 visit (OMP_CLAUSE_CHAIN (t));
1094 }
1095
1096 return v;
1097
1098 #undef visit
1099 }
1100
1101 /* Compare two SCC entries by their hash value for qsorting them. */
1102
1103 static int
1104 scc_entry_compare (const void *p1_, const void *p2_)
1105 {
1106 const scc_entry *p1 = (const scc_entry *) p1_;
1107 const scc_entry *p2 = (const scc_entry *) p2_;
1108 if (p1->hash < p2->hash)
1109 return -1;
1110 else if (p1->hash > p2->hash)
1111 return 1;
1112 return 0;
1113 }
1114
1115 /* Return a hash value for the SCC on the SCC stack from FIRST with
1116 size SIZE. */
1117
1118 static hashval_t
1119 hash_scc (struct streamer_tree_cache_d *cache, unsigned first, unsigned size)
1120 {
1121 /* Compute hash values for the SCC members. */
1122 for (unsigned i = 0; i < size; ++i)
1123 sccstack[first+i].hash = hash_tree (cache, sccstack[first+i].t);
1124
1125 if (size == 1)
1126 return sccstack[first].hash;
1127
1128 /* Sort the SCC's (tree, hash) pairs so that when we mix in
1129 all members of the SCC the hash value becomes independent of
1130 the order in which we visited the SCC.  Disregard hashes equal to
1131 the hash of the tree we mix into, because we cannot guarantee
1132 a stable sort for those across different TUs. */
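/* After sorting, each member's hash is combined with the hashes of the
   other members, walking the sorted array circularly starting just past
   its own run of equal hashes.  For example (a sketch, with hashes
   a < b < c), the member with hash b ends up with
   iterative_hash_hashval_t (a, iterative_hash_hashval_t (c, b)),
   independent of the DFS visit order.  */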
1133 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1134 hashval_t *tem = XALLOCAVEC (hashval_t, size);
1135 for (unsigned i = 0; i < size; ++i)
1136 {
1137 hashval_t hash = sccstack[first+i].hash;
1138 hashval_t orig_hash = hash;
1139 unsigned j;
1140 /* Skip same hashes. */
1141 for (j = i + 1;
1142 j < size && sccstack[first+j].hash == orig_hash; ++j)
1143 ;
1144 for (; j < size; ++j)
1145 hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
1146 for (j = 0; sccstack[first+j].hash != orig_hash; ++j)
1147 hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
1148 tem[i] = hash;
1149 }
1150 hashval_t scc_hash = 0;
1151 for (unsigned i = 0; i < size; ++i)
1152 {
1153 sccstack[first+i].hash = tem[i];
1154 scc_hash = iterative_hash_hashval_t (tem[i], scc_hash);
1155 }
1156 return scc_hash;
1157 }
1158
1159 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1160 already in the streamer cache. Main routine called for
1161 each visit of EXPR. */
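/* FROM_STATE is the SCC state of the tree this edge originates from
   (NULL for the toplevel call from lto_output_tree).  THIS_REF_P says
   whether EXPR itself may be emitted as an indexed reference and thus
   not walked; REF_P is passed on to the trees reachable from EXPR.  */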
1162
1163 static void
1164 DFS_write_tree (struct output_block *ob, sccs *from_state,
1165 tree expr, bool ref_p, bool this_ref_p)
1166 {
1167 unsigned ix;
1168 sccs **slot;
1169
1170 /* Handle special cases. */
1171 if (expr == NULL_TREE)
1172 return;
1173
1174 /* Do not DFS walk into indexable trees. */
1175 if (this_ref_p && tree_is_indexable (expr))
1176 return;
1177
1178 /* Check if we already streamed EXPR. */
1179 if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
1180 return;
1181
1182 slot = (sccs **)pointer_map_insert (sccstate, expr);
1183 sccs *cstate = *slot;
1184 if (!cstate)
1185 {
1186 scc_entry e = { expr, 0 };
1187 /* Not yet visited. DFS recurse and push it onto the stack. */
1188 *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
1189 sccstack.safe_push (e);
1190 cstate->dfsnum = next_dfs_num++;
1191 cstate->low = cstate->dfsnum;
1192
1193 if (streamer_handle_as_builtin_p (expr))
1194 ;
1195 else if (TREE_CODE (expr) == INTEGER_CST
1196 && !TREE_OVERFLOW (expr))
1197 DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
1198 else
1199 {
1200 DFS_write_tree_body (ob, expr, cstate, ref_p);
1201
1202 /* Walk any LTO-specific edges. */
1203 if (DECL_P (expr)
1204 && TREE_CODE (expr) != FUNCTION_DECL
1205 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
1206 {
1207 /* Handle DECL_INITIAL for symbols. */
1208 tree initial = get_symbol_initial_value (ob, expr);
1209 DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
1210 }
1211 }
1212
1213 /* See if we found an SCC. */
1214 if (cstate->low == cstate->dfsnum)
1215 {
1216 unsigned first, size;
1217 tree x;
1218
1219 /* Pop the SCC and compute its size. */
1220 first = sccstack.length ();
1221 do
1222 {
1223 x = sccstack[--first].t;
1224 }
1225 while (x != expr);
1226 size = sccstack.length () - first;
1227
1228 /* No need to compute hashes for LTRANS units, we don't perform
1229 any merging there. */
1230 hashval_t scc_hash = 0;
1231 unsigned scc_entry_len = 0;
1232 if (!flag_wpa)
1233 {
1234 scc_hash = hash_scc (ob->writer_cache, first, size);
1235
1236 /* Put the entries with the least number of collisions first. */
1237 unsigned entry_start = 0;
1238 scc_entry_len = size + 1;
1239 for (unsigned i = 0; i < size;)
1240 {
1241 unsigned from = i;
1242 for (i = i + 1; i < size
1243 && (sccstack[first + i].hash
1244 == sccstack[first + from].hash); ++i)
1245 ;
1246 if (i - from < scc_entry_len)
1247 {
1248 scc_entry_len = i - from;
1249 entry_start = from;
1250 }
1251 }
1252 for (unsigned i = 0; i < scc_entry_len; ++i)
1253 {
1254 scc_entry tem = sccstack[first + i];
1255 sccstack[first + i] = sccstack[first + entry_start + i];
1256 sccstack[first + entry_start + i] = tem;
1257 }
1258 }
1259
1260 /* Write LTO_tree_scc. */
1261 streamer_write_record_start (ob, LTO_tree_scc);
1262 streamer_write_uhwi (ob, size);
1263 streamer_write_uhwi (ob, scc_hash);
1264
1265 /* Write size-1 SCCs without wrapping them inside SCC bundles.
1266 All INTEGER_CSTs need to be handled this way as we need
1267 their type to materialize them. Also builtins are handled
1268 this way.
1269 ??? We still wrap these in LTO_tree_scc so at the
1270 input side we can properly identify the tree we want
1271 to ultimately return. */
1272 size_t old_len = ob->writer_cache->nodes.length ();
1273 if (size == 1)
1274 lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
1275 else
1276 {
1277 /* Write the size of the SCC entry candidates. */
1278 streamer_write_uhwi (ob, scc_entry_len);
1279
1280 /* Write all headers and populate the streamer cache. */
1281 for (unsigned i = 0; i < size; ++i)
1282 {
1283 hashval_t hash = sccstack[first+i].hash;
1284 tree t = sccstack[first+i].t;
1285 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
1286 t, hash, &ix);
1287 gcc_assert (!exists_p);
1288
1289 if (!lto_is_streamable (t))
1290 internal_error ("tree code %qs is not supported "
1291 "in LTO streams",
1292 get_tree_code_name (TREE_CODE (t)));
1293
1294 gcc_checking_assert (!streamer_handle_as_builtin_p (t));
1295
1296 /* Write the header, containing everything needed to
1297 materialize EXPR on the reading side. */
1298 streamer_write_tree_header (ob, t);
1299 }
1300
1301 /* Write the bitpacks and tree references. */
1302 for (unsigned i = 0; i < size; ++i)
1303 {
1304 lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
1305
1306 /* Mark the end of the tree. */
1307 streamer_write_zero (ob);
1308 }
1309 }
1310 gcc_assert (old_len + size == ob->writer_cache->nodes.length ());
1311
1312 /* Finally truncate the vector. */
1313 sccstack.truncate (first);
1314
1315 if (from_state)
1316 from_state->low = MIN (from_state->low, cstate->low);
1317 return;
1318 }
1319
1320 if (from_state)
1321 from_state->low = MIN (from_state->low, cstate->low);
1322 }
1323 gcc_checking_assert (from_state);
1324 if (cstate->dfsnum < from_state->dfsnum)
1325 from_state->low = MIN (cstate->dfsnum, from_state->low);
1326 }
1327
1328
1329 /* Emit the physical representation of tree node EXPR to output block
1330 OB.  If THIS_REF_P is true, EXPR itself is emitted as a reference when
1331 it is indexable; REF_P is used the same way for the trees reachable from EXPR. */
1332
1333 void
1334 lto_output_tree (struct output_block *ob, tree expr,
1335 bool ref_p, bool this_ref_p)
1336 {
1337 unsigned ix;
1338 bool existed_p;
1339
1340 if (expr == NULL_TREE)
1341 {
1342 streamer_write_record_start (ob, LTO_null);
1343 return;
1344 }
1345
1346 if (this_ref_p && tree_is_indexable (expr))
1347 {
1348 lto_output_tree_ref (ob, expr);
1349 return;
1350 }
1351
1352 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1353 if (existed_p)
1354 {
1355 /* If a node has already been streamed out, make sure that
1356 we don't write it more than once. Otherwise, the reader
1357 will instantiate two different nodes for the same object. */
1358 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1359 streamer_write_uhwi (ob, ix);
1360 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1361 lto_tree_code_to_tag (TREE_CODE (expr)));
1362 lto_stats.num_pickle_refs_output++;
1363 }
1364 else
1365 {
1366 /* This is the first time we see EXPR, write all reachable
1367 trees to OB. */
1368 static bool in_dfs_walk;
1369
1370 /* Protect against recursion, which would mean a disconnect between
1371 the tree edges we walk in the DFS walk and the edges
1372 we stream out. */
1373 gcc_assert (!in_dfs_walk);
1374
1375 /* Start the DFS walk. */
1378 in_dfs_walk = true;
1379 sccstate = pointer_map_create ();
1380 gcc_obstack_init (&sccstate_obstack);
1381 next_dfs_num = 1;
1382 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
1383 sccstack.release ();
1384 pointer_map_destroy (sccstate);
1385 obstack_free (&sccstate_obstack, NULL);
1386 in_dfs_walk = false;
1387
1388 /* Finally append a reference to the tree we were writing.
1389 ??? If expr ended up as a singleton we could have
1390 inlined it here and avoid outputting a reference. */
1391 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1392 gcc_assert (existed_p);
1393 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1394 streamer_write_uhwi (ob, ix);
1395 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1396 lto_tree_code_to_tag (TREE_CODE (expr)));
1397 lto_stats.num_pickle_refs_output++;
1398 }
1399 }
1400
1401
1402 /* Output to OB a list of try/catch handlers starting with FIRST. */
1403
1404 static void
1405 output_eh_try_list (struct output_block *ob, eh_catch first)
1406 {
1407 eh_catch n;
1408
1409 for (n = first; n; n = n->next_catch)
1410 {
1411 streamer_write_record_start (ob, LTO_eh_catch);
1412 stream_write_tree (ob, n->type_list, true);
1413 stream_write_tree (ob, n->filter_list, true);
1414 stream_write_tree (ob, n->label, true);
1415 }
1416
1417 streamer_write_record_start (ob, LTO_null);
1418 }
1419
1420
1421 /* Output EH region R to OB, or LTO_null if R is NULL. */
1424
1425 static void
1426 output_eh_region (struct output_block *ob, eh_region r)
1427 {
1428 enum LTO_tags tag;
1429
1430 if (r == NULL)
1431 {
1432 streamer_write_record_start (ob, LTO_null);
1433 return;
1434 }
1435
1436 if (r->type == ERT_CLEANUP)
1437 tag = LTO_ert_cleanup;
1438 else if (r->type == ERT_TRY)
1439 tag = LTO_ert_try;
1440 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1441 tag = LTO_ert_allowed_exceptions;
1442 else if (r->type == ERT_MUST_NOT_THROW)
1443 tag = LTO_ert_must_not_throw;
1444 else
1445 gcc_unreachable ();
1446
1447 streamer_write_record_start (ob, tag);
1448 streamer_write_hwi (ob, r->index);
1449
1450 if (r->outer)
1451 streamer_write_hwi (ob, r->outer->index);
1452 else
1453 streamer_write_zero (ob);
1454
1455 if (r->inner)
1456 streamer_write_hwi (ob, r->inner->index);
1457 else
1458 streamer_write_zero (ob);
1459
1460 if (r->next_peer)
1461 streamer_write_hwi (ob, r->next_peer->index);
1462 else
1463 streamer_write_zero (ob);
1464
1465 if (r->type == ERT_TRY)
1466 {
1467 output_eh_try_list (ob, r->u.eh_try.first_catch);
1468 }
1469 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1470 {
1471 stream_write_tree (ob, r->u.allowed.type_list, true);
1472 stream_write_tree (ob, r->u.allowed.label, true);
1473 streamer_write_uhwi (ob, r->u.allowed.filter);
1474 }
1475 else if (r->type == ERT_MUST_NOT_THROW)
1476 {
1477 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1478 bitpack_d bp = bitpack_create (ob->main_stream);
1479 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1480 streamer_write_bitpack (&bp);
1481 }
1482
1483 if (r->landing_pads)
1484 streamer_write_hwi (ob, r->landing_pads->index);
1485 else
1486 streamer_write_zero (ob);
1487 }
1488
1489
1490 /* Output landing pad LP to OB. */
1491
1492 static void
1493 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1494 {
1495 if (lp == NULL)
1496 {
1497 streamer_write_record_start (ob, LTO_null);
1498 return;
1499 }
1500
1501 streamer_write_record_start (ob, LTO_eh_landing_pad);
1502 streamer_write_hwi (ob, lp->index);
1503 if (lp->next_lp)
1504 streamer_write_hwi (ob, lp->next_lp->index);
1505 else
1506 streamer_write_zero (ob);
1507
1508 if (lp->region)
1509 streamer_write_hwi (ob, lp->region->index);
1510 else
1511 streamer_write_zero (ob);
1512
1513 stream_write_tree (ob, lp->post_landing_pad, true);
1514 }
1515
1516
1517 /* Output the existing eh_table to OB. */
1518
1519 static void
1520 output_eh_regions (struct output_block *ob, struct function *fn)
1521 {
1522 if (fn->eh && fn->eh->region_tree)
1523 {
1524 unsigned i;
1525 eh_region eh;
1526 eh_landing_pad lp;
1527 tree ttype;
1528
1529 streamer_write_record_start (ob, LTO_eh_table);
1530
1531 /* Emit the index of the root of the EH region tree. */
1532 streamer_write_hwi (ob, fn->eh->region_tree->index);
1533
1534 /* Emit all the EH regions in the region array. */
1535 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1536 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1537 output_eh_region (ob, eh);
1538
1539 /* Emit all landing pads. */
1540 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1541 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1542 output_eh_lp (ob, lp);
1543
1544 /* Emit all the runtime type data. */
1545 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1546 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1547 stream_write_tree (ob, ttype, true);
1548
1549 /* Emit the table of action chains. */
1550 if (targetm.arm_eabi_unwinder)
1551 {
1552 tree t;
1553 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1554 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1555 stream_write_tree (ob, t, true);
1556 }
1557 else
1558 {
1559 uchar c;
1560 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1561 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1562 streamer_write_char_stream (ob->main_stream, c);
1563 }
1564 }
1565
1566 /* The LTO_null either terminates the record or indicates that there
1567 are no eh_records at all. */
1568 streamer_write_record_start (ob, LTO_null);
1569 }
1570
1571
1572 /* Output all of the active SSA names of FN to OB. */
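/* The format is: the total length of the SSA name vector, then for each
   live, non-virtual name its index, a flag whether it is a default
   definition, and either SSA_NAME_VAR or, if that is absent, its type;
   the list is terminated by a zero index.  */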
1573
1574 static void
1575 output_ssa_names (struct output_block *ob, struct function *fn)
1576 {
1577 unsigned int i, len;
1578
1579 len = vec_safe_length (SSANAMES (fn));
1580 streamer_write_uhwi (ob, len);
1581
1582 for (i = 1; i < len; i++)
1583 {
1584 tree ptr = (*SSANAMES (fn))[i];
1585
1586 if (ptr == NULL_TREE
1587 || SSA_NAME_IN_FREE_LIST (ptr)
1588 || virtual_operand_p (ptr))
1589 continue;
1590
1591 streamer_write_uhwi (ob, i);
1592 streamer_write_char_stream (ob->main_stream,
1593 SSA_NAME_IS_DEFAULT_DEF (ptr));
1594 if (SSA_NAME_VAR (ptr))
1595 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1596 else
1597 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1598 stream_write_tree (ob, TREE_TYPE (ptr), true);
1599 }
1600
1601 streamer_write_zero (ob);
1602 }
1603
1604
1605 /* Output the cfg. */
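/* The cfg stream contains, in order: the profile status, the highest
   basic block number, for every block its index followed by its
   successor edges (destination index, probability, count, flags), a -1
   terminator, the basic block chain in layout order terminated by -1,
   and finally the loop tree with the fields that copy_loop_info copies.  */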
1606
1607 static void
1608 output_cfg (struct output_block *ob, struct function *fn)
1609 {
1610 struct lto_output_stream *tmp_stream = ob->main_stream;
1611 basic_block bb;
1612
1613 ob->main_stream = ob->cfg_stream;
1614
1615 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1616 profile_status_for_function (fn));
1617
1618 /* Output the number of the highest basic block. */
1619 streamer_write_uhwi (ob, last_basic_block_for_function (fn));
1620
1621 FOR_ALL_BB_FN (bb, fn)
1622 {
1623 edge_iterator ei;
1624 edge e;
1625
1626 streamer_write_hwi (ob, bb->index);
1627
1628 /* Output the successors and the edge flags. */
1629 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1630 FOR_EACH_EDGE (e, ei, bb->succs)
1631 {
1632 streamer_write_uhwi (ob, e->dest->index);
1633 streamer_write_hwi (ob, e->probability);
1634 streamer_write_gcov_count (ob, e->count);
1635 streamer_write_uhwi (ob, e->flags);
1636 }
1637 }
1638
1639 streamer_write_hwi (ob, -1);
1640
1641 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1642 while (bb->next_bb)
1643 {
1644 streamer_write_hwi (ob, bb->next_bb->index);
1645 bb = bb->next_bb;
1646 }
1647
1648 streamer_write_hwi (ob, -1);
1649
1650 /* ??? The cfgloop interface is tied to cfun. */
1651 gcc_assert (cfun == fn);
1652
1653 /* Output the number of loops. */
1654 streamer_write_uhwi (ob, number_of_loops (fn));
1655
1656 /* Output each loop, skipping the tree root which has number zero. */
1657 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1658 {
1659 struct loop *loop = get_loop (fn, i);
1660
1661 /* Write the index of the loop header. That's enough to rebuild
1662 the loop tree on the reader side. Stream -1 for an unused
1663 loop entry. */
1664 if (!loop)
1665 {
1666 streamer_write_hwi (ob, -1);
1667 continue;
1668 }
1669 else
1670 streamer_write_hwi (ob, loop->header->index);
1671
1672 /* Write everything copy_loop_info copies. */
1673 streamer_write_enum (ob->main_stream,
1674 loop_estimation, EST_LAST, loop->estimate_state);
1675 streamer_write_hwi (ob, loop->any_upper_bound);
1676 if (loop->any_upper_bound)
1677 {
1678 streamer_write_uhwi (ob, loop->nb_iterations_upper_bound.low);
1679 streamer_write_hwi (ob, loop->nb_iterations_upper_bound.high);
1680 }
1681 streamer_write_hwi (ob, loop->any_estimate);
1682 if (loop->any_estimate)
1683 {
1684 streamer_write_uhwi (ob, loop->nb_iterations_estimate.low);
1685 streamer_write_hwi (ob, loop->nb_iterations_estimate.high);
1686 }
1687
1688 /* Write OMP SIMD related info. */
1689 streamer_write_hwi (ob, loop->safelen);
1690 streamer_write_hwi (ob, loop->force_vect);
1691 stream_write_tree (ob, loop->simduid, true);
1692 }
1693
1694 ob->main_stream = tmp_stream;
1695 }
1696
1697
1698 /* Create the section using OB and write its header. If the section
1699 type is a function body, FN is the decl for that function. */
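/* The emitted section consists of the lto_function_header followed by
   the cfg stream (for function bodies only), the main stream and the
   string stream; the header records the size of each part.  */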
1700
1701 void
1702 produce_asm (struct output_block *ob, tree fn)
1703 {
1704 enum lto_section_type section_type = ob->section_type;
1705 struct lto_function_header header;
1706 char *section_name;
1707 struct lto_output_stream *header_stream;
1708
1709 if (section_type == LTO_section_function_body)
1710 {
1711 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1712 section_name = lto_get_section_name (section_type, name, NULL);
1713 }
1714 else
1715 section_name = lto_get_section_name (section_type, NULL, NULL);
1716
1717 lto_begin_section (section_name, !flag_wpa);
1718 free (section_name);
1719
1720 /* The entire header is computed here. */
1721 memset (&header, 0, sizeof (struct lto_function_header));
1722
1723 /* Write the header. */
1724 header.lto_header.major_version = LTO_major_version;
1725 header.lto_header.minor_version = LTO_minor_version;
1726
1727 header.compressed_size = 0;
1728
1729 if (section_type == LTO_section_function_body)
1730 header.cfg_size = ob->cfg_stream->total_size;
1731 header.main_size = ob->main_stream->total_size;
1732 header.string_size = ob->string_stream->total_size;
1733
1734 header_stream = XCNEW (struct lto_output_stream);
1735 lto_output_data_stream (header_stream, &header, sizeof header);
1736 lto_write_stream (header_stream);
1737 free (header_stream);
1738
1739 /* Put all of the gimple and the string table out to the asm file as a
1740 block of text. */
1741 if (section_type == LTO_section_function_body)
1742 lto_write_stream (ob->cfg_stream);
1743 lto_write_stream (ob->main_stream);
1744 lto_write_stream (ob->string_stream);
1745
1746 lto_end_section ();
1747 }
1748
1749
1750 /* Output the base body of struct function FN using output block OB. */
1751
1752 static void
1753 output_struct_function_base (struct output_block *ob, struct function *fn)
1754 {
1755 struct bitpack_d bp;
1756 unsigned i;
1757 tree t;
1758
1759 /* Output the static chain and non-local goto save area. */
1760 stream_write_tree (ob, fn->static_chain_decl, true);
1761 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
1762
1763 /* Output all the local variables in the function. */
1764 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
1765 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
1766 stream_write_tree (ob, t, true);
1767
1768 /* Output current IL state of the function. */
1769 streamer_write_uhwi (ob, fn->curr_properties);
1770
1771 /* Write all the attributes for FN. */
1772 bp = bitpack_create (ob->main_stream);
1773 bp_pack_value (&bp, fn->is_thunk, 1);
1774 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
1775 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
1776 bp_pack_value (&bp, fn->returns_struct, 1);
1777 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
1778 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
1779 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
1780 bp_pack_value (&bp, fn->after_inlining, 1);
1781 bp_pack_value (&bp, fn->stdarg, 1);
1782 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
1783 bp_pack_value (&bp, fn->calls_alloca, 1);
1784 bp_pack_value (&bp, fn->calls_setjmp, 1);
1785 bp_pack_value (&bp, fn->has_force_vect_loops, 1);
1786 bp_pack_value (&bp, fn->has_simduid_loops, 1);
1787 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
1788 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
1789
1790 /* Output the function start and end loci. */
1791 stream_output_location (ob, &bp, fn->function_start_locus);
1792 stream_output_location (ob, &bp, fn->function_end_locus);
1793
1794 streamer_write_bitpack (&bp);
1795 }
1796
1797
1798 /* Output the body of function NODE->DECL. */
1799
1800 static void
1801 output_function (struct cgraph_node *node)
1802 {
1803 tree function;
1804 struct function *fn;
1805 basic_block bb;
1806 struct output_block *ob;
1807
1808 function = node->decl;
1809 fn = DECL_STRUCT_FUNCTION (function);
1810 ob = create_output_block (LTO_section_function_body);
1811
1812 clear_line_info (ob);
1813 ob->cgraph_node = node;
1814
1815 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
1816
1817 /* Set current_function_decl and cfun. */
1818 push_cfun (fn);
1819
1820 /* Make string 0 be a NULL string. */
1821 streamer_write_char_stream (ob->string_stream, 0);
1822
1823 streamer_write_record_start (ob, LTO_function);
1824
1825 /* Output decls for parameters and args. */
1826 stream_write_tree (ob, DECL_RESULT (function), true);
1827 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
1828
1829 /* Output DECL_INITIAL for the function, which contains the tree of
1830 lexical scopes. */
1831 stream_write_tree (ob, DECL_INITIAL (function), true);
1832
1833 /* We also stream abstract functions; for those we stream only what is
1834 needed for debug info. */
1835 if (gimple_has_body_p (function))
1836 {
1837 streamer_write_uhwi (ob, 1);
1838 output_struct_function_base (ob, fn);
1839
1840 /* Output all the SSA names used in the function. */
1841 output_ssa_names (ob, fn);
1842
1843 /* Output any exception handling regions. */
1844 output_eh_regions (ob, fn);
1845
1846
1847 /* We will renumber the statements. The code that does this uses
1848 the same ordering that we use for serializing them so we can use
1849 the same code on the other end and not have to write out the
1850 statement numbers. We do not assign UIDs to PHIs here because
1851 virtual PHIs get recomputed on the fly, which would make the numbers
1852 inconsistent. */
1853 set_gimple_stmt_max_uid (cfun, 0);
1854 FOR_ALL_BB (bb)
1855 {
1856 gimple_stmt_iterator gsi;
1857 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1858 {
1859 gimple stmt = gsi_stmt (gsi);
1860
1861 /* Virtual PHIs are not going to be streamed. */
1862 if (!virtual_operand_p (gimple_phi_result (stmt)))
1863 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1864 }
1865 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1866 {
1867 gimple stmt = gsi_stmt (gsi);
1868 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1869 }
1870 }
1871 /* To avoid keeping duplicate gimple IDs in the statements, renumber
1872 virtual phis now. */
1873 FOR_ALL_BB (bb)
1874 {
1875 gimple_stmt_iterator gsi;
1876 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1877 {
1878 gimple stmt = gsi_stmt (gsi);
1879 if (virtual_operand_p (gimple_phi_result (stmt)))
1880 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1881 }
1882 }
1883
1884 /* Output the code for the function. */
1885 FOR_ALL_BB_FN (bb, fn)
1886 output_bb (ob, bb, fn);
1887
1888 /* The terminator for this function. */
1889 streamer_write_record_start (ob, LTO_null);
1890
1891 output_cfg (ob, fn);
1892
1893 pop_cfun ();
1894 }
1895 else
1896 streamer_write_uhwi (ob, 0);
1897
1898 /* Create a section to hold the pickled output of this function. */
1899 produce_asm (ob, function);
1900
1901 destroy_output_block (ob);
1902 }
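/* Illustrative summary (not part of the original source): for a function
   with a body, the stream written by output_function above contains, in
   order,

       the LTO_function record tag
       DECL_RESULT, the DECL_ARGUMENTS chain and DECL_INITIAL
       uhwi 1 (a body follows)
       the struct function base (output_struct_function_base)
       the SSA names and EH regions
       one record per basic block (output_bb), terminated by LTO_null
       the CFG (output_cfg), which goes to the cfg stream rather than the
       main stream

   For an abstract function only the decls and a uhwi 0 are written.  */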
1903
1904
1905 /* Emit toplevel asms. */
1906
1907 void
1908 lto_output_toplevel_asms (void)
1909 {
1910 struct output_block *ob;
1911 struct asm_node *can;
1912 char *section_name;
1913 struct lto_output_stream *header_stream;
1914 struct lto_asm_header header;
1915
1916 if (! asm_nodes)
1917 return;
1918
1919 ob = create_output_block (LTO_section_asm);
1920
1921 /* Make string 0 be a NULL string. */
1922 streamer_write_char_stream (ob->string_stream, 0);
1923
1924 for (can = asm_nodes; can; can = can->next)
1925 {
1926 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
1927 streamer_write_hwi (ob, can->order);
1928 }
1929
1930 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
1931
1932 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
1933 lto_begin_section (section_name, !flag_wpa);
1934 free (section_name);
1935
1936 /* The entire header stream is computed here. */
1937 memset (&header, 0, sizeof (header));
1938
1939 /* Write the header. */
1940 header.lto_header.major_version = LTO_major_version;
1941 header.lto_header.minor_version = LTO_minor_version;
1942
1943 header.main_size = ob->main_stream->total_size;
1944 header.string_size = ob->string_stream->total_size;
1945
1946 header_stream = XCNEW (struct lto_output_stream);
1947 lto_output_data_stream (header_stream, &header, sizeof (header));
1948 lto_write_stream (header_stream);
1949 free (header_stream);
1950
1951 /* Put all of the gimple and the string table out to the asm file as a
1952 block of text. */
1953 lto_write_stream (ob->main_stream);
1954 lto_write_stream (ob->string_stream);
1955
1956 lto_end_section ();
1957
1958 destroy_output_block (ob);
1959 }
1960
1961
1962 /* Copy the function body of NODE without deserializing. */
1963
1964 static void
1965 copy_function (struct cgraph_node *node)
1966 {
1967 tree function = node->decl;
1968 struct lto_file_decl_data *file_data = node->lto_file_data;
1969 struct lto_output_stream *output_stream = XCNEW (struct lto_output_stream);
1970 const char *data;
1971 size_t len;
1972 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
1973 char *section_name =
1974 lto_get_section_name (LTO_section_function_body, name, NULL);
1975 size_t i, j;
1976 struct lto_in_decl_state *in_state;
1977 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
1978
1979 lto_begin_section (section_name, !flag_wpa);
1980 free (section_name);
1981
1982 /* We may have renamed the declaration, e.g., a static function. */
1983 name = lto_get_decl_name_mapping (file_data, name);
1984
1985 data = lto_get_section_data (file_data, LTO_section_function_body,
1986 name, &len);
1987 gcc_assert (data);
1988
1989 /* Do a bit copy of the function body. */
1990 lto_output_data_stream (output_stream, data, len);
1991 lto_write_stream (output_stream);
1992
1993 /* Copy decls. */
1994 in_state =
1995 lto_get_function_in_decl_state (node->lto_file_data, function);
1996 gcc_assert (in_state);
1997
1998 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
1999 {
2000 size_t n = in_state->streams[i].size;
2001 tree *trees = in_state->streams[i].trees;
2002 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2003
2004 /* The out state must have the same indices as the in state,
2005 so just copy the vector. All the encoders in the out state
2006 must be empty when we reach here. */
2007 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2008 encoder->trees.reserve_exact (n);
2009 for (j = 0; j < n; j++)
2010 encoder->trees.safe_push (trees[j]);
2011 }
2012
2013 lto_free_section_data (file_data, LTO_section_function_body, name,
2014 data, len);
2015 free (output_stream);
2016 lto_end_section ();
2017 }
2018
2019
2020 /* Main entry point from the pass manager. */
2021
2022 void
2023 lto_output (void)
2024 {
2025 struct lto_out_decl_state *decl_state;
2026 #ifdef ENABLE_CHECKING
2027 bitmap output = lto_bitmap_alloc ();
2028 #endif
2029 int i, n_nodes;
2030 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2031
2032 /* Initialize the streamer. */
2033 lto_streamer_init ();
2034
2035 n_nodes = lto_symtab_encoder_size (encoder);
2036 /* Process only the functions with bodies. */
2037 for (i = 0; i < n_nodes; i++)
2038 {
2039 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2040 cgraph_node *node = dyn_cast <cgraph_node> (snode);
2041 if (node
2042 && lto_symtab_encoder_encode_body_p (encoder, node)
2043 && !node->alias)
2044 {
2045 #ifdef ENABLE_CHECKING
2046 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2047 bitmap_set_bit (output, DECL_UID (node->decl));
2048 #endif
2049 decl_state = lto_new_out_decl_state ();
2050 lto_push_out_decl_state (decl_state);
2051 if (gimple_has_body_p (node->decl) || !flag_wpa)
2052 output_function (node);
2053 else
2054 copy_function (node);
2055 gcc_assert (lto_get_out_decl_state () == decl_state);
2056 lto_pop_out_decl_state ();
2057 lto_record_function_out_decl_state (node->decl, decl_state);
2058 }
2059 }
2060
2061 /* Emit the callgraph after emitting function bodies. This needs to
2062 be done now to make sure that all the statements in every function
2063 have been renumbered so that edges can be associated with call
2064 statements using the statement UIDs. */
2065 output_symtab ();
2066
2067 #ifdef ENABLE_CHECKING
2068 lto_bitmap_free (output);
2069 #endif
2070 }
2071
2072 /* Write each node encoded by ENCODER to OB, as well as those reachable
2073 from it and required for correct representation of its semantics.
2074 Each node in ENCODER must be a global declaration or a type. A node
2075 is written only once, even if it appears multiple times in the
2076 vector. Certain transitively-reachable nodes, such as those
2077 representing expressions, may be duplicated, but such nodes
2078 must not appear in ENCODER itself. */
2079
2080 static void
2081 write_global_stream (struct output_block *ob,
2082 struct lto_tree_ref_encoder *encoder)
2083 {
2084 tree t;
2085 size_t index;
2086 const size_t size = lto_tree_ref_encoder_size (encoder);
2087
2088 for (index = 0; index < size; index++)
2089 {
2090 t = lto_tree_ref_encoder_get_tree (encoder, index);
2091 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2092 stream_write_tree (ob, t, false);
2093 }
2094 }
2095
2096
2097 /* Write a sequence of indices into the globals vector corresponding
2098 to the trees in ENCODER. These are used by the reader to map the
2099 indices used to refer to global entities within function bodies to
2100 their referents. */
2101
2102 static void
2103 write_global_references (struct output_block *ob,
2104 struct lto_output_stream *ref_stream,
2105 struct lto_tree_ref_encoder *encoder)
2106 {
2107 tree t;
2108 uint32_t index;
2109 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2110
2111 /* Write size as 32-bit unsigned. */
2112 lto_output_data_stream (ref_stream, &size, sizeof (int32_t));
2113
2114 for (index = 0; index < size; index++)
2115 {
2116 uint32_t slot_num;
2117
2118 t = lto_tree_ref_encoder_get_tree (encoder, index);
2119 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2120 gcc_assert (slot_num != (unsigned)-1);
2121 lto_output_data_stream (ref_stream, &slot_num, sizeof slot_num);
2122 }
2123 }
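/* Illustrative sketch (not part of the original source): each reference
   stream written above is simply

       uint32_t size;
       uint32_t slot[size];

   The hypothetical decoder below walks that layout; the real reader is
   elsewhere in the LTO front end, this is only a model.  */

#include <stdint.h>
#include <string.h>

static const char *
example_read_refs (const char *p, uint32_t *out, uint32_t out_len)
{
  uint32_t size, i;

  memcpy (&size, p, sizeof (size));
  p += sizeof (size);
  for (i = 0; i < size; i++, p += sizeof (uint32_t))
    if (i < out_len)
      memcpy (&out[i], p, sizeof (uint32_t));
  return p;
}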
2124
2125
2126 /* Write all the streams in an lto_out_decl_state STATE using
2127 output block OB. */
2128
2129 void
2130 lto_output_decl_state_streams (struct output_block *ob,
2131 struct lto_out_decl_state *state)
2132 {
2133 int i;
2134
2135 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2136 write_global_stream (ob, &state->streams[i]);
2137 }
2138
2139
2140 /* Write all the references in an lto_out_decl_state STATE using
2141 output block OB and output stream OUT_STREAM. */
2142
2143 void
2144 lto_output_decl_state_refs (struct output_block *ob,
2145 struct lto_output_stream *out_stream,
2146 struct lto_out_decl_state *state)
2147 {
2148 unsigned i;
2149 uint32_t ref;
2150 tree decl;
2151
2152 /* Write a reference to the FUNCTION_DECL. If there is no function,
2153 write a reference to void_type_node. */
2154 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2155 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2156 gcc_assert (ref != (unsigned)-1);
2157 lto_output_data_stream (out_stream, &ref, sizeof (uint32_t));
2158
2159 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2160 write_global_references (ob, out_stream, &state->streams[i]);
2161 }
2162
2163
2164 /* Return the written size of STATE. */
2165
2166 static size_t
2167 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2168 {
2169 int i;
2170 size_t size;
2171
2172 size = sizeof (int32_t); /* fn_ref. */
2173 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2174 {
2175 size += sizeof (int32_t); /* vector size. */
2176 size += (lto_tree_ref_encoder_size (&state->streams[i])
2177 * sizeof (int32_t));
2178 }
2179 return size;
2180 }
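/* Worked example (illustrative, not part of the original source): the size
   computed above is

       4 bytes                      for the fn_decl reference
     + for each of the LTO_N_DECL_STREAMS streams:
         4 bytes                    for that stream's element count
       + 4 * n_i bytes              for its n_i slot indices

   So, assuming LTO_N_DECL_STREAMS were 6 purely for the sake of the
   arithmetic, a state whose two non-empty streams hold 3 and 5 trees would
   occupy 4 + 6*4 + (3 + 5)*4 = 60 bytes, which is exactly what
   lto_output_decl_state_refs emits for it.  */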
2181
2182
2183 /* Write symbol T into STREAM, using CACHE to look up its slot number.
2184 SEEN specifies the symbols we have written so far. */
2185
2186 static void
2187 write_symbol (struct streamer_tree_cache_d *cache,
2188 struct lto_output_stream *stream,
2189 tree t, struct pointer_set_t *seen, bool alias)
2190 {
2191 const char *name;
2192 enum gcc_plugin_symbol_kind kind;
2193 enum gcc_plugin_symbol_visibility visibility;
2194 unsigned slot_num;
2195 unsigned HOST_WIDEST_INT size;
2196 const char *comdat;
2197 unsigned char c;
2198
2199 /* None of the following kinds of symbols are needed in the
2200 symbol table. */
2201 if (!TREE_PUBLIC (t)
2202 || is_builtin_fn (t)
2203 || DECL_ABSTRACT (t)
2204 || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
2205 return;
2206 gcc_assert (TREE_CODE (t) != RESULT_DECL);
2207
2208 gcc_assert (TREE_CODE (t) == VAR_DECL
2209 || TREE_CODE (t) == FUNCTION_DECL);
2210
2211 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2212
2213 /* This behaves like assemble_name_raw in varasm.c, performing the
2214 same name manipulations that ASM_OUTPUT_LABELREF does. */
2215 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2216
2217 if (pointer_set_contains (seen, name))
2218 return;
2219 pointer_set_insert (seen, name);
2220
2221 streamer_tree_cache_lookup (cache, t, &slot_num);
2222 gcc_assert (slot_num != (unsigned)-1);
2223
2224 if (DECL_EXTERNAL (t))
2225 {
2226 if (DECL_WEAK (t))
2227 kind = GCCPK_WEAKUNDEF;
2228 else
2229 kind = GCCPK_UNDEF;
2230 }
2231 else
2232 {
2233 if (DECL_WEAK (t))
2234 kind = GCCPK_WEAKDEF;
2235 else if (DECL_COMMON (t))
2236 kind = GCCPK_COMMON;
2237 else
2238 kind = GCCPK_DEF;
2239
2240 /* When something is defined, it should have a node attached. */
2241 gcc_assert (alias || TREE_CODE (t) != VAR_DECL
2242 || varpool_get_node (t)->definition);
2243 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2244 || (cgraph_get_node (t)
2245 && cgraph_get_node (t)->definition));
2246 }
2247
2248 /* Imitate what default_elf_asm_output_external does.
2249 When a symbol is external, we need to output it with DEFAULT visibility
2250 when compiling with -fvisibility=default, but with HIDDEN visibility
2251 when the symbol has the attribute (visibility("hidden")) specified.
2252 targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
2253 right. */
2254
2255 if (DECL_EXTERNAL (t)
2256 && !targetm.binds_local_p (t))
2257 visibility = GCCPV_DEFAULT;
2258 else
2259 switch (DECL_VISIBILITY (t))
2260 {
2261 case VISIBILITY_DEFAULT:
2262 visibility = GCCPV_DEFAULT;
2263 break;
2264 case VISIBILITY_PROTECTED:
2265 visibility = GCCPV_PROTECTED;
2266 break;
2267 case VISIBILITY_HIDDEN:
2268 visibility = GCCPV_HIDDEN;
2269 break;
2270 case VISIBILITY_INTERNAL:
2271 visibility = GCCPV_INTERNAL;
2272 break;
2273 }
2274
2275 if (kind == GCCPK_COMMON
2276 && DECL_SIZE_UNIT (t)
2277 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2278 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2279 else
2280 size = 0;
2281
2282 if (DECL_ONE_ONLY (t))
2283 comdat = IDENTIFIER_POINTER (DECL_COMDAT_GROUP (t));
2284 else
2285 comdat = "";
2286
2287 lto_output_data_stream (stream, name, strlen (name) + 1);
2288 lto_output_data_stream (stream, comdat, strlen (comdat) + 1);
2289 c = (unsigned char) kind;
2290 lto_output_data_stream (stream, &c, 1);
2291 c = (unsigned char) visibility;
2292 lto_output_data_stream (stream, &c, 1);
2293 lto_output_data_stream (stream, &size, 8);
2294 lto_output_data_stream (stream, &slot_num, 4);
2295 }
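/* Illustrative sketch (not part of the original source): each entry that
   write_symbol appends to the plugin symbol table has the layout

       name        NUL-terminated string
       comdat      NUL-terminated string (empty when not one-only)
       kind        1 byte (enum gcc_plugin_symbol_kind)
       visibility  1 byte (enum gcc_plugin_symbol_visibility)
       size        8 bytes
       slot        4 bytes

   The hypothetical decoder below walks one such record; it only models the
   layout and is not the linker plugin's real parser.  */

#include <stdint.h>
#include <string.h>

static const char *
example_read_symbol_entry (const char *p, const char **name,
                           const char **comdat, unsigned char *kind,
                           unsigned char *visibility, uint64_t *size,
                           uint32_t *slot)
{
  *name = p;
  p += strlen (p) + 1;
  *comdat = p;
  p += strlen (p) + 1;
  *kind = (unsigned char) *p++;
  *visibility = (unsigned char) *p++;
  memcpy (size, p, 8);
  p += 8;
  memcpy (slot, p, 4);
  p += 4;
  return p;
}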
2296
2297 /* Return true if NODE should appear in the plugin symbol table. */
2298
2299 bool
2300 output_symbol_p (symtab_node *node)
2301 {
2302 struct cgraph_node *cnode;
2303 if (!symtab_real_symbol_p (node))
2304 return false;
2305 /* We keep external functions in the symtab for the sake of inlining
2306 and devirtualization. We do not want to see them in the symbol table as
2307 references unless they are really used. */
2308 cnode = dyn_cast <cgraph_node> (node);
2309 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2310 && cnode->callers)
2311 return true;
2312
2313 /* Ignore all references from the initializers of external variables - they
2314 are not really part of the compilation unit until they are used by folding.
2315 Some symbols, like references to external construction vtables, cannot be
2316 referred to at all. We decide this in can_refer_decl_in_current_unit_p. */
2317 if (!node->definition || DECL_EXTERNAL (node->decl))
2318 {
2319 int i;
2320 struct ipa_ref *ref;
2321 for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list,
2322 i, ref); i++)
2323 {
2324 if (ref->use == IPA_REF_ALIAS)
2325 continue;
2326 if (is_a <cgraph_node> (ref->referring))
2327 return true;
2328 if (!DECL_EXTERNAL (ref->referring->decl))
2329 return true;
2330 }
2331 return false;
2332 }
2333 return true;
2334 }
2335
2336
2337 /* Write an IL symbol table to OB. The symbols written are those
2338 recorded in OB's symtab node encoder. */
2339
2340 static void
2341 produce_symtab (struct output_block *ob)
2342 {
2343 struct streamer_tree_cache_d *cache = ob->writer_cache;
2344 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2345 struct pointer_set_t *seen;
2346 struct lto_output_stream stream;
2347 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2348 lto_symtab_encoder_iterator lsei;
2349
2350 lto_begin_section (section_name, false);
2351 free (section_name);
2352
2353 seen = pointer_set_create ();
2354 memset (&stream, 0, sizeof (stream));
2355
2356 /* Write the symbol table.
2357 First write everything defined and then all declarations.
2358 This is necessary to handle cases where we have duplicated symbols. */
2359 for (lsei = lsei_start (encoder);
2360 !lsei_end_p (lsei); lsei_next (&lsei))
2361 {
2362 symtab_node *node = lsei_node (lsei);
2363
2364 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2365 continue;
2366 write_symbol (cache, &stream, node->decl, seen, false);
2367 }
2368 for (lsei = lsei_start (encoder);
2369 !lsei_end_p (lsei); lsei_next (&lsei))
2370 {
2371 symtab_node *node = lsei_node (lsei);
2372
2373 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2374 continue;
2375 write_symbol (cache, &stream, node->decl, seen, false);
2376 }
2377
2378 lto_write_stream (&stream);
2379 pointer_set_destroy (seen);
2380
2381 lto_end_section ();
2382 }
2383
2384
2385 /* This pass is run after all of the functions are serialized and all
2386 of the IPA passes have written their serialized forms. This pass
2387 causes the vector of all of the global decls and types used from
2388 this file to be written into a section that can then be read back in
2389 to recover them on the other side. */
2390
2391 void
2392 produce_asm_for_decls (void)
2393 {
2394 struct lto_out_decl_state *out_state;
2395 struct lto_out_decl_state *fn_out_state;
2396 struct lto_decl_header header;
2397 char *section_name;
2398 struct output_block *ob;
2399 struct lto_output_stream *header_stream, *decl_state_stream;
2400 unsigned idx, num_fns;
2401 size_t decl_state_size;
2402 int32_t num_decl_states;
2403
2404 ob = create_output_block (LTO_section_decls);
2405 ob->global = true;
2406
2407 memset (&header, 0, sizeof (struct lto_decl_header));
2408
2409 section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
2410 lto_begin_section (section_name, !flag_wpa);
2411 free (section_name);
2412
2413 /* Make string 0 be a NULL string. */
2414 streamer_write_char_stream (ob->string_stream, 0);
2415
2416 gcc_assert (!alias_pairs);
2417
2418 /* Write the global symbols. */
2419 out_state = lto_get_out_decl_state ();
2420 num_fns = lto_function_decl_states.length ();
2421 lto_output_decl_state_streams (ob, out_state);
2422 for (idx = 0; idx < num_fns; idx++)
2423 {
2424 fn_out_state =
2425 lto_function_decl_states[idx];
2426 lto_output_decl_state_streams (ob, fn_out_state);
2427 }
2428
2429 header.lto_header.major_version = LTO_major_version;
2430 header.lto_header.minor_version = LTO_minor_version;
2431
2432 /* Currently not used. This field would allow us to preallocate
2433 the globals vector, so that it need not be resized as it is extended. */
2434 header.num_nodes = -1;
2435
2436 /* Compute the total size of all decl out states. */
2437 decl_state_size = sizeof (int32_t);
2438 decl_state_size += lto_out_decl_state_written_size (out_state);
2439 for (idx = 0; idx < num_fns; idx++)
2440 {
2441 fn_out_state =
2442 lto_function_decl_states[idx];
2443 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
2444 }
2445 header.decl_state_size = decl_state_size;
2446
2447 header.main_size = ob->main_stream->total_size;
2448 header.string_size = ob->string_stream->total_size;
2449
2450 header_stream = XCNEW (struct lto_output_stream);
2451 lto_output_data_stream (header_stream, &header, sizeof header);
2452 lto_write_stream (header_stream);
2453 free (header_stream);
2454
2455 /* Write the main out-decl state, followed by out-decl states of
2456 functions. */
2457 decl_state_stream = XCNEW (struct lto_output_stream);
2458 num_decl_states = num_fns + 1;
2459 lto_output_data_stream (decl_state_stream, &num_decl_states,
2460 sizeof (num_decl_states));
2461 lto_output_decl_state_refs (ob, decl_state_stream, out_state);
2462 for (idx = 0; idx < num_fns; idx++)
2463 {
2464 fn_out_state =
2465 lto_function_decl_states[idx];
2466 lto_output_decl_state_refs (ob, decl_state_stream, fn_out_state);
2467 }
2468 lto_write_stream (decl_state_stream);
2469 free (decl_state_stream);
2470
2471 lto_write_stream (ob->main_stream);
2472 lto_write_stream (ob->string_stream);
2473
2474 lto_end_section ();
2475
2476 /* Write the symbol table. It is used by the linker to determine
2477 dependencies, and thus we can skip it for WPA. */
2478 if (!flag_wpa)
2479 produce_symtab (ob);
2480
2481 /* Write command line opts. */
2482 lto_write_options ();
2483
2484 /* Deallocate memory and clean up. */
2485 for (idx = 0; idx < num_fns; idx++)
2486 {
2487 fn_out_state =
2488 lto_function_decl_states[idx];
2489 lto_delete_out_decl_state (fn_out_state);
2490 }
2491 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
2492 lto_function_decl_states.release ();
2493 destroy_output_block (ob);
2494 }
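/* Illustrative sketch (not part of the original source): the decls section
   written by produce_asm_for_decls above is laid out as

       struct lto_decl_header  header
       decl-state block        header.decl_state_size bytes
                               (an int32 state count, the global state's
                                references, then one block per function)
       main stream             header.main_size bytes
       string stream           header.string_size bytes

   The hypothetical helpers below compute where the main stream and the
   string table start inside the raw section data; they are not a GCC API.  */

static const char *
example_decls_main_stream (const struct lto_decl_header *h)
{
  return (const char *) h + sizeof (*h) + h->decl_state_size;
}

static const char *
example_decls_string_table (const struct lto_decl_header *h)
{
  return example_decls_main_stream (h) + h->main_size;
}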