gcc/lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2014 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "stor-layout.h"
29 #include "stringpool.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "hashtab.h"
35 #include "basic-block.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-expr.h"
39 #include "is-a.h"
40 #include "gimple.h"
41 #include "gimple-iterator.h"
42 #include "gimple-ssa.h"
43 #include "tree-ssanames.h"
44 #include "tree-pass.h"
45 #include "function.h"
46 #include "diagnostic-core.h"
47 #include "except.h"
48 #include "lto-symtab.h"
49 #include "lto-streamer.h"
50 #include "data-streamer.h"
51 #include "gimple-streamer.h"
52 #include "tree-streamer.h"
53 #include "streamer-hooks.h"
54 #include "cfgloop.h"
55 #include "builtins.h"
56
57
58 static void lto_write_tree (struct output_block*, tree, bool);
59
60 /* Clear the line info stored in output block OB. */
61
62 static void
63 clear_line_info (struct output_block *ob)
64 {
65 ob->current_file = NULL;
66 ob->current_line = 0;
67 ob->current_col = 0;
68 }
69
70
71 /* Create the output block and return it. SECTION_TYPE is
72 LTO_section_function_body or LTO_section_static_initializer. */
73
74 struct output_block *
75 create_output_block (enum lto_section_type section_type)
76 {
77 struct output_block *ob = XCNEW (struct output_block);
78
79 ob->section_type = section_type;
80 ob->decl_state = lto_get_out_decl_state ();
81 ob->main_stream = XCNEW (struct lto_output_stream);
82 ob->string_stream = XCNEW (struct lto_output_stream);
83 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
84
85 if (section_type == LTO_section_function_body)
86 ob->cfg_stream = XCNEW (struct lto_output_stream);
87
88 clear_line_info (ob);
89
90 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
91 gcc_obstack_init (&ob->obstack);
92
93 return ob;
94 }
95
96
97 /* Destroy the output block OB. */
98
99 void
100 destroy_output_block (struct output_block *ob)
101 {
102 enum lto_section_type section_type = ob->section_type;
103
104 delete ob->string_hash_table;
105 ob->string_hash_table = NULL;
106
107 free (ob->main_stream);
108 free (ob->string_stream);
109 if (section_type == LTO_section_function_body)
110 free (ob->cfg_stream);
111
112 streamer_tree_cache_delete (ob->writer_cache);
113 obstack_free (&ob->obstack, NULL);
114
115 free (ob);
116 }
117
118
119 /* Look up NODE in the type table and write the index for it to OB. */
120
121 static void
122 output_type_ref (struct output_block *ob, tree node)
123 {
124 streamer_write_record_start (ob, LTO_type_ref);
125 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
126 }
127
128
129 /* Return true if tree node T is written to various tables. For these
130 nodes, we sometimes want to write their physical representation
131 (via lto_output_tree), and sometimes we need to emit an index
132 reference into a table (via lto_output_tree_ref). */
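/* For example, a file-scope VAR_DECL is indexable (it is written once to
   the global decl stream and referenced by index from then on), while a
   non-static VAR_DECL with a function context is not and gets streamed
   inline with the body that uses it.  */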
133
134 static bool
135 tree_is_indexable (tree t)
136 {
137 /* Parameters and return values of functions of variably modified types
138 must go to global stream, because they may be used in the type
139 definition. */
140 if (TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
141 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
142 else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
143 || TREE_CODE (t) == TYPE_DECL
144 || TREE_CODE (t) == CONST_DECL
145 || TREE_CODE (t) == NAMELIST_DECL)
146 && decl_function_context (t))
147 return false;
148 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
149 return false;
150 /* Variably modified types need to be streamed alongside function
151 bodies because they can refer to local entities. Together with
152 them we have to localize their members as well.
153 ??? In theory that includes non-FIELD_DECLs as well. */
154 else if (TYPE_P (t)
155 && variably_modified_type_p (t, NULL_TREE))
156 return false;
157 else if (TREE_CODE (t) == FIELD_DECL
158 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
159 return false;
160 else
161 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
162 }
163
164
165 /* Output info about new location into bitpack BP.
166 After outputting bitpack, lto_output_location_data has
167 to be done to output actual data. */
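/* The location is delta-encoded: one bit records whether LOC is
   UNKNOWN_LOCATION; otherwise a "changed" bit is emitted for each of
   file, line and column, and the new value follows only where the
   corresponding bit is set.  */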
168
169 void
170 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
171 location_t loc)
172 {
173 expanded_location xloc;
174
175 loc = LOCATION_LOCUS (loc);
176 bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
177 if (loc == UNKNOWN_LOCATION)
178 return;
179
180 xloc = expand_location (loc);
181
182 bp_pack_value (bp, ob->current_file != xloc.file, 1);
183 bp_pack_value (bp, ob->current_line != xloc.line, 1);
184 bp_pack_value (bp, ob->current_col != xloc.column, 1);
185
186 if (ob->current_file != xloc.file)
187 bp_pack_var_len_unsigned (bp,
188 streamer_string_index (ob, xloc.file,
189 strlen (xloc.file) + 1,
190 true));
191 ob->current_file = xloc.file;
192
193 if (ob->current_line != xloc.line)
194 bp_pack_var_len_unsigned (bp, xloc.line);
195 ob->current_line = xloc.line;
196
197 if (ob->current_col != xloc.column)
198 bp_pack_var_len_unsigned (bp, xloc.column);
199 ob->current_col = xloc.column;
200 }
201
202
203 /* If EXPR is an indexable tree node, output a reference to it to
204 output block OB. Otherwise, output the physical representation of
205 EXPR to OB. */
206
207 static void
208 lto_output_tree_ref (struct output_block *ob, tree expr)
209 {
210 enum tree_code code;
211
212 if (TYPE_P (expr))
213 {
214 output_type_ref (ob, expr);
215 return;
216 }
217
218 code = TREE_CODE (expr);
219 switch (code)
220 {
221 case SSA_NAME:
222 streamer_write_record_start (ob, LTO_ssa_name_ref);
223 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
224 break;
225
226 case FIELD_DECL:
227 streamer_write_record_start (ob, LTO_field_decl_ref);
228 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
229 break;
230
231 case FUNCTION_DECL:
232 streamer_write_record_start (ob, LTO_function_decl_ref);
233 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
234 break;
235
236 case VAR_DECL:
237 case DEBUG_EXPR_DECL:
238 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
239 case PARM_DECL:
240 streamer_write_record_start (ob, LTO_global_decl_ref);
241 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
242 break;
243
244 case CONST_DECL:
245 streamer_write_record_start (ob, LTO_const_decl_ref);
246 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
247 break;
248
249 case IMPORTED_DECL:
250 gcc_assert (decl_function_context (expr) == NULL);
251 streamer_write_record_start (ob, LTO_imported_decl_ref);
252 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
253 break;
254
255 case TYPE_DECL:
256 streamer_write_record_start (ob, LTO_type_decl_ref);
257 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
258 break;
259
260 case NAMELIST_DECL:
261 streamer_write_record_start (ob, LTO_namelist_decl_ref);
262 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
263 break;
264
265 case NAMESPACE_DECL:
266 streamer_write_record_start (ob, LTO_namespace_decl_ref);
267 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
268 break;
269
270 case LABEL_DECL:
271 streamer_write_record_start (ob, LTO_label_decl_ref);
272 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
273 break;
274
275 case RESULT_DECL:
276 streamer_write_record_start (ob, LTO_result_decl_ref);
277 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
278 break;
279
280 case TRANSLATION_UNIT_DECL:
281 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
282 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
283 break;
284
285 default:
286 /* No other node is indexable, so it should have been handled by
287 lto_output_tree. */
288 gcc_unreachable ();
289 }
290 }
291
292
293 /* Return true if EXPR is a tree node that can be written to disk. */
294
295 static inline bool
296 lto_is_streamable (tree expr)
297 {
298 enum tree_code code = TREE_CODE (expr);
299
300 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
301 name version in lto_output_tree_ref (see output_ssa_names). */
302 return !is_lang_specific (expr)
303 && code != SSA_NAME
304 && code != CALL_EXPR
305 && code != LANG_TYPE
306 && code != MODIFY_EXPR
307 && code != INIT_EXPR
308 && code != TARGET_EXPR
309 && code != BIND_EXPR
310 && code != WITH_CLEANUP_EXPR
311 && code != STATEMENT_LIST
312 && (code == CASE_LABEL_EXPR
313 || code == DECL_EXPR
314 || TREE_CODE_CLASS (code) != tcc_statement);
315 }
316
317
318 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
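/* For variables whose initializer is streamed separately through the
   symtab encoder (or not at all), error_mark_node is returned as a
   placeholder instead of the real DECL_INITIAL.  */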
319
320 static tree
321 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
322 {
323 gcc_checking_assert (DECL_P (expr)
324 && TREE_CODE (expr) != FUNCTION_DECL
325 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
326
327 /* Handle DECL_INITIAL for symbols. */
328 tree initial = DECL_INITIAL (expr);
329 if (TREE_CODE (expr) == VAR_DECL
330 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
331 && !DECL_IN_CONSTANT_POOL (expr)
332 && initial)
333 {
334 varpool_node *vnode;
335 /* Extra section needs about 30 bytes; do not produce it for simple
336 scalar values. */
337 if (TREE_CODE (DECL_INITIAL (expr)) == CONSTRUCTOR
338 || !(vnode = varpool_get_node (expr))
339 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
340 initial = error_mark_node;
341 }
342
343 return initial;
344 }
345
346
347 /* Write a physical representation of tree node EXPR to output block
348 OB. If REF_P is true, the leaves of EXPR are emitted as references
349 via lto_output_tree_ref. IX is the index into the streamer cache
350 where EXPR is stored. */
351
352 static void
353 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
354 {
355 /* Pack all the non-pointer fields in EXPR into a bitpack and write
356 the resulting bitpack. */
357 bitpack_d bp = bitpack_create (ob->main_stream);
358 streamer_pack_tree_bitfields (ob, &bp, expr);
359 streamer_write_bitpack (&bp);
360
361 /* Write all the pointer fields in EXPR. */
362 streamer_write_tree_body (ob, expr, ref_p);
363
364 /* Write any LTO-specific data to OB. */
365 if (DECL_P (expr)
366 && TREE_CODE (expr) != FUNCTION_DECL
367 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
368 {
369 /* Handle DECL_INITIAL for symbols. */
370 tree initial = get_symbol_initial_value
371 (ob->decl_state->symtab_node_encoder, expr);
372 stream_write_tree (ob, initial, ref_p);
373 }
374 }
375
376 /* Write a physical representation of tree node EXPR to output block
377 OB. If REF_P is true, the leaves of EXPR are emitted as references
378 via lto_output_tree_ref. IX is the index into the streamer cache
379 where EXPR is stored. */
380
381 static void
382 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
383 {
384 if (!lto_is_streamable (expr))
385 internal_error ("tree code %qs is not supported in LTO streams",
386 get_tree_code_name (TREE_CODE (expr)));
387
388 /* Write the header, containing everything needed to materialize
389 EXPR on the reading side. */
390 streamer_write_tree_header (ob, expr);
391
392 lto_write_tree_1 (ob, expr, ref_p);
393
394 /* Mark the end of EXPR. */
395 streamer_write_zero (ob);
396 }
397
398 /* Emit the physical representation of tree node EXPR to output block
399 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
400 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
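/* THIS_REF_P applies to EXPR itself (whether a reference may be emitted
   instead of its body), while REF_P is propagated to the trees reached
   from EXPR during the walk.  */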
401
402 static void
403 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
404 bool ref_p, bool this_ref_p)
405 {
406 unsigned ix;
407
408 gcc_checking_assert (expr != NULL_TREE
409 && !(this_ref_p && tree_is_indexable (expr)));
410
411 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
412 expr, hash, &ix);
413 gcc_assert (!exists_p);
414 if (streamer_handle_as_builtin_p (expr))
415 {
416 /* MD and NORMAL builtins do not need to be written out
417 completely as they are always instantiated by the
418 compiler on startup. The only builtins that need to
419 be written out are BUILT_IN_FRONTEND. For all other
420 builtins, we simply write the class and code. */
421 streamer_write_builtin (ob, expr);
422 }
423 else if (TREE_CODE (expr) == INTEGER_CST
424 && !TREE_OVERFLOW (expr))
425 {
426 /* Shared INTEGER_CST nodes are special because they need their
427 original type to be materialized by the reader (to implement
428 TYPE_CACHED_VALUES). */
429 streamer_write_integer_cst (ob, expr, ref_p);
430 }
431 else
432 {
433 /* This is the first time we see EXPR, write its fields
434 to OB. */
435 lto_write_tree (ob, expr, ref_p);
436 }
437 }
438
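/* Per-tree state for the Tarjan-style SCC walk below: DFSNUM is the DFS
   visitation number and LOW the smallest DFSNUM reachable from the node.
   A node whose LOW equals its DFSNUM is the root of an SCC, which is then
   popped off the SCC stack and streamed as one unit.  */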
439 struct sccs
440 {
441 unsigned int dfsnum;
442 unsigned int low;
443 };
444
445 struct scc_entry
446 {
447 tree t;
448 hashval_t hash;
449 };
450
451 static unsigned int next_dfs_num;
452 static vec<scc_entry> sccstack;
453 static struct pointer_map_t *sccstate;
454 static struct obstack sccstate_obstack;
455
456 static void
457 DFS_write_tree (struct output_block *ob, sccs *from_state,
458 tree expr, bool ref_p, bool this_ref_p);
459
460 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
461 DFS recurse for all tree edges originating from it. */
462
463 static void
464 DFS_write_tree_body (struct output_block *ob,
465 tree expr, sccs *expr_state, bool ref_p)
466 {
467 #define DFS_follow_tree_edge(DEST) \
468 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
469
470 enum tree_code code;
471
472 code = TREE_CODE (expr);
473
474 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
475 {
476 if (TREE_CODE (expr) != IDENTIFIER_NODE)
477 DFS_follow_tree_edge (TREE_TYPE (expr));
478 }
479
480 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
481 {
482 for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
483 DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
484 }
485
486 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
487 {
488 DFS_follow_tree_edge (TREE_REALPART (expr));
489 DFS_follow_tree_edge (TREE_IMAGPART (expr));
490 }
491
492 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
493 {
494 /* Drop names that were created for anonymous entities. */
495 if (DECL_NAME (expr)
496 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
497 && ANON_AGGRNAME_P (DECL_NAME (expr)))
498 ;
499 else
500 DFS_follow_tree_edge (DECL_NAME (expr));
501 DFS_follow_tree_edge (DECL_CONTEXT (expr));
502 }
503
504 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
505 {
506 DFS_follow_tree_edge (DECL_SIZE (expr));
507 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
508
509 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
510 special handling in LTO, it must be handled by streamer hooks. */
511
512 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
513
514 /* Do not follow DECL_ABSTRACT_ORIGIN. We cannot handle debug information
515 for early inlining so drop it on the floor instead of ICEing in
516 dwarf2out.c. */
517
518 if ((TREE_CODE (expr) == VAR_DECL
519 || TREE_CODE (expr) == PARM_DECL)
520 && DECL_HAS_VALUE_EXPR_P (expr))
521 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
522 if (TREE_CODE (expr) == VAR_DECL)
523 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
524 }
525
526 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
527 {
528 if (TREE_CODE (expr) == TYPE_DECL)
529 DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
530 }
531
532 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
533 {
534 /* Make sure we don't inadvertently set the assembler name. */
535 if (DECL_ASSEMBLER_NAME_SET_P (expr))
536 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
537 }
538
539 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
540 {
541 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
542 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
543 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
544 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
545 DFS_follow_tree_edge (DECL_FCONTEXT (expr));
546 }
547
548 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
549 {
550 DFS_follow_tree_edge (DECL_VINDEX (expr));
551 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
552 /* Do not follow DECL_FUNCTION_SPECIFIC_TARGET. They will be regenerated. */
553 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
554 }
555
556 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
557 {
558 DFS_follow_tree_edge (TYPE_SIZE (expr));
559 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
560 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
561 DFS_follow_tree_edge (TYPE_NAME (expr));
562 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
563 reconstructed during fixup. */
564 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
565 during fixup. */
566 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
567 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
568 /* TYPE_CANONICAL is re-computed during type merging, so no need
569 to follow it here. */
570 DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
571 }
572
573 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
574 {
575 if (TREE_CODE (expr) == ENUMERAL_TYPE)
576 DFS_follow_tree_edge (TYPE_VALUES (expr));
577 else if (TREE_CODE (expr) == ARRAY_TYPE)
578 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
579 else if (RECORD_OR_UNION_TYPE_P (expr))
580 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
581 DFS_follow_tree_edge (t);
582 else if (TREE_CODE (expr) == FUNCTION_TYPE
583 || TREE_CODE (expr) == METHOD_TYPE)
584 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
585
586 if (!POINTER_TYPE_P (expr))
587 DFS_follow_tree_edge (TYPE_MINVAL (expr));
588 DFS_follow_tree_edge (TYPE_MAXVAL (expr));
589 if (RECORD_OR_UNION_TYPE_P (expr))
590 DFS_follow_tree_edge (TYPE_BINFO (expr));
591 }
592
593 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
594 {
595 DFS_follow_tree_edge (TREE_PURPOSE (expr));
596 DFS_follow_tree_edge (TREE_VALUE (expr));
597 DFS_follow_tree_edge (TREE_CHAIN (expr));
598 }
599
600 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
601 {
602 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
603 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
604 }
605
606 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
607 {
608 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
609 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
610 DFS_follow_tree_edge (TREE_BLOCK (expr));
611 }
612
613 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
614 {
615 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
616 /* ??? FIXME. See also streamer_write_chain. */
617 if (!(VAR_OR_FUNCTION_DECL_P (t)
618 && DECL_EXTERNAL (t)))
619 DFS_follow_tree_edge (t);
620
621 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
622
623 /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
624 handle - those that represent inlined function scopes.
625 For the rest, drop them on the floor instead of ICEing
626 in dwarf2out.c. */
627 if (inlined_function_outer_scope_p (expr))
628 {
629 tree ultimate_origin = block_ultimate_origin (expr);
630 DFS_follow_tree_edge (ultimate_origin);
631 }
632 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
633 information for early inlined BLOCKs so drop it on the floor instead
634 of ICEing in dwarf2out.c. */
635
636 /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN are not live at LTO
637 streaming time. */
638
639 /* Do not output BLOCK_SUBBLOCKS. Instead on streaming-in this
640 list is re-constructed from BLOCK_SUPERCONTEXT. */
641 }
642
643 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
644 {
645 unsigned i;
646 tree t;
647
648 /* Note that the number of BINFO slots has already been emitted in
649 EXPR's header (see streamer_write_tree_header) because this length
650 is needed to build the empty BINFO node on the reader side. */
651 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
652 DFS_follow_tree_edge (t);
653 DFS_follow_tree_edge (BINFO_OFFSET (expr));
654 DFS_follow_tree_edge (BINFO_VTABLE (expr));
655 DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));
656
657 /* The number of BINFO_BASE_ACCESSES has already been emitted in
658 EXPR's bitfield section. */
659 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
660 DFS_follow_tree_edge (t);
661
662 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
663 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
664 }
665
666 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
667 {
668 unsigned i;
669 tree index, value;
670
671 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
672 {
673 DFS_follow_tree_edge (index);
674 DFS_follow_tree_edge (value);
675 }
676 }
677
678 if (code == OMP_CLAUSE)
679 {
680 int i;
681 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
682 DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
683 DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
684 }
685
686 #undef DFS_follow_tree_edge
687 }
688
689 /* Return a hash value for the tree T. */
690
691 static hashval_t
692 hash_tree (struct streamer_tree_cache_d *cache, tree t)
693 {
694 #define visit(SIBLING) \
695 do { \
696 unsigned ix; \
697 if (SIBLING && streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
698 v = iterative_hash_hashval_t (streamer_tree_cache_get_hash (cache, ix), v); \
699 } while (0)
700
701 /* Hash TS_BASE. */
702 enum tree_code code = TREE_CODE (t);
703 hashval_t v = iterative_hash_host_wide_int (code, 0);
704 if (!TYPE_P (t))
705 {
706 v = iterative_hash_host_wide_int (TREE_SIDE_EFFECTS (t)
707 | (TREE_CONSTANT (t) << 1)
708 | (TREE_READONLY (t) << 2)
709 | (TREE_PUBLIC (t) << 3), v);
710 }
711 v = iterative_hash_host_wide_int (TREE_ADDRESSABLE (t)
712 | (TREE_THIS_VOLATILE (t) << 1), v);
713 if (DECL_P (t))
714 v = iterative_hash_host_wide_int (DECL_UNSIGNED (t), v);
715 else if (TYPE_P (t))
716 v = iterative_hash_host_wide_int (TYPE_UNSIGNED (t), v);
717 if (TYPE_P (t))
718 v = iterative_hash_host_wide_int (TYPE_ARTIFICIAL (t), v);
719 else
720 v = iterative_hash_host_wide_int (TREE_NO_WARNING (t), v);
721 v = iterative_hash_host_wide_int (TREE_NOTHROW (t)
722 | (TREE_STATIC (t) << 1)
723 | (TREE_PROTECTED (t) << 2)
724 | (TREE_DEPRECATED (t) << 3), v);
725 if (code != TREE_BINFO)
726 v = iterative_hash_host_wide_int (TREE_PRIVATE (t), v);
727 if (TYPE_P (t))
728 v = iterative_hash_host_wide_int (TYPE_SATURATING (t)
729 | (TYPE_ADDR_SPACE (t) << 1), v);
730 else if (code == SSA_NAME)
731 v = iterative_hash_host_wide_int (SSA_NAME_IS_DEFAULT_DEF (t), v);
732
733 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
734 {
735 int i;
736 v = iterative_hash_host_wide_int (TREE_INT_CST_NUNITS (t), v);
737 v = iterative_hash_host_wide_int (TREE_INT_CST_EXT_NUNITS (t), v);
738 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
739 v = iterative_hash_host_wide_int (TREE_INT_CST_ELT (t, i), v);
740 }
741
742 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
743 {
744 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
745 v = iterative_hash_host_wide_int (r.cl, v);
746 v = iterative_hash_host_wide_int (r.decimal
747 | (r.sign << 1)
748 | (r.signalling << 2)
749 | (r.canonical << 3), v);
750 v = iterative_hash_host_wide_int (r.uexp, v);
751 for (unsigned i = 0; i < SIGSZ; ++i)
752 v = iterative_hash_host_wide_int (r.sig[i], v);
753 }
754
755 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
756 {
757 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
758 v = iterative_hash_host_wide_int (f.mode, v);
759 v = iterative_hash_host_wide_int (f.data.low, v);
760 v = iterative_hash_host_wide_int (f.data.high, v);
761 }
762
763 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
764 {
765 v = iterative_hash_host_wide_int (DECL_MODE (t), v);
766 v = iterative_hash_host_wide_int (DECL_NONLOCAL (t)
767 | (DECL_VIRTUAL_P (t) << 1)
768 | (DECL_IGNORED_P (t) << 2)
769 | (DECL_ABSTRACT (t) << 3)
770 | (DECL_ARTIFICIAL (t) << 4)
771 | (DECL_USER_ALIGN (t) << 5)
772 | (DECL_PRESERVE_P (t) << 6)
773 | (DECL_EXTERNAL (t) << 7)
774 | (DECL_GIMPLE_REG_P (t) << 8), v);
775 v = iterative_hash_host_wide_int (DECL_ALIGN (t), v);
776 if (code == LABEL_DECL)
777 {
778 v = iterative_hash_host_wide_int (EH_LANDING_PAD_NR (t), v);
779 v = iterative_hash_host_wide_int (LABEL_DECL_UID (t), v);
780 }
781 else if (code == FIELD_DECL)
782 {
783 v = iterative_hash_host_wide_int (DECL_PACKED (t)
784 | (DECL_NONADDRESSABLE_P (t) << 1),
785 v);
786 v = iterative_hash_host_wide_int (DECL_OFFSET_ALIGN (t), v);
787 }
788 else if (code == VAR_DECL)
789 {
790 v = iterative_hash_host_wide_int (DECL_HAS_DEBUG_EXPR_P (t)
791 | (DECL_NONLOCAL_FRAME (t) << 1),
792 v);
793 }
794 if (code == RESULT_DECL
795 || code == PARM_DECL
796 || code == VAR_DECL)
797 {
798 v = iterative_hash_host_wide_int (DECL_BY_REFERENCE (t), v);
799 if (code == VAR_DECL
800 || code == PARM_DECL)
801 v = iterative_hash_host_wide_int (DECL_HAS_VALUE_EXPR_P (t), v);
802 }
803 }
804
805 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
806 v = iterative_hash_host_wide_int (DECL_REGISTER (t), v);
807
808 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
809 {
810 v = iterative_hash_host_wide_int ((DECL_COMMON (t))
811 | (DECL_DLLIMPORT_P (t) << 1)
812 | (DECL_WEAK (t) << 2)
813 | (DECL_SEEN_IN_BIND_EXPR_P (t) << 3)
814 | (DECL_COMDAT (t) << 4)
815 | (DECL_VISIBILITY_SPECIFIED (t) << 6),
816 v);
817 v = iterative_hash_host_wide_int (DECL_VISIBILITY (t), v);
818 if (code == VAR_DECL)
819 {
820 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
821 v = iterative_hash_host_wide_int (DECL_HARD_REGISTER (t)
822 | (DECL_IN_CONSTANT_POOL (t) << 1),
823 v);
824 }
825 if (TREE_CODE (t) == FUNCTION_DECL)
826 v = iterative_hash_host_wide_int (DECL_FINAL_P (t)
827 | (DECL_CXX_CONSTRUCTOR_P (t) << 1)
828 | (DECL_CXX_DESTRUCTOR_P (t) << 2),
829 v);
830 }
831
832 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
833 {
834 v = iterative_hash_host_wide_int (DECL_BUILT_IN_CLASS (t), v);
835 v = iterative_hash_host_wide_int (DECL_STATIC_CONSTRUCTOR (t)
836 | (DECL_STATIC_DESTRUCTOR (t) << 1)
837 | (DECL_UNINLINABLE (t) << 2)
838 | (DECL_POSSIBLY_INLINED (t) << 3)
839 | (DECL_IS_NOVOPS (t) << 4)
840 | (DECL_IS_RETURNS_TWICE (t) << 5)
841 | (DECL_IS_MALLOC (t) << 6)
842 | (DECL_IS_OPERATOR_NEW (t) << 7)
843 | (DECL_DECLARED_INLINE_P (t) << 8)
844 | (DECL_STATIC_CHAIN (t) << 9)
845 | (DECL_NO_INLINE_WARNING_P (t) << 10)
846 | (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t) << 11)
847 | (DECL_NO_LIMIT_STACK (t) << 12)
848 | (DECL_DISREGARD_INLINE_LIMITS (t) << 13)
849 | (DECL_PURE_P (t) << 14)
850 | (DECL_LOOPING_CONST_OR_PURE_P (t) << 15), v);
851 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
852 v = iterative_hash_host_wide_int (DECL_FUNCTION_CODE (t), v);
853 }
854
855 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
856 {
857 v = iterative_hash_host_wide_int (TYPE_MODE (t), v);
858 v = iterative_hash_host_wide_int (TYPE_STRING_FLAG (t)
859 | (TYPE_NO_FORCE_BLK (t) << 1)
860 | (TYPE_NEEDS_CONSTRUCTING (t) << 2)
861 | (TYPE_PACKED (t) << 3)
862 | (TYPE_RESTRICT (t) << 4)
863 | (TYPE_USER_ALIGN (t) << 5)
864 | (TYPE_READONLY (t) << 6), v);
865 if (RECORD_OR_UNION_TYPE_P (t))
866 {
867 v = iterative_hash_host_wide_int (TYPE_TRANSPARENT_AGGR (t)
868 | (TYPE_FINAL_P (t) << 1), v);
869 }
870 else if (code == ARRAY_TYPE)
871 v = iterative_hash_host_wide_int (TYPE_NONALIASED_COMPONENT (t), v);
872 v = iterative_hash_host_wide_int (TYPE_PRECISION (t), v);
873 v = iterative_hash_host_wide_int (TYPE_ALIGN (t), v);
874 v = iterative_hash_host_wide_int ((TYPE_ALIAS_SET (t) == 0
875 || (!in_lto_p
876 && get_alias_set (t) == 0))
877 ? 0 : -1, v);
878 }
879
880 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
881 v = iterative_hash (TRANSLATION_UNIT_LANGUAGE (t),
882 strlen (TRANSLATION_UNIT_LANGUAGE (t)), v);
883
884 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
885 gcc_unreachable ();
886
887 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
888 v = iterative_hash (t, sizeof (struct cl_optimization), v);
889
890 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
891 v = iterative_hash_host_wide_int (IDENTIFIER_HASH_VALUE (t), v);
892
893 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
894 v = iterative_hash (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t), v);
895
896 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
897 {
898 if (POINTER_TYPE_P (t))
899 {
900 /* For pointers factor in the pointed-to type recursively as
901 we cannot recurse through only pointers.
902 ??? We can generalize this by keeping track of the
903 in-SCC edges for each tree (or arbitrarily the first
904 such edge) and hashing that in during a second stage
905 (instead of the quadratic mixing of the SCC we do now). */
906 hashval_t x;
907 unsigned ix;
908 if (streamer_tree_cache_lookup (cache, TREE_TYPE (t), &ix))
909 x = streamer_tree_cache_get_hash (cache, ix);
910 else
911 x = hash_tree (cache, TREE_TYPE (t));
912 v = iterative_hash_hashval_t (x, v);
913 }
914 else if (code != IDENTIFIER_NODE)
915 visit (TREE_TYPE (t));
916 }
917
918 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
919 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
920 visit (VECTOR_CST_ELT (t, i));
921
922 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
923 {
924 visit (TREE_REALPART (t));
925 visit (TREE_IMAGPART (t));
926 }
927
928 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
929 {
930 /* Drop names that were created for anonymous entities. */
931 if (DECL_NAME (t)
932 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
933 && ANON_AGGRNAME_P (DECL_NAME (t)))
934 ;
935 else
936 visit (DECL_NAME (t));
937 if (DECL_FILE_SCOPE_P (t))
938 ;
939 else
940 visit (DECL_CONTEXT (t));
941 }
942
943 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
944 {
945 visit (DECL_SIZE (t));
946 visit (DECL_SIZE_UNIT (t));
947 visit (DECL_ATTRIBUTES (t));
948 if ((code == VAR_DECL
949 || code == PARM_DECL)
950 && DECL_HAS_VALUE_EXPR_P (t))
951 visit (DECL_VALUE_EXPR (t));
952 if (code == VAR_DECL
953 && DECL_HAS_DEBUG_EXPR_P (t))
954 visit (DECL_DEBUG_EXPR (t));
955 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
956 be able to call get_symbol_initial_value. */
957 }
958
959 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
960 {
961 if (code == TYPE_DECL)
962 visit (DECL_ORIGINAL_TYPE (t));
963 }
964
965 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
966 {
967 if (DECL_ASSEMBLER_NAME_SET_P (t))
968 visit (DECL_ASSEMBLER_NAME (t));
969 }
970
971 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
972 {
973 visit (DECL_FIELD_OFFSET (t));
974 visit (DECL_BIT_FIELD_TYPE (t));
975 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
976 visit (DECL_FIELD_BIT_OFFSET (t));
977 visit (DECL_FCONTEXT (t));
978 }
979
980 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
981 {
982 visit (DECL_VINDEX (t));
983 visit (DECL_FUNCTION_PERSONALITY (t));
984 /* Do not follow DECL_FUNCTION_SPECIFIC_TARGET. */
985 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
986 }
987
988 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
989 {
990 visit (TYPE_SIZE (t));
991 visit (TYPE_SIZE_UNIT (t));
992 visit (TYPE_ATTRIBUTES (t));
993 visit (TYPE_NAME (t));
994 visit (TYPE_MAIN_VARIANT (t));
995 if (TYPE_FILE_SCOPE_P (t))
996 ;
997 else
998 visit (TYPE_CONTEXT (t));
999 visit (TYPE_STUB_DECL (t));
1000 }
1001
1002 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1003 {
1004 if (code == ENUMERAL_TYPE)
1005 visit (TYPE_VALUES (t));
1006 else if (code == ARRAY_TYPE)
1007 visit (TYPE_DOMAIN (t));
1008 else if (RECORD_OR_UNION_TYPE_P (t))
1009 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1010 visit (f);
1011 else if (code == FUNCTION_TYPE
1012 || code == METHOD_TYPE)
1013 visit (TYPE_ARG_TYPES (t));
1014 if (!POINTER_TYPE_P (t))
1015 visit (TYPE_MINVAL (t));
1016 visit (TYPE_MAXVAL (t));
1017 if (RECORD_OR_UNION_TYPE_P (t))
1018 visit (TYPE_BINFO (t));
1019 }
1020
1021 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1022 {
1023 visit (TREE_PURPOSE (t));
1024 visit (TREE_VALUE (t));
1025 visit (TREE_CHAIN (t));
1026 }
1027
1028 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1029 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1030 visit (TREE_VEC_ELT (t, i));
1031
1032 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1033 {
1034 v = iterative_hash_host_wide_int (TREE_OPERAND_LENGTH (t), v);
1035 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1036 visit (TREE_OPERAND (t, i));
1037 }
1038
1039 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1040 {
1041 unsigned i;
1042 tree b;
1043 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1044 visit (b);
1045 visit (BINFO_OFFSET (t));
1046 visit (BINFO_VTABLE (t));
1047 visit (BINFO_VPTR_FIELD (t));
1048 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1049 visit (b);
1050 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1051 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1052 }
1053
1054 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1055 {
1056 unsigned i;
1057 tree index, value;
1058 v = iterative_hash_host_wide_int (CONSTRUCTOR_NELTS (t), v);
1059 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1060 {
1061 visit (index);
1062 visit (value);
1063 }
1064 }
1065
1066 if (code == OMP_CLAUSE)
1067 {
1068 int i;
1069
1070 v = iterative_hash_host_wide_int (OMP_CLAUSE_CODE (t), v);
1071 switch (OMP_CLAUSE_CODE (t))
1072 {
1073 case OMP_CLAUSE_DEFAULT:
1074 v = iterative_hash_host_wide_int (OMP_CLAUSE_DEFAULT_KIND (t), v);
1075 break;
1076 case OMP_CLAUSE_SCHEDULE:
1077 v = iterative_hash_host_wide_int (OMP_CLAUSE_SCHEDULE_KIND (t), v);
1078 break;
1079 case OMP_CLAUSE_DEPEND:
1080 v = iterative_hash_host_wide_int (OMP_CLAUSE_DEPEND_KIND (t), v);
1081 break;
1082 case OMP_CLAUSE_MAP:
1083 v = iterative_hash_host_wide_int (OMP_CLAUSE_MAP_KIND (t), v);
1084 break;
1085 case OMP_CLAUSE_PROC_BIND:
1086 v = iterative_hash_host_wide_int (OMP_CLAUSE_PROC_BIND_KIND (t), v);
1087 break;
1088 case OMP_CLAUSE_REDUCTION:
1089 v = iterative_hash_host_wide_int (OMP_CLAUSE_REDUCTION_CODE (t), v);
1090 break;
1091 default:
1092 break;
1093 }
1094 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1095 visit (OMP_CLAUSE_OPERAND (t, i));
1096 visit (OMP_CLAUSE_CHAIN (t));
1097 }
1098
1099 return v;
1100
1101 #undef visit
1102 }
1103
1104 /* Compare two SCC entries by their hash value for qsorting them. */
1105
1106 static int
1107 scc_entry_compare (const void *p1_, const void *p2_)
1108 {
1109 const scc_entry *p1 = (const scc_entry *) p1_;
1110 const scc_entry *p2 = (const scc_entry *) p2_;
1111 if (p1->hash < p2->hash)
1112 return -1;
1113 else if (p1->hash > p2->hash)
1114 return 1;
1115 return 0;
1116 }
1117
1118 /* Return a hash value for the SCC on the SCC stack from FIRST with
1119 size SIZE. */
1120
1121 static hashval_t
1122 hash_scc (struct streamer_tree_cache_d *cache, unsigned first, unsigned size)
1123 {
1124 /* Compute hash values for the SCC members. */
1125 for (unsigned i = 0; i < size; ++i)
1126 sccstack[first+i].hash = hash_tree (cache, sccstack[first+i].t);
1127
1128 if (size == 1)
1129 return sccstack[first].hash;
1130
1131 /* Sort the SCC's (tree, hash) pairs so that when we mix in
1132 all members of the SCC the hash value becomes independent of
1133 the order in which we visited the SCC. Produce the hash of the whole
1134 SCC as a combination of the hashes of its individual elements. Then
1135 combine that hash back into the hash of each element, so that otherwise
1136 identical-looking elements from two different SCCs are distinguished. */
1137 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1138
1139 hashval_t scc_hash = sccstack[first].hash;
1140 for (unsigned i = 1; i < size; ++i)
1141 scc_hash = iterative_hash_hashval_t (scc_hash,
1142 sccstack[first+i].hash);
1143 for (unsigned i = 0; i < size; ++i)
1144 sccstack[first+i].hash = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
1145 return scc_hash;
1146 }
1147
1148 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1149 already in the streamer cache. Main routine called for
1150 each visit of EXPR. */
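/* The walk only recurses into trees that are neither indexable nor
   already present in the writer cache.  When an SCC root is detected
   (LOW == DFSNUM), the SCC members are hashed (except at WPA time),
   reordered so that entries with the fewest hash collisions come first,
   and written out as a single LTO_tree_scc record.  */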
1151
1152 static void
1153 DFS_write_tree (struct output_block *ob, sccs *from_state,
1154 tree expr, bool ref_p, bool this_ref_p)
1155 {
1156 unsigned ix;
1157 sccs **slot;
1158
1159 /* Handle special cases. */
1160 if (expr == NULL_TREE)
1161 return;
1162
1163 /* Do not DFS walk into indexable trees. */
1164 if (this_ref_p && tree_is_indexable (expr))
1165 return;
1166
1167 /* Check if we already streamed EXPR. */
1168 if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
1169 return;
1170
1171 slot = (sccs **)pointer_map_insert (sccstate, expr);
1172 sccs *cstate = *slot;
1173 if (!cstate)
1174 {
1175 scc_entry e = { expr, 0 };
1176 /* Not yet visited. DFS recurse and push it onto the stack. */
1177 *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
1178 sccstack.safe_push (e);
1179 cstate->dfsnum = next_dfs_num++;
1180 cstate->low = cstate->dfsnum;
1181
1182 if (streamer_handle_as_builtin_p (expr))
1183 ;
1184 else if (TREE_CODE (expr) == INTEGER_CST
1185 && !TREE_OVERFLOW (expr))
1186 DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
1187 else
1188 {
1189 DFS_write_tree_body (ob, expr, cstate, ref_p);
1190
1191 /* Walk any LTO-specific edges. */
1192 if (DECL_P (expr)
1193 && TREE_CODE (expr) != FUNCTION_DECL
1194 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
1195 {
1196 /* Handle DECL_INITIAL for symbols. */
1197 tree initial = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
1198 expr);
1199 DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
1200 }
1201 }
1202
1203 /* See if we found an SCC. */
1204 if (cstate->low == cstate->dfsnum)
1205 {
1206 unsigned first, size;
1207 tree x;
1208
1209 /* Pop the SCC and compute its size. */
1210 first = sccstack.length ();
1211 do
1212 {
1213 x = sccstack[--first].t;
1214 }
1215 while (x != expr);
1216 size = sccstack.length () - first;
1217
1218 /* No need to compute hashes for LTRANS units, we don't perform
1219 any merging there. */
1220 hashval_t scc_hash = 0;
1221 unsigned scc_entry_len = 0;
1222 if (!flag_wpa)
1223 {
1224 scc_hash = hash_scc (ob->writer_cache, first, size);
1225
1226 /* Put the entries with the least number of collisions first. */
1227 unsigned entry_start = 0;
1228 scc_entry_len = size + 1;
1229 for (unsigned i = 0; i < size;)
1230 {
1231 unsigned from = i;
1232 for (i = i + 1; i < size
1233 && (sccstack[first + i].hash
1234 == sccstack[first + from].hash); ++i)
1235 ;
1236 if (i - from < scc_entry_len)
1237 {
1238 scc_entry_len = i - from;
1239 entry_start = from;
1240 }
1241 }
1242 for (unsigned i = 0; i < scc_entry_len; ++i)
1243 {
1244 scc_entry tem = sccstack[first + i];
1245 sccstack[first + i] = sccstack[first + entry_start + i];
1246 sccstack[first + entry_start + i] = tem;
1247 }
1248 }
1249
1250 /* Write LTO_tree_scc. */
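/* The record layout is: the LTO_tree_scc tag, the number of trees in
   the SCC, the SCC hash, and then either the single tree inline
   (size == 1) or the entry-candidate length followed by all tree
   headers and then all tree bodies.  */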
1251 streamer_write_record_start (ob, LTO_tree_scc);
1252 streamer_write_uhwi (ob, size);
1253 streamer_write_uhwi (ob, scc_hash);
1254
1255 /* Write size-1 SCCs without wrapping them inside SCC bundles.
1256 All INTEGER_CSTs need to be handled this way as we need
1257 their type to materialize them. Also builtins are handled
1258 this way.
1259 ??? We still wrap these in LTO_tree_scc so at the
1260 input side we can properly identify the tree we want
1261 to ultimately return. */
1262 if (size == 1)
1263 lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
1264 else
1265 {
1266 /* Write the size of the SCC entry candidates. */
1267 streamer_write_uhwi (ob, scc_entry_len);
1268
1269 /* Write all headers and populate the streamer cache. */
1270 for (unsigned i = 0; i < size; ++i)
1271 {
1272 hashval_t hash = sccstack[first+i].hash;
1273 tree t = sccstack[first+i].t;
1274 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
1275 t, hash, &ix);
1276 gcc_assert (!exists_p);
1277
1278 if (!lto_is_streamable (t))
1279 internal_error ("tree code %qs is not supported "
1280 "in LTO streams",
1281 get_tree_code_name (TREE_CODE (t)));
1282
1283 gcc_checking_assert (!streamer_handle_as_builtin_p (t));
1284
1285 /* Write the header, containing everything needed to
1286 materialize EXPR on the reading side. */
1287 streamer_write_tree_header (ob, t);
1288 }
1289
1290 /* Write the bitpacks and tree references. */
1291 for (unsigned i = 0; i < size; ++i)
1292 {
1293 lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
1294
1295 /* Mark the end of the tree. */
1296 streamer_write_zero (ob);
1297 }
1298 }
1299
1300 /* Finally truncate the vector. */
1301 sccstack.truncate (first);
1302
1303 if (from_state)
1304 from_state->low = MIN (from_state->low, cstate->low);
1305 return;
1306 }
1307
1308 if (from_state)
1309 from_state->low = MIN (from_state->low, cstate->low);
1310 }
1311 gcc_checking_assert (from_state);
1312 if (cstate->dfsnum < from_state->dfsnum)
1313 from_state->low = MIN (cstate->dfsnum, from_state->low);
1314 }
1315
1316
1317 /* Emit the physical representation of tree node EXPR to output block
1318 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
1319 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1320
1321 void
1322 lto_output_tree (struct output_block *ob, tree expr,
1323 bool ref_p, bool this_ref_p)
1324 {
1325 unsigned ix;
1326 bool existed_p;
1327
1328 if (expr == NULL_TREE)
1329 {
1330 streamer_write_record_start (ob, LTO_null);
1331 return;
1332 }
1333
1334 if (this_ref_p && tree_is_indexable (expr))
1335 {
1336 lto_output_tree_ref (ob, expr);
1337 return;
1338 }
1339
1340 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1341 if (existed_p)
1342 {
1343 /* If a node has already been streamed out, make sure that
1344 we don't write it more than once. Otherwise, the reader
1345 will instantiate two different nodes for the same object. */
1346 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1347 streamer_write_uhwi (ob, ix);
1348 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1349 lto_tree_code_to_tag (TREE_CODE (expr)));
1350 lto_stats.num_pickle_refs_output++;
1351 }
1352 else
1353 {
1354 /* This is the first time we see EXPR, write all reachable
1355 trees to OB. */
1356 static bool in_dfs_walk;
1357
1358 /* Protect against recursion, which would mean a disconnect between
1359 the tree edges we walk in the DFS walk and the edges
1360 we stream out. */
1361 gcc_assert (!in_dfs_walk);
1362
1363 /* Start the DFS walk. */
1366 in_dfs_walk = true;
1367 sccstate = pointer_map_create ();
1368 gcc_obstack_init (&sccstate_obstack);
1369 next_dfs_num = 1;
1370 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
1371 sccstack.release ();
1372 pointer_map_destroy (sccstate);
1373 obstack_free (&sccstate_obstack, NULL);
1374 in_dfs_walk = false;
1375
1376 /* Finally append a reference to the tree we were writing.
1377 ??? If expr ended up as a singleton we could have
1378 inlined it here and avoid outputting a reference. */
1379 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1380 gcc_assert (existed_p);
1381 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1382 streamer_write_uhwi (ob, ix);
1383 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1384 lto_tree_code_to_tag (TREE_CODE (expr)));
1385 lto_stats.num_pickle_refs_output++;
1386 }
1387 }
1388
1389
1390 /* Output to OB a list of try/catch handlers starting with FIRST. */
1391
1392 static void
1393 output_eh_try_list (struct output_block *ob, eh_catch first)
1394 {
1395 eh_catch n;
1396
1397 for (n = first; n; n = n->next_catch)
1398 {
1399 streamer_write_record_start (ob, LTO_eh_catch);
1400 stream_write_tree (ob, n->type_list, true);
1401 stream_write_tree (ob, n->filter_list, true);
1402 stream_write_tree (ob, n->label, true);
1403 }
1404
1405 streamer_write_record_start (ob, LTO_null);
1406 }
1407
1408
1409 /* Output EH region R to output block OB. */
1412
1413 static void
1414 output_eh_region (struct output_block *ob, eh_region r)
1415 {
1416 enum LTO_tags tag;
1417
1418 if (r == NULL)
1419 {
1420 streamer_write_record_start (ob, LTO_null);
1421 return;
1422 }
1423
1424 if (r->type == ERT_CLEANUP)
1425 tag = LTO_ert_cleanup;
1426 else if (r->type == ERT_TRY)
1427 tag = LTO_ert_try;
1428 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1429 tag = LTO_ert_allowed_exceptions;
1430 else if (r->type == ERT_MUST_NOT_THROW)
1431 tag = LTO_ert_must_not_throw;
1432 else
1433 gcc_unreachable ();
1434
1435 streamer_write_record_start (ob, tag);
1436 streamer_write_hwi (ob, r->index);
1437
1438 if (r->outer)
1439 streamer_write_hwi (ob, r->outer->index);
1440 else
1441 streamer_write_zero (ob);
1442
1443 if (r->inner)
1444 streamer_write_hwi (ob, r->inner->index);
1445 else
1446 streamer_write_zero (ob);
1447
1448 if (r->next_peer)
1449 streamer_write_hwi (ob, r->next_peer->index);
1450 else
1451 streamer_write_zero (ob);
1452
1453 if (r->type == ERT_TRY)
1454 {
1455 output_eh_try_list (ob, r->u.eh_try.first_catch);
1456 }
1457 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1458 {
1459 stream_write_tree (ob, r->u.allowed.type_list, true);
1460 stream_write_tree (ob, r->u.allowed.label, true);
1461 streamer_write_uhwi (ob, r->u.allowed.filter);
1462 }
1463 else if (r->type == ERT_MUST_NOT_THROW)
1464 {
1465 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1466 bitpack_d bp = bitpack_create (ob->main_stream);
1467 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1468 streamer_write_bitpack (&bp);
1469 }
1470
1471 if (r->landing_pads)
1472 streamer_write_hwi (ob, r->landing_pads->index);
1473 else
1474 streamer_write_zero (ob);
1475 }
1476
1477
1478 /* Output landing pad LP to OB. */
1479
1480 static void
1481 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1482 {
1483 if (lp == NULL)
1484 {
1485 streamer_write_record_start (ob, LTO_null);
1486 return;
1487 }
1488
1489 streamer_write_record_start (ob, LTO_eh_landing_pad);
1490 streamer_write_hwi (ob, lp->index);
1491 if (lp->next_lp)
1492 streamer_write_hwi (ob, lp->next_lp->index);
1493 else
1494 streamer_write_zero (ob);
1495
1496 if (lp->region)
1497 streamer_write_hwi (ob, lp->region->index);
1498 else
1499 streamer_write_zero (ob);
1500
1501 stream_write_tree (ob, lp->post_landing_pad, true);
1502 }
1503
1504
1505 /* Output the existing eh_table to OB. */
1506
1507 static void
1508 output_eh_regions (struct output_block *ob, struct function *fn)
1509 {
1510 if (fn->eh && fn->eh->region_tree)
1511 {
1512 unsigned i;
1513 eh_region eh;
1514 eh_landing_pad lp;
1515 tree ttype;
1516
1517 streamer_write_record_start (ob, LTO_eh_table);
1518
1519 /* Emit the index of the root of the EH region tree. */
1520 streamer_write_hwi (ob, fn->eh->region_tree->index);
1521
1522 /* Emit all the EH regions in the region array. */
1523 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1524 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1525 output_eh_region (ob, eh);
1526
1527 /* Emit all landing pads. */
1528 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1529 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1530 output_eh_lp (ob, lp);
1531
1532 /* Emit all the runtime type data. */
1533 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1534 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1535 stream_write_tree (ob, ttype, true);
1536
1537 /* Emit the table of action chains. */
1538 if (targetm.arm_eabi_unwinder)
1539 {
1540 tree t;
1541 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1542 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1543 stream_write_tree (ob, t, true);
1544 }
1545 else
1546 {
1547 uchar c;
1548 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1549 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1550 streamer_write_char_stream (ob->main_stream, c);
1551 }
1552 }
1553
1554 /* The LTO_null either terminates the record or indicates that there
1555 are no eh_records at all. */
1556 streamer_write_record_start (ob, LTO_null);
1557 }
1558
1559
1560 /* Output all of the active ssa names to the ssa_names stream. */
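/* For each name we emit its version number, a default-def flag and
   either SSA_NAME_VAR or, when that is absent, the name's type; entries
   on the free list and virtual operands are skipped.  */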
1561
1562 static void
1563 output_ssa_names (struct output_block *ob, struct function *fn)
1564 {
1565 unsigned int i, len;
1566
1567 len = vec_safe_length (SSANAMES (fn));
1568 streamer_write_uhwi (ob, len);
1569
1570 for (i = 1; i < len; i++)
1571 {
1572 tree ptr = (*SSANAMES (fn))[i];
1573
1574 if (ptr == NULL_TREE
1575 || SSA_NAME_IN_FREE_LIST (ptr)
1576 || virtual_operand_p (ptr))
1577 continue;
1578
1579 streamer_write_uhwi (ob, i);
1580 streamer_write_char_stream (ob->main_stream,
1581 SSA_NAME_IS_DEFAULT_DEF (ptr));
1582 if (SSA_NAME_VAR (ptr))
1583 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1584 else
1585 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1586 stream_write_tree (ob, TREE_TYPE (ptr), true);
1587 }
1588
1589 streamer_write_zero (ob);
1590 }
1591
1592
1593 /* Output a wide-int. */
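/* The on-disk format is the precision, the element count, and then the
   elements themselves, least significant first.  */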
1594
1595 static void
1596 streamer_write_wi (struct output_block *ob,
1597 const widest_int &w)
1598 {
1599 int len = w.get_len ();
1600
1601 streamer_write_uhwi (ob, w.get_precision ());
1602 streamer_write_uhwi (ob, len);
1603 for (int i = 0; i < len; i++)
1604 streamer_write_hwi (ob, w.elt (i));
1605 }
1606
1607
1608 /* Output the cfg. */
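/* The CFG stream records the profile status, the highest basic block
   index, every block's successor edges (destination, probability, count
   and flags), the linear next_bb chain, and finally the loop tree with
   the bounds and OMP SIMD information that copy_loop_info copies.  */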
1609
1610 static void
1611 output_cfg (struct output_block *ob, struct function *fn)
1612 {
1613 struct lto_output_stream *tmp_stream = ob->main_stream;
1614 basic_block bb;
1615
1616 ob->main_stream = ob->cfg_stream;
1617
1618 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1619 profile_status_for_fn (fn));
1620
1621 /* Output the number of the highest basic block. */
1622 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1623
1624 FOR_ALL_BB_FN (bb, fn)
1625 {
1626 edge_iterator ei;
1627 edge e;
1628
1629 streamer_write_hwi (ob, bb->index);
1630
1631 /* Output the successors and the edge flags. */
1632 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1633 FOR_EACH_EDGE (e, ei, bb->succs)
1634 {
1635 streamer_write_uhwi (ob, e->dest->index);
1636 streamer_write_hwi (ob, e->probability);
1637 streamer_write_gcov_count (ob, e->count);
1638 streamer_write_uhwi (ob, e->flags);
1639 }
1640 }
1641
1642 streamer_write_hwi (ob, -1);
1643
1644 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1645 while (bb->next_bb)
1646 {
1647 streamer_write_hwi (ob, bb->next_bb->index);
1648 bb = bb->next_bb;
1649 }
1650
1651 streamer_write_hwi (ob, -1);
1652
1653 /* ??? The cfgloop interface is tied to cfun. */
1654 gcc_assert (cfun == fn);
1655
1656 /* Output the number of loops. */
1657 streamer_write_uhwi (ob, number_of_loops (fn));
1658
1659 /* Output each loop, skipping the tree root which has number zero. */
1660 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1661 {
1662 struct loop *loop = get_loop (fn, i);
1663
1664 /* Write the index of the loop header. That's enough to rebuild
1665 the loop tree on the reader side. Stream -1 for an unused
1666 loop entry. */
1667 if (!loop)
1668 {
1669 streamer_write_hwi (ob, -1);
1670 continue;
1671 }
1672 else
1673 streamer_write_hwi (ob, loop->header->index);
1674
1675 /* Write everything copy_loop_info copies. */
1676 streamer_write_enum (ob->main_stream,
1677 loop_estimation, EST_LAST, loop->estimate_state);
1678 streamer_write_hwi (ob, loop->any_upper_bound);
1679 if (loop->any_upper_bound)
1680 streamer_write_wi (ob, loop->nb_iterations_upper_bound);
1681 streamer_write_hwi (ob, loop->any_estimate);
1682 if (loop->any_estimate)
1683 streamer_write_wi (ob, loop->nb_iterations_estimate);
1684
1685 /* Write OMP SIMD related info. */
1686 streamer_write_hwi (ob, loop->safelen);
1687 streamer_write_hwi (ob, loop->dont_vectorize);
1688 streamer_write_hwi (ob, loop->force_vectorize);
1689 stream_write_tree (ob, loop->simduid, true);
1690 }
1691
1692 ob->main_stream = tmp_stream;
1693 }
1694
1695
1696 /* Create the header in the file using OB. If the section type is for
1697 a function, set FN to the decl for that function. */
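/* The emitted section is an lto_function_header followed by the CFG
   stream (function bodies only), the main stream and the string table,
   in that order.  */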
1698
1699 void
1700 produce_asm (struct output_block *ob, tree fn)
1701 {
1702 enum lto_section_type section_type = ob->section_type;
1703 struct lto_function_header header;
1704 char *section_name;
1705 struct lto_output_stream *header_stream;
1706
1707 if (section_type == LTO_section_function_body)
1708 {
1709 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1710 section_name = lto_get_section_name (section_type, name, NULL);
1711 }
1712 else
1713 section_name = lto_get_section_name (section_type, NULL, NULL);
1714
1715 lto_begin_section (section_name, !flag_wpa);
1716 free (section_name);
1717
1718 /* The entire header is computed here and then streamed out below. */
1719 memset (&header, 0, sizeof (struct lto_function_header));
1720
1721 /* Write the header. */
1722 header.lto_header.major_version = LTO_major_version;
1723 header.lto_header.minor_version = LTO_minor_version;
1724
1725 header.compressed_size = 0;
1726
1727 if (section_type == LTO_section_function_body)
1728 header.cfg_size = ob->cfg_stream->total_size;
1729 header.main_size = ob->main_stream->total_size;
1730 header.string_size = ob->string_stream->total_size;
1731
1732 header_stream = XCNEW (struct lto_output_stream);
1733 lto_output_data_stream (header_stream, &header, sizeof header);
1734 lto_write_stream (header_stream);
1735 free (header_stream);
1736
1737 /* Put all of the gimple and the string table out to the asm file as a
1738 block of text. */
1739 if (section_type == LTO_section_function_body)
1740 lto_write_stream (ob->cfg_stream);
1741 lto_write_stream (ob->main_stream);
1742 lto_write_stream (ob->string_stream);
1743
1744 lto_end_section ();
1745 }
1746
1747
1748 /* Output the base body of struct function FN using output block OB. */
1749
1750 static void
1751 output_struct_function_base (struct output_block *ob, struct function *fn)
1752 {
1753 struct bitpack_d bp;
1754 unsigned i;
1755 tree t;
1756
1757 /* Output the static chain and non-local goto save area. */
1758 stream_write_tree (ob, fn->static_chain_decl, true);
1759 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
1760
1761 /* Output all the local variables in the function. */
1762 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
1763 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
1764 stream_write_tree (ob, t, true);
1765
1766 /* Output current IL state of the function. */
1767 streamer_write_uhwi (ob, fn->curr_properties);
1768
1769 /* Write all the attributes for FN. */
1770 bp = bitpack_create (ob->main_stream);
1771 bp_pack_value (&bp, fn->is_thunk, 1);
1772 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
1773 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
1774 bp_pack_value (&bp, fn->returns_struct, 1);
1775 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
1776 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
1777 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
1778 bp_pack_value (&bp, fn->after_inlining, 1);
1779 bp_pack_value (&bp, fn->stdarg, 1);
1780 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
1781 bp_pack_value (&bp, fn->calls_alloca, 1);
1782 bp_pack_value (&bp, fn->calls_setjmp, 1);
1783 bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
1784 bp_pack_value (&bp, fn->has_simduid_loops, 1);
1785 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
1786 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
1787
1788 /* Output the function start and end loci. */
1789 stream_output_location (ob, &bp, fn->function_start_locus);
1790 stream_output_location (ob, &bp, fn->function_end_locus);
1791
1792 streamer_write_bitpack (&bp);
1793 }
1794
1795
1796 /* Output the body of function NODE->DECL. */
1797
1798 static void
1799 output_function (struct cgraph_node *node)
1800 {
1801 tree function;
1802 struct function *fn;
1803 basic_block bb;
1804 struct output_block *ob;
1805
1806 function = node->decl;
1807 fn = DECL_STRUCT_FUNCTION (function);
1808 ob = create_output_block (LTO_section_function_body);
1809
1810 clear_line_info (ob);
1811 ob->symbol = node;
1812
1813 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
1814
1815 /* Set current_function_decl and cfun. */
1816 push_cfun (fn);
1817
1818 /* Make string 0 be a NULL string. */
1819 streamer_write_char_stream (ob->string_stream, 0);
1820
1821 streamer_write_record_start (ob, LTO_function);
1822
1823 /* Output decls for the function result and arguments. */
1824 stream_write_tree (ob, DECL_RESULT (function), true);
1825 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
1826
1827 /* Output DECL_INITIAL for the function, which contains the tree of
1828 lexical scopes. */
1829 stream_write_tree (ob, DECL_INITIAL (function), true);
1830
1831 /* We also stream abstract functions, for which only the information
1832 needed for debug info is streamed. */
1833 if (gimple_has_body_p (function))
1834 {
1835 streamer_write_uhwi (ob, 1);
1836 output_struct_function_base (ob, fn);
1837
1838 /* Output all the SSA names used in the function. */
1839 output_ssa_names (ob, fn);
1840
1841 /* Output any exception handling regions. */
1842 output_eh_regions (ob, fn);
1843
1844
1845 /* We will renumber the statements. The code that does this uses
1846 the same ordering that we use for serializing them so we can use
1847 the same code on the other end and not have to write out the
1848 statement numbers. We do not assign UIDs to PHIs here because
1849 virtual PHIs get re-computed on-the-fly which would make numbers
1850 inconsistent. */
1851 set_gimple_stmt_max_uid (cfun, 0);
1852 FOR_ALL_BB_FN (bb, cfun)
1853 {
1854 gimple_stmt_iterator gsi;
1855 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1856 {
1857 gimple stmt = gsi_stmt (gsi);
1858
1859 /* Virtual PHIs are not going to be streamed. */
1860 if (!virtual_operand_p (gimple_phi_result (stmt)))
1861 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1862 }
1863 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1864 {
1865 gimple stmt = gsi_stmt (gsi);
1866 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1867 }
1868 }
1869 /* To avoid keeping duplicate gimple IDs in the statements, renumber
1870 virtual phis now. */
1871 FOR_ALL_BB_FN (bb, cfun)
1872 {
1873 gimple_stmt_iterator gsi;
1874 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1875 {
1876 gimple stmt = gsi_stmt (gsi);
1877 if (virtual_operand_p (gimple_phi_result (stmt)))
1878 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1879 }
1880 }
1881
1882 /* Output the code for the function. */
1883 FOR_ALL_BB_FN (bb, fn)
1884 output_bb (ob, bb, fn);
1885
1886 /* The terminator for this function. */
1887 streamer_write_record_start (ob, LTO_null);
1888
1889 output_cfg (ob, fn);
1890
1891 pop_cfun ();
1892 }
1893 else
1894 streamer_write_uhwi (ob, 0);
1895
1896 /* Create a section to hold the pickled output of this function. */
1897 produce_asm (ob, function);
1898
1899 destroy_output_block (ob);
1900 }
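
/* Editorial note, illustrative only: output_function writes a single
   uhwi after DECL_INITIAL -- 1 when a GIMPLE body follows, 0 for
   abstract functions streamed only for debug info -- so the matching
   reader (in lto-streamer-in.c) branches on that value.  A minimal
   sketch, with IB an assumed input block at that position:  */
#if 0
  if (streamer_read_uhwi (ib))
    {
      /* Base info, SSA names, EH regions, the basic blocks, the
	 LTO_null terminator and finally the CFG, in the order
	 written above.  */
    }
  /* else: no body was streamed for this function.  */
#endif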
1901
1902 /* Output the constructor (DECL_INITIAL) of variable NODE->DECL. */
1903
1904 static void
1905 output_constructor (struct varpool_node *node)
1906 {
1907 tree var = node->decl;
1908 struct output_block *ob;
1909
1910 ob = create_output_block (LTO_section_function_body);
1911
1912 clear_line_info (ob);
1913 ob->symbol = node;
1914
1915 /* Make string 0 be a NULL string. */
1916 streamer_write_char_stream (ob->string_stream, 0);
1917
1918 /* Output DECL_INITIAL for the variable, which contains
1919 its constructor. */
1920 stream_write_tree (ob, DECL_INITIAL (var), true);
1921
1922 /* Create a section to hold the pickled output of this constructor. */
1923 produce_asm (ob, var);
1924
1925 destroy_output_block (ob);
1926 }
1927
1928
1929 /* Emit toplevel asms. */
1930
1931 void
1932 lto_output_toplevel_asms (void)
1933 {
1934 struct output_block *ob;
1935 struct asm_node *can;
1936 char *section_name;
1937 struct lto_output_stream *header_stream;
1938 struct lto_asm_header header;
1939
1940 if (! asm_nodes)
1941 return;
1942
1943 ob = create_output_block (LTO_section_asm);
1944
1945 /* Make string 0 be a NULL string. */
1946 streamer_write_char_stream (ob->string_stream, 0);
1947
1948 for (can = asm_nodes; can; can = can->next)
1949 {
1950 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
1951 streamer_write_hwi (ob, can->order);
1952 }
1953
1954 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
1955
1956 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
1957 lto_begin_section (section_name, !flag_wpa);
1958 free (section_name);
1959
1960 /* The entire header stream is computed here. */
1961 memset (&header, 0, sizeof (header));
1962
1963 /* Write the header. */
1964 header.lto_header.major_version = LTO_major_version;
1965 header.lto_header.minor_version = LTO_minor_version;
1966
1967 header.main_size = ob->main_stream->total_size;
1968 header.string_size = ob->string_stream->total_size;
1969
1970 header_stream = XCNEW (struct lto_output_stream);
1971 lto_output_data_stream (header_stream, &header, sizeof (header));
1972 lto_write_stream (header_stream);
1973 free (header_stream);
1974
1975 /* Put all of the asm strings and the string table out to the asm
1976 file as a block of text. */
1977 lto_write_stream (ob->main_stream);
1978 lto_write_stream (ob->string_stream);
1979
1980 lto_end_section ();
1981
1982 destroy_output_block (ob);
1983 }
1984
1985
1986 /* Copy the function body or variable constructor of NODE without deserializing. */
1987
1988 static void
1989 copy_function_or_variable (struct symtab_node *node)
1990 {
1991 tree function = node->decl;
1992 struct lto_file_decl_data *file_data = node->lto_file_data;
1993 struct lto_output_stream *output_stream = XCNEW (struct lto_output_stream);
1994 const char *data;
1995 size_t len;
1996 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
1997 char *section_name =
1998 lto_get_section_name (LTO_section_function_body, name, NULL);
1999 size_t i, j;
2000 struct lto_in_decl_state *in_state;
2001 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2002
2003 lto_begin_section (section_name, !flag_wpa);
2004 free (section_name);
2005
2006 /* We may have renamed the declaration, e.g., a static function. */
2007 name = lto_get_decl_name_mapping (file_data, name);
2008
2009 data = lto_get_section_data (file_data, LTO_section_function_body,
2010 name, &len);
2011 gcc_assert (data);
2012
2013 /* Do a bit copy of the function body. */
2014 lto_output_data_stream (output_stream, data, len);
2015 lto_write_stream (output_stream);
2016
2017 /* Copy decls. */
2018 in_state =
2019 lto_get_function_in_decl_state (node->lto_file_data, function);
2020 gcc_assert (in_state);
2021
2022 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2023 {
2024 size_t n = in_state->streams[i].size;
2025 tree *trees = in_state->streams[i].trees;
2026 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2027
2028 /* The out state must have the same indices as the in state,
2029 so just copy the vector. All the encoders in the out state
2030 must still be empty when we reach here. */
2031 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2032 encoder->trees.reserve_exact (n);
2033 for (j = 0; j < n; j++)
2034 encoder->trees.safe_push (trees[j]);
2035 }
2036
2037 lto_free_section_data (file_data, LTO_section_function_body, name,
2038 data, len);
2039 free (output_stream);
2040 lto_end_section ();
2041 }
2042
2043 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2044
2045 static tree
2046 wrap_refs (tree *tp, int *ws, void *)
2047 {
2048 tree t = *tp;
2049 if (handled_component_p (t)
2050 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL)
2051 {
2052 tree decl = TREE_OPERAND (t, 0);
2053 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2054 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2055 build1 (ADDR_EXPR, ptrtype, decl),
2056 build_int_cst (ptrtype, 0));
2057 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2058 *ws = 0;
2059 }
2060 else if (TREE_CODE (t) == CONSTRUCTOR)
2061 ;
2062 else if (!EXPR_P (t))
2063 *ws = 0;
2064 return NULL_TREE;
2065 }
2066
2067 /* Main entry point from the pass manager. */
2068
2069 void
2070 lto_output (void)
2071 {
2072 struct lto_out_decl_state *decl_state;
2073 #ifdef ENABLE_CHECKING
2074 bitmap output = lto_bitmap_alloc ();
2075 #endif
2076 int i, n_nodes;
2077 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2078
2079 /* Initialize the streamer. */
2080 lto_streamer_init ();
2081
2082 n_nodes = lto_symtab_encoder_size (encoder);
2083 /* Process the functions with bodies and the variable initializers to be streamed. */
2084 for (i = 0; i < n_nodes; i++)
2085 {
2086 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2087 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2088 {
2089 if (lto_symtab_encoder_encode_body_p (encoder, node)
2090 && !node->alias)
2091 {
2092 #ifdef ENABLE_CHECKING
2093 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2094 bitmap_set_bit (output, DECL_UID (node->decl));
2095 #endif
2096 decl_state = lto_new_out_decl_state ();
2097 lto_push_out_decl_state (decl_state);
2098 if (gimple_has_body_p (node->decl) || !flag_wpa)
2099 output_function (node);
2100 else
2101 copy_function_or_variable (node);
2102 gcc_assert (lto_get_out_decl_state () == decl_state);
2103 lto_pop_out_decl_state ();
2104 lto_record_function_out_decl_state (node->decl, decl_state);
2105 }
2106 }
2107 else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2108 {
2109 /* Wrap symbol references inside the ctor in a
2110 type-preserving MEM_REF. */
2111 tree ctor = DECL_INITIAL (node->decl);
2112 if (ctor && !in_lto_p)
2113 walk_tree (&ctor, wrap_refs, NULL, NULL);
2114 if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2115 && lto_symtab_encoder_encode_initializer_p (encoder, node)
2116 && !node->alias)
2117 {
2118 timevar_push (TV_IPA_LTO_CTORS_OUT);
2119 #ifdef ENABLE_CHECKING
2120 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2121 bitmap_set_bit (output, DECL_UID (node->decl));
2122 #endif
2123 decl_state = lto_new_out_decl_state ();
2124 lto_push_out_decl_state (decl_state);
2125 if (DECL_INITIAL (node->decl) != error_mark_node
2126 || !flag_wpa)
2127 output_constructor (node);
2128 else
2129 copy_function_or_variable (node);
2130 gcc_assert (lto_get_out_decl_state () == decl_state);
2131 lto_pop_out_decl_state ();
2132 lto_record_function_out_decl_state (node->decl, decl_state);
2133 timevar_pop (TV_IPA_LTO_CTORS_OUT);
2134 }
2135 }
2136 }
2137
2138 /* Emit the callgraph after emitting function bodies. This needs to
2139 be done now to make sure that all the statements in every function
2140 have been renumbered so that edges can be associated with call
2141 statements using the statement UIDs. */
2142 output_symtab ();
2143
2144 #ifdef ENABLE_CHECKING
2145 lto_bitmap_free (output);
2146 #endif
2147 }
2148
2149 /* Write each node encoded by ENCODER to OB, as well as those reachable
2150 from it and required for correct representation of its semantics.
2151 Each node in ENCODER must be a global declaration or a type. A node
2152 is written only once, even if it appears multiple times in the
2153 vector. Certain transitively-reachable nodes, such as those
2154 representing expressions, may be duplicated, but such nodes
2155 must not appear in ENCODER itself. */
2156
2157 static void
2158 write_global_stream (struct output_block *ob,
2159 struct lto_tree_ref_encoder *encoder)
2160 {
2161 tree t;
2162 size_t index;
2163 const size_t size = lto_tree_ref_encoder_size (encoder);
2164
2165 for (index = 0; index < size; index++)
2166 {
2167 t = lto_tree_ref_encoder_get_tree (encoder, index);
2168 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2169 stream_write_tree (ob, t, false);
2170 }
2171 }
2172
2173
2174 /* Write a sequence of indices into the globals vector corresponding
2175 to the trees in ENCODER. These are used by the reader to map the
2176 indices used to refer to global entities within function bodies to
2177 their referents. */
2178
2179 static void
2180 write_global_references (struct output_block *ob,
2181 struct lto_output_stream *ref_stream,
2182 struct lto_tree_ref_encoder *encoder)
2183 {
2184 tree t;
2185 uint32_t index;
2186 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2187
2188 /* Write size as 32-bit unsigned. */
2189 lto_output_data_stream (ref_stream, &size, sizeof (int32_t));
2190
2191 for (index = 0; index < size; index++)
2192 {
2193 uint32_t slot_num;
2194
2195 t = lto_tree_ref_encoder_get_tree (encoder, index);
2196 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2197 gcc_assert (slot_num != (unsigned)-1);
2198 lto_output_data_stream (ref_stream, &slot_num, sizeof slot_num);
2199 }
2200 }
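
/* Editorial sketch, not GCC code: the block written above is a 32-bit
   element count followed by that many 32-bit slot numbers into the
   writer cache.  A minimal standalone decoder over a raw byte buffer,
   assuming the host byte order used by the writer; SLOTS_OUT must
   already have room for the decoded entries.  */
#if 0
static const char *
read_ref_block (const char *p, uint32_t *slots_out, uint32_t *count_out)
{
  memcpy (count_out, p, sizeof (uint32_t));
  p += sizeof (uint32_t);
  memcpy (slots_out, p, *count_out * sizeof (uint32_t));
  return p + *count_out * sizeof (uint32_t);
}
#endif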
2201
2202
2203 /* Write all the streams in an lto_out_decl_state STATE using
2204 output block OB. */
2205
2206 void
2207 lto_output_decl_state_streams (struct output_block *ob,
2208 struct lto_out_decl_state *state)
2209 {
2210 int i;
2211
2212 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2213 write_global_stream (ob, &state->streams[i]);
2214 }
2215
2216
2217 /* Write all the references in an lto_out_decl_state STATE using
2218 output block OB and output stream OUT_STREAM. */
2219
2220 void
2221 lto_output_decl_state_refs (struct output_block *ob,
2222 struct lto_output_stream *out_stream,
2223 struct lto_out_decl_state *state)
2224 {
2225 unsigned i;
2226 uint32_t ref;
2227 tree decl;
2228
2229 /* Write a reference to the FUNCTION_DECL. If there is no function,
2230 write a reference to void_type_node. */
2231 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2232 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2233 gcc_assert (ref != (unsigned)-1);
2234 lto_output_data_stream (out_stream, &ref, sizeof (uint32_t));
2235
2236 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2237 write_global_references (ob, out_stream, &state->streams[i]);
2238 }
2239
2240
2241 /* Return the written size of STATE. */
2242
2243 static size_t
2244 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2245 {
2246 int i;
2247 size_t size;
2248
2249 size = sizeof (int32_t); /* fn_ref. */
2250 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2251 {
2252 size += sizeof (int32_t); /* vector size. */
2253 size += (lto_tree_ref_encoder_size (&state->streams[i])
2254 * sizeof (int32_t));
2255 }
2256 return size;
2257 }
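
/* Editorial note: with stream sizes n_0 ... n_{k-1}, where
   k == LTO_N_DECL_STREAMS, the value computed above is

     4 + sum_i (4 + 4 * n_i)  bytes,

   i.e. one 32-bit function reference plus, per stream, a 32-bit length
   and n_i 32-bit slot numbers -- exactly what lto_output_decl_state_refs
   emits above.  */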
2258
2259
2260 /* Write symbol T into STREAM, using CACHE to look up its slot number.
2261 SEEN holds the symbols written so far. */
2262
2263 static void
2264 write_symbol (struct streamer_tree_cache_d *cache,
2265 struct lto_output_stream *stream,
2266 tree t, struct pointer_set_t *seen, bool alias)
2267 {
2268 const char *name;
2269 enum gcc_plugin_symbol_kind kind;
2270 enum gcc_plugin_symbol_visibility visibility;
2271 unsigned slot_num;
2272 uint64_t size;
2273 const char *comdat;
2274 unsigned char c;
2275
2276 /* None of the following kinds of symbols are needed in the
2277 symbol table. */
2278 if (!TREE_PUBLIC (t)
2279 || is_builtin_fn (t)
2280 || DECL_ABSTRACT (t)
2281 || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
2282 return;
2283 gcc_assert (TREE_CODE (t) != RESULT_DECL);
2284
2285 gcc_assert (TREE_CODE (t) == VAR_DECL
2286 || TREE_CODE (t) == FUNCTION_DECL);
2287
2288 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2289
2290 /* This behaves like assemble_name_raw in varasm.c, performing the
2291 same name manipulations that ASM_OUTPUT_LABELREF does. */
2292 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2293
2294 if (pointer_set_contains (seen, name))
2295 return;
2296 pointer_set_insert (seen, name);
2297
2298 streamer_tree_cache_lookup (cache, t, &slot_num);
2299 gcc_assert (slot_num != (unsigned)-1);
2300
2301 if (DECL_EXTERNAL (t))
2302 {
2303 if (DECL_WEAK (t))
2304 kind = GCCPK_WEAKUNDEF;
2305 else
2306 kind = GCCPK_UNDEF;
2307 }
2308 else
2309 {
2310 if (DECL_WEAK (t))
2311 kind = GCCPK_WEAKDEF;
2312 else if (DECL_COMMON (t))
2313 kind = GCCPK_COMMON;
2314 else
2315 kind = GCCPK_DEF;
2316
2317 /* When something is defined, it should have a node attached. */
2318 gcc_assert (alias || TREE_CODE (t) != VAR_DECL
2319 || varpool_get_node (t)->definition);
2320 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2321 || (cgraph_get_node (t)
2322 && cgraph_get_node (t)->definition));
2323 }
2324
2325 /* Imitate what default_elf_asm_output_external does.
2326 When a symbol is external, we need to output it with DEFAULT visibility
2327 when compiling with -fvisibility=default, but with HIDDEN visibility
2328 when the symbol has attribute ((visibility ("hidden"))) specified.
2329 targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
2330 right. */
2331
2332 if (DECL_EXTERNAL (t)
2333 && !targetm.binds_local_p (t))
2334 visibility = GCCPV_DEFAULT;
2335 else
2336 switch (DECL_VISIBILITY (t))
2337 {
2338 case VISIBILITY_DEFAULT:
2339 visibility = GCCPV_DEFAULT;
2340 break;
2341 case VISIBILITY_PROTECTED:
2342 visibility = GCCPV_PROTECTED;
2343 break;
2344 case VISIBILITY_HIDDEN:
2345 visibility = GCCPV_HIDDEN;
2346 break;
2347 case VISIBILITY_INTERNAL:
2348 visibility = GCCPV_INTERNAL;
2349 break;
2350 }
2351
2352 if (kind == GCCPK_COMMON
2353 && DECL_SIZE_UNIT (t)
2354 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2355 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2356 else
2357 size = 0;
2358
2359 if (DECL_ONE_ONLY (t))
2360 comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
2361 else
2362 comdat = "";
2363
2364 lto_output_data_stream (stream, name, strlen (name) + 1);
2365 lto_output_data_stream (stream, comdat, strlen (comdat) + 1);
2366 c = (unsigned char) kind;
2367 lto_output_data_stream (stream, &c, 1);
2368 c = (unsigned char) visibility;
2369 lto_output_data_stream (stream, &c, 1);
2370 lto_output_data_stream (stream, &size, 8);
2371 lto_output_data_stream (stream, &slot_num, 4);
2372 }
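
/* Editorial sketch, not GCC code: each record emitted by write_symbol is

     name\0  comdat\0  kind (1 byte)  visibility (1 byte)
     size (8 bytes)  slot (4 bytes)

   in host byte order.  A minimal standalone decoder, for illustration
   only; the actual consumer is the LTO linker plugin.  */
#if 0
static const char *
read_symbol_record (const char *p, const char **name, const char **comdat,
		    unsigned char *kind, unsigned char *visibility,
		    uint64_t *size, uint32_t *slot)
{
  *name = p;
  p += strlen (p) + 1;
  *comdat = p;
  p += strlen (p) + 1;
  *kind = (unsigned char) *p++;
  *visibility = (unsigned char) *p++;
  memcpy (size, p, 8);
  p += 8;
  memcpy (slot, p, 4);
  p += 4;
  return p;
}
#endif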
2373
2374 /* Return true if NODE should appear in the plugin symbol table. */
2375
2376 bool
2377 output_symbol_p (symtab_node *node)
2378 {
2379 struct cgraph_node *cnode;
2380 if (!symtab_real_symbol_p (node))
2381 return false;
2382 /* We keep external functions in the symtab for the sake of inlining
2383 and devirtualization. We do not want to see them in the symbol table
2384 as references unless they are really used. */
2385 cnode = dyn_cast <cgraph_node *> (node);
2386 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2387 && cnode->callers)
2388 return true;
2389
2390 /* Ignore all references from initializers of external variables - they are
2391 not really part of the compilation unit until they are used by folding.
2392 Some symbols, like references to external construction vtables, cannot be
2393 referred to at all. We decide this in can_refer_decl_in_current_unit_p. */
2394 if (!node->definition || DECL_EXTERNAL (node->decl))
2395 {
2396 int i;
2397 struct ipa_ref *ref;
2398 for (i = 0; node->iterate_referring (i, ref); i++)
2399 {
2400 if (ref->use == IPA_REF_ALIAS)
2401 continue;
2402 if (is_a <cgraph_node *> (ref->referring))
2403 return true;
2404 if (!DECL_EXTERNAL (ref->referring->decl))
2405 return true;
2406 }
2407 return false;
2408 }
2409 return true;
2410 }
2411
2412
2413 /* Write an IL symbol table to OB. The symbols to output are taken
2414 from the symtab node encoder in OB's decl state. */
2415
2416 static void
2417 produce_symtab (struct output_block *ob)
2418 {
2419 struct streamer_tree_cache_d *cache = ob->writer_cache;
2420 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2421 struct pointer_set_t *seen;
2422 struct lto_output_stream stream;
2423 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2424 lto_symtab_encoder_iterator lsei;
2425
2426 lto_begin_section (section_name, false);
2427 free (section_name);
2428
2429 seen = pointer_set_create ();
2430 memset (&stream, 0, sizeof (stream));
2431
2432 /* Write the symbol table.
2433 First write everything defined and then all declarations.
2434 This is necessary to handle cases where we have duplicated symbols. */
2435 for (lsei = lsei_start (encoder);
2436 !lsei_end_p (lsei); lsei_next (&lsei))
2437 {
2438 symtab_node *node = lsei_node (lsei);
2439
2440 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2441 continue;
2442 write_symbol (cache, &stream, node->decl, seen, false);
2443 }
2444 for (lsei = lsei_start (encoder);
2445 !lsei_end_p (lsei); lsei_next (&lsei))
2446 {
2447 symtab_node *node = lsei_node (lsei);
2448
2449 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2450 continue;
2451 write_symbol (cache, &stream, node->decl, seen, false);
2452 }
2453
2454 lto_write_stream (&stream);
2455 pointer_set_destroy (seen);
2456
2457 lto_end_section ();
2458 }
2459
2460
2461 /* This pass is run after all of the functions are serialized and all
2462 of the IPA passes have written their serialized forms. It causes the
2463 vector of all of the global decls and types used from this file to be
2464 written into a section that can then be read back in to recover these
2465 on the other side. */
2466
2467 void
2468 produce_asm_for_decls (void)
2469 {
2470 struct lto_out_decl_state *out_state;
2471 struct lto_out_decl_state *fn_out_state;
2472 struct lto_decl_header header;
2473 char *section_name;
2474 struct output_block *ob;
2475 struct lto_output_stream *header_stream, *decl_state_stream;
2476 unsigned idx, num_fns;
2477 size_t decl_state_size;
2478 int32_t num_decl_states;
2479
2480 ob = create_output_block (LTO_section_decls);
2481 ob->global = true;
2482
2483 memset (&header, 0, sizeof (struct lto_decl_header));
2484
2485 section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
2486 lto_begin_section (section_name, !flag_wpa);
2487 free (section_name);
2488
2489 /* Make string 0 be a NULL string. */
2490 streamer_write_char_stream (ob->string_stream, 0);
2491
2492 gcc_assert (!alias_pairs);
2493
2494 /* Get rid of the global decl state hash tables to save some memory. */
2495 out_state = lto_get_out_decl_state ();
2496 for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
2497 if (out_state->streams[i].tree_hash_table)
2498 {
2499 delete out_state->streams[i].tree_hash_table;
2500 out_state->streams[i].tree_hash_table = NULL;
2501 }
2502
2503 /* Write the global symbols. */
2504 lto_output_decl_state_streams (ob, out_state);
2505 num_fns = lto_function_decl_states.length ();
2506 for (idx = 0; idx < num_fns; idx++)
2507 {
2508 fn_out_state =
2509 lto_function_decl_states[idx];
2510 lto_output_decl_state_streams (ob, fn_out_state);
2511 }
2512
2513 header.lto_header.major_version = LTO_major_version;
2514 header.lto_header.minor_version = LTO_minor_version;
2515
2516 /* Currently not used. This field would allow us to preallocate
2517 the globals vector, so that it need not be resized as it is extended. */
2518 header.num_nodes = -1;
2519
2520 /* Compute the total size of all decl out states. */
2521 decl_state_size = sizeof (int32_t);
2522 decl_state_size += lto_out_decl_state_written_size (out_state);
2523 for (idx = 0; idx < num_fns; idx++)
2524 {
2525 fn_out_state =
2526 lto_function_decl_states[idx];
2527 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
2528 }
2529 header.decl_state_size = decl_state_size;
2530
2531 header.main_size = ob->main_stream->total_size;
2532 header.string_size = ob->string_stream->total_size;
2533
2534 header_stream = XCNEW (struct lto_output_stream);
2535 lto_output_data_stream (header_stream, &header, sizeof header);
2536 lto_write_stream (header_stream);
2537 free (header_stream);
2538
2539 /* Write the main out-decl state, followed by out-decl states of
2540 functions. */
2541 decl_state_stream = XCNEW (struct lto_output_stream);
2542 num_decl_states = num_fns + 1;
2543 lto_output_data_stream (decl_state_stream, &num_decl_states,
2544 sizeof (num_decl_states));
2545 lto_output_decl_state_refs (ob, decl_state_stream, out_state);
2546 for (idx = 0; idx < num_fns; idx++)
2547 {
2548 fn_out_state =
2549 lto_function_decl_states[idx];
2550 lto_output_decl_state_refs (ob, decl_state_stream, fn_out_state);
2551 }
2552 lto_write_stream (decl_state_stream);
2553 free (decl_state_stream);
2554
2555 lto_write_stream (ob->main_stream);
2556 lto_write_stream (ob->string_stream);
2557
2558 lto_end_section ();
2559
2560 /* Write the symbol table. It is used by the linker to determine
2561 dependencies, and thus we can skip it for WPA. */
2562 if (!flag_wpa)
2563 produce_symtab (ob);
2564
2565 /* Write command line opts. */
2566 lto_write_options ();
2567
2568 /* Deallocate memory and clean up. */
2569 for (idx = 0; idx < num_fns; idx++)
2570 {
2571 fn_out_state =
2572 lto_function_decl_states[idx];
2573 lto_delete_out_decl_state (fn_out_state);
2574 }
2575 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
2576 lto_function_decl_states.release ();
2577 destroy_output_block (ob);
2578 }
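
/* Editorial note, illustrative only: the LTO_section_decls payload
   written above is laid out as

     [lto_decl_header]
     [int32 num_decl_states][global decl state refs][per-function refs]
     [main stream: the pickled global trees]
     [string stream]

   with decl_state_size, main_size and string_size in the header giving
   the sizes of the decl-state block, the main stream and the string
   stream respectively.  This summarizes the writes above; the actual
   reader lives elsewhere in the LTO front end.  */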