1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2014 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "stor-layout.h"
29 #include "stringpool.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "hashtab.h"
35 #include "basic-block.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-expr.h"
39 #include "is-a.h"
40 #include "gimple.h"
41 #include "gimple-iterator.h"
42 #include "gimple-ssa.h"
43 #include "tree-ssanames.h"
44 #include "tree-pass.h"
45 #include "function.h"
46 #include "diagnostic-core.h"
47 #include "except.h"
48 #include "lto-symtab.h"
49 #include "lto-streamer.h"
50 #include "data-streamer.h"
51 #include "gimple-streamer.h"
52 #include "tree-streamer.h"
53 #include "streamer-hooks.h"
54 #include "cfgloop.h"
55
56
57 static void lto_write_tree (struct output_block*, tree, bool);
58
59 /* Clear the line info stored in output block OB. */
60
61 static void
62 clear_line_info (struct output_block *ob)
63 {
64 ob->current_file = NULL;
65 ob->current_line = 0;
66 ob->current_col = 0;
67 }
68
69
70 /* Create the output block and return it. SECTION_TYPE is
71 LTO_section_function_body or LTO_section_static_initializer. */
72
73 struct output_block *
74 create_output_block (enum lto_section_type section_type)
75 {
76 struct output_block *ob = XCNEW (struct output_block);
77
78 ob->section_type = section_type;
79 ob->decl_state = lto_get_out_decl_state ();
80 ob->main_stream = XCNEW (struct lto_output_stream);
81 ob->string_stream = XCNEW (struct lto_output_stream);
82 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
83
84 if (section_type == LTO_section_function_body)
85 ob->cfg_stream = XCNEW (struct lto_output_stream);
86
87 clear_line_info (ob);
88
89 ob->string_hash_table.create (37);
90 gcc_obstack_init (&ob->obstack);
91
92 return ob;
93 }
94
95
96 /* Destroy the output block OB. */
97
98 void
99 destroy_output_block (struct output_block *ob)
100 {
101 enum lto_section_type section_type = ob->section_type;
102
103 ob->string_hash_table.dispose ();
104
105 free (ob->main_stream);
106 free (ob->string_stream);
107 if (section_type == LTO_section_function_body)
108 free (ob->cfg_stream);
109
110 streamer_tree_cache_delete (ob->writer_cache);
111 obstack_free (&ob->obstack, NULL);
112
113 free (ob);
114 }
115
116
117 /* Look up NODE in the type table and write the index for it to OB. */
118
119 static void
120 output_type_ref (struct output_block *ob, tree node)
121 {
122 streamer_write_record_start (ob, LTO_type_ref);
123 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
124 }
125
126
127 /* Return true if tree node T is written to various tables. For these
128 nodes, we sometimes want to write their physical representation
129 (via lto_output_tree), and sometimes we need to emit an index
130 reference into a table (via lto_output_tree_ref). */
131
132 static bool
133 tree_is_indexable (tree t)
134 {
135 /* Parameters and return values of functions of variably modified types
136 must go to global stream, because they may be used in the type
137 definition. */
138 if (TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
139 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
140 else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
141 || TREE_CODE (t) == TYPE_DECL
142 || TREE_CODE (t) == CONST_DECL
143 || TREE_CODE (t) == NAMELIST_DECL)
144 && decl_function_context (t))
145 return false;
146 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
147 return false;
148 /* Variably modified types need to be streamed alongside function
149 bodies because they can refer to local entities. Together with
150 them we have to localize their members as well.
151 ??? In theory that includes non-FIELD_DECLs as well. */
152 else if (TYPE_P (t)
153 && variably_modified_type_p (t, NULL_TREE))
154 return false;
155 else if (TREE_CODE (t) == FIELD_DECL
156 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
157 return false;
158 else
159 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
160 }
161
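/* Illustrative example (a sketch, not part of the streamed format
   specification): a file-scope "static int x;" is a TREE_STATIC VAR_DECL
   with no function context, so tree_is_indexable returns true and the
   decl is emitted once to the global decl stream and referenced by index
   afterwards.  An automatic local variable, being !TREE_STATIC with a
   function context, is not indexable and is streamed together with the
   body of the function that owns it.  */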
162
163 /* Output info about the new location LOC into bitpack BP.
164 After the bitpack has been output, lto_output_location_data has
165 to be called to output the actual data. */
166
167 void
168 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
169 location_t loc)
170 {
171 expanded_location xloc;
172
173 loc = LOCATION_LOCUS (loc);
174 bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
175 if (loc == UNKNOWN_LOCATION)
176 return;
177
178 xloc = expand_location (loc);
179
180 bp_pack_value (bp, ob->current_file != xloc.file, 1);
181 bp_pack_value (bp, ob->current_line != xloc.line, 1);
182 bp_pack_value (bp, ob->current_col != xloc.column, 1);
183
184 if (ob->current_file != xloc.file)
185 bp_pack_var_len_unsigned (bp,
186 streamer_string_index (ob, xloc.file,
187 strlen (xloc.file) + 1,
188 true));
189 ob->current_file = xloc.file;
190
191 if (ob->current_line != xloc.line)
192 bp_pack_var_len_unsigned (bp, xloc.line);
193 ob->current_line = xloc.line;
194
195 if (ob->current_col != xloc.column)
196 bp_pack_var_len_unsigned (bp, xloc.column);
197 ob->current_col = xloc.column;
198 }
199
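/* Illustrative usage sketch (OB and STMT stand for a caller's initialized
   output block and a statement being streamed; this mirrors how the
   function is driven elsewhere in this file rather than adding new API):

     bitpack_d bp = bitpack_create (ob->main_stream);
     lto_output_location (ob, &bp, gimple_location (stmt));
     streamer_write_bitpack (&bp);

   For a location in the same file as OB's current one, with the line
   changed from 10 to 42 and the column unchanged, the bitpack receives
   the bits 0 (known location), 0 (same file), 1 (line differs) and
   0 (same column), followed by the var-len value 42 for the new line;
   the reader applies the same deltas to its current file/line/column.  */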
200
201 /* If EXPR is an indexable tree node, output a reference to it to
202 output block OB. Otherwise, output the physical representation of
203 EXPR to OB. */
204
205 static void
206 lto_output_tree_ref (struct output_block *ob, tree expr)
207 {
208 enum tree_code code;
209
210 if (TYPE_P (expr))
211 {
212 output_type_ref (ob, expr);
213 return;
214 }
215
216 code = TREE_CODE (expr);
217 switch (code)
218 {
219 case SSA_NAME:
220 streamer_write_record_start (ob, LTO_ssa_name_ref);
221 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
222 break;
223
224 case FIELD_DECL:
225 streamer_write_record_start (ob, LTO_field_decl_ref);
226 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
227 break;
228
229 case FUNCTION_DECL:
230 streamer_write_record_start (ob, LTO_function_decl_ref);
231 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
232 break;
233
234 case VAR_DECL:
235 case DEBUG_EXPR_DECL:
236 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
237 case PARM_DECL:
238 streamer_write_record_start (ob, LTO_global_decl_ref);
239 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
240 break;
241
242 case CONST_DECL:
243 streamer_write_record_start (ob, LTO_const_decl_ref);
244 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
245 break;
246
247 case IMPORTED_DECL:
248 gcc_assert (decl_function_context (expr) == NULL);
249 streamer_write_record_start (ob, LTO_imported_decl_ref);
250 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
251 break;
252
253 case TYPE_DECL:
254 streamer_write_record_start (ob, LTO_type_decl_ref);
255 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
256 break;
257
258 case NAMELIST_DECL:
259 streamer_write_record_start (ob, LTO_namelist_decl_ref);
260 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
261 break;
262
263 case NAMESPACE_DECL:
264 streamer_write_record_start (ob, LTO_namespace_decl_ref);
265 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
266 break;
267
268 case LABEL_DECL:
269 streamer_write_record_start (ob, LTO_label_decl_ref);
270 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
271 break;
272
273 case RESULT_DECL:
274 streamer_write_record_start (ob, LTO_result_decl_ref);
275 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
276 break;
277
278 case TRANSLATION_UNIT_DECL:
279 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
280 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
281 break;
282
283 default:
284 /* No other node is indexable, so it should have been handled by
285 lto_output_tree. */
286 gcc_unreachable ();
287 }
288 }
289
290
291 /* Return true if EXPR is a tree node that can be written to disk. */
292
293 static inline bool
294 lto_is_streamable (tree expr)
295 {
296 enum tree_code code = TREE_CODE (expr);
297
298 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
299 name version in lto_output_tree_ref (see output_ssa_names). */
300 return !is_lang_specific (expr)
301 && code != SSA_NAME
302 && code != CALL_EXPR
303 && code != LANG_TYPE
304 && code != MODIFY_EXPR
305 && code != INIT_EXPR
306 && code != TARGET_EXPR
307 && code != BIND_EXPR
308 && code != WITH_CLEANUP_EXPR
309 && code != STATEMENT_LIST
310 && (code == CASE_LABEL_EXPR
311 || code == DECL_EXPR
312 || TREE_CODE_CLASS (code) != tcc_statement);
313 }
314
315
316 /* For EXPR, look up and return what we want to stream to OB as DECL_INITIAL. */
317
318 static tree
319 get_symbol_initial_value (struct output_block *ob, tree expr)
320 {
321 gcc_checking_assert (DECL_P (expr)
322 && TREE_CODE (expr) != FUNCTION_DECL
323 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
324
325 /* Handle DECL_INITIAL for symbols. */
326 tree initial = DECL_INITIAL (expr);
327 if (TREE_CODE (expr) == VAR_DECL
328 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
329 && !DECL_IN_CONSTANT_POOL (expr)
330 && initial)
331 {
332 lto_symtab_encoder_t encoder;
333 varpool_node *vnode;
334
335 encoder = ob->decl_state->symtab_node_encoder;
336 vnode = varpool_get_node (expr);
337 if (!vnode
338 || !lto_symtab_encoder_encode_initializer_p (encoder,
339 vnode))
340 initial = error_mark_node;
341 }
342
343 return initial;
344 }
345
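/* Illustrative example (sketch only): for a global "int x = 42;" the
   INTEGER_CST 42 is returned and streamed as DECL_INITIAL only if x's
   varpool node exists in the current symtab encoder and its initializer
   was marked for output; otherwise error_mark_node is streamed in its
   place.  */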
346
347 /* Write a physical representation of tree node EXPR to output block
348 OB. If REF_P is true, the leaves of EXPR are emitted as references
349 via lto_output_tree_ref. */
351
352 static void
353 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
354 {
355 /* Pack all the non-pointer fields in EXPR into a bitpack and write
356 the resulting bitpack. */
357 bitpack_d bp = bitpack_create (ob->main_stream);
358 streamer_pack_tree_bitfields (ob, &bp, expr);
359 streamer_write_bitpack (&bp);
360
361 /* Write all the pointer fields in EXPR. */
362 streamer_write_tree_body (ob, expr, ref_p);
363
364 /* Write any LTO-specific data to OB. */
365 if (DECL_P (expr)
366 && TREE_CODE (expr) != FUNCTION_DECL
367 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
368 {
369 /* Handle DECL_INITIAL for symbols. */
370 tree initial = get_symbol_initial_value (ob, expr);
371 stream_write_tree (ob, initial, ref_p);
372 }
373 }
374
375 /* Write a physical representation of tree node EXPR to output block
376 OB. If REF_P is true, the leaves of EXPR are emitted as references
377 via lto_output_tree_ref. */
379
380 static void
381 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
382 {
383 if (!lto_is_streamable (expr))
384 internal_error ("tree code %qs is not supported in LTO streams",
385 get_tree_code_name (TREE_CODE (expr)));
386
387 /* Write the header, containing everything needed to materialize
388 EXPR on the reading side. */
389 streamer_write_tree_header (ob, expr);
390
391 lto_write_tree_1 (ob, expr, ref_p);
392
393 /* Mark the end of EXPR. */
394 streamer_write_zero (ob);
395 }
396
397 /* Emit the physical representation of tree node EXPR to output block
398 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
399 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
400
401 static void
402 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
403 bool ref_p, bool this_ref_p)
404 {
405 unsigned ix;
406
407 gcc_checking_assert (expr != NULL_TREE
408 && !(this_ref_p && tree_is_indexable (expr)));
409
410 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
411 expr, hash, &ix);
412 gcc_assert (!exists_p);
413 if (streamer_handle_as_builtin_p (expr))
414 {
415 /* MD and NORMAL builtins do not need to be written out
416 completely as they are always instantiated by the
417 compiler on startup. The only builtins that need to
418 be written out are BUILT_IN_FRONTEND. For all other
419 builtins, we simply write the class and code. */
420 streamer_write_builtin (ob, expr);
421 }
422 else if (TREE_CODE (expr) == INTEGER_CST
423 && !TREE_OVERFLOW (expr))
424 {
425 /* Shared INTEGER_CST nodes are special because they need their
426 original type to be materialized by the reader (to implement
427 TYPE_CACHED_VALUES). */
428 streamer_write_integer_cst (ob, expr, ref_p);
429 }
430 else
431 {
432 /* This is the first time we see EXPR, write its fields
433 to OB. */
434 lto_write_tree (ob, expr, ref_p);
435 }
436 }
437
438 struct sccs
439 {
440 unsigned int dfsnum;
441 unsigned int low;
442 };
443
444 struct scc_entry
445 {
446 tree t;
447 hashval_t hash;
448 };
449
450 static unsigned int next_dfs_num;
451 static vec<scc_entry> sccstack;
452 static struct pointer_map_t *sccstate;
453 static struct obstack sccstate_obstack;
454
455 static void
456 DFS_write_tree (struct output_block *ob, sccs *from_state,
457 tree expr, bool ref_p, bool this_ref_p);
458
459 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
460 DFS recurse for all tree edges originating from it. */
461
462 static void
463 DFS_write_tree_body (struct output_block *ob,
464 tree expr, sccs *expr_state, bool ref_p)
465 {
466 #define DFS_follow_tree_edge(DEST) \
467 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
468
469 enum tree_code code;
470
471 code = TREE_CODE (expr);
472
473 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
474 {
475 if (TREE_CODE (expr) != IDENTIFIER_NODE)
476 DFS_follow_tree_edge (TREE_TYPE (expr));
477 }
478
479 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
480 {
481 for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
482 DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
483 }
484
485 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
486 {
487 DFS_follow_tree_edge (TREE_REALPART (expr));
488 DFS_follow_tree_edge (TREE_IMAGPART (expr));
489 }
490
491 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
492 {
493 /* Drop names that were created for anonymous entities. */
494 if (DECL_NAME (expr)
495 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
496 && ANON_AGGRNAME_P (DECL_NAME (expr)))
497 ;
498 else
499 DFS_follow_tree_edge (DECL_NAME (expr));
500 DFS_follow_tree_edge (DECL_CONTEXT (expr));
501 }
502
503 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
504 {
505 DFS_follow_tree_edge (DECL_SIZE (expr));
506 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
507
508 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
509 special handling in LTO, it must be handled by streamer hooks. */
510
511 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
512
513 /* Do not follow DECL_ABSTRACT_ORIGIN. We cannot handle debug information
514 for early inlining so drop it on the floor instead of ICEing in
515 dwarf2out.c. */
516
517 if ((TREE_CODE (expr) == VAR_DECL
518 || TREE_CODE (expr) == PARM_DECL)
519 && DECL_HAS_VALUE_EXPR_P (expr))
520 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
521 if (TREE_CODE (expr) == VAR_DECL)
522 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
523 }
524
525 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
526 {
527 if (TREE_CODE (expr) == TYPE_DECL)
528 DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
529 DFS_follow_tree_edge (DECL_VINDEX (expr));
530 }
531
532 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
533 {
534 /* Make sure we don't inadvertently set the assembler name. */
535 if (DECL_ASSEMBLER_NAME_SET_P (expr))
536 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
537 DFS_follow_tree_edge (DECL_SECTION_NAME (expr));
538 DFS_follow_tree_edge (DECL_COMDAT_GROUP (expr));
539 }
540
541 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
542 {
543 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
544 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
545 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
546 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
547 DFS_follow_tree_edge (DECL_FCONTEXT (expr));
548 }
549
550 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
551 {
552 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
553 /* Do not follow DECL_FUNCTION_SPECIFIC_TARGET. They will be regenerated. */
554 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
555 }
556
557 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
558 {
559 DFS_follow_tree_edge (TYPE_SIZE (expr));
560 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
561 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
562 DFS_follow_tree_edge (TYPE_NAME (expr));
563 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
564 reconstructed during fixup. */
565 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
566 during fixup. */
567 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
568 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
569 /* TYPE_CANONICAL is re-computed during type merging, so no need
570 to follow it here. */
571 DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
572 }
573
574 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
575 {
576 if (TREE_CODE (expr) == ENUMERAL_TYPE)
577 DFS_follow_tree_edge (TYPE_VALUES (expr));
578 else if (TREE_CODE (expr) == ARRAY_TYPE)
579 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
580 else if (RECORD_OR_UNION_TYPE_P (expr))
581 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
582 DFS_follow_tree_edge (t);
583 else if (TREE_CODE (expr) == FUNCTION_TYPE
584 || TREE_CODE (expr) == METHOD_TYPE)
585 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
586
587 if (!POINTER_TYPE_P (expr))
588 DFS_follow_tree_edge (TYPE_MINVAL (expr));
589 DFS_follow_tree_edge (TYPE_MAXVAL (expr));
590 if (RECORD_OR_UNION_TYPE_P (expr))
591 DFS_follow_tree_edge (TYPE_BINFO (expr));
592 }
593
594 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
595 {
596 DFS_follow_tree_edge (TREE_PURPOSE (expr));
597 DFS_follow_tree_edge (TREE_VALUE (expr));
598 DFS_follow_tree_edge (TREE_CHAIN (expr));
599 }
600
601 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
602 {
603 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
604 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
605 }
606
607 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
608 {
609 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
610 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
611 DFS_follow_tree_edge (TREE_BLOCK (expr));
612 }
613
614 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
615 {
616 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
617 /* ??? FIXME. See also streamer_write_chain. */
618 if (!(VAR_OR_FUNCTION_DECL_P (t)
619 && DECL_EXTERNAL (t)))
620 DFS_follow_tree_edge (t);
621
622 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
623
624 /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
625 handle - those that represent inlined function scopes.
626 For the rest, drop them on the floor instead of ICEing
627 in dwarf2out.c. */
628 if (inlined_function_outer_scope_p (expr))
629 {
630 tree ultimate_origin = block_ultimate_origin (expr);
631 DFS_follow_tree_edge (ultimate_origin);
632 }
633 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
634 information for early inlined BLOCKs so drop it on the floor instead
635 of ICEing in dwarf2out.c. */
636
637 /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN are not live at LTO
638 streaming time. */
639
640 /* Do not output BLOCK_SUBBLOCKS. Instead, on stream-in, this
641 list is re-constructed from BLOCK_SUPERCONTEXT. */
642 }
643
644 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
645 {
646 unsigned i;
647 tree t;
648
649 /* Note that the number of BINFO slots has already been emitted in
650 EXPR's header (see streamer_write_tree_header) because this length
651 is needed to build the empty BINFO node on the reader side. */
652 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
653 DFS_follow_tree_edge (t);
654 DFS_follow_tree_edge (BINFO_OFFSET (expr));
655 DFS_follow_tree_edge (BINFO_VTABLE (expr));
656 DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));
657
658 /* The number of BINFO_BASE_ACCESSES has already been emitted in
659 EXPR's bitfield section. */
660 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
661 DFS_follow_tree_edge (t);
662
663 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
664 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
665 }
666
667 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
668 {
669 unsigned i;
670 tree index, value;
671
672 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
673 {
674 DFS_follow_tree_edge (index);
675 DFS_follow_tree_edge (value);
676 }
677 }
678
679 if (code == OMP_CLAUSE)
680 {
681 int i;
682 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
683 DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
684 DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
685 }
686
687 #undef DFS_follow_tree_edge
688 }
689
690 /* Return a hash value for the tree T. */
691
692 static hashval_t
693 hash_tree (struct streamer_tree_cache_d *cache, tree t)
694 {
695 #define visit(SIBLING) \
696 do { \
697 unsigned ix; \
698 if (SIBLING && streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
699 v = iterative_hash_hashval_t (streamer_tree_cache_get_hash (cache, ix), v); \
700 } while (0)
701
702 /* Hash TS_BASE. */
703 enum tree_code code = TREE_CODE (t);
704 hashval_t v = iterative_hash_host_wide_int (code, 0);
705 if (!TYPE_P (t))
706 {
707 v = iterative_hash_host_wide_int (TREE_SIDE_EFFECTS (t)
708 | (TREE_CONSTANT (t) << 1)
709 | (TREE_READONLY (t) << 2)
710 | (TREE_PUBLIC (t) << 3), v);
711 }
712 v = iterative_hash_host_wide_int (TREE_ADDRESSABLE (t)
713 | (TREE_THIS_VOLATILE (t) << 1), v);
714 if (DECL_P (t))
715 v = iterative_hash_host_wide_int (DECL_UNSIGNED (t), v);
716 else if (TYPE_P (t))
717 v = iterative_hash_host_wide_int (TYPE_UNSIGNED (t), v);
718 if (TYPE_P (t))
719 v = iterative_hash_host_wide_int (TYPE_ARTIFICIAL (t), v);
720 else
721 v = iterative_hash_host_wide_int (TREE_NO_WARNING (t), v);
722 v = iterative_hash_host_wide_int (TREE_NOTHROW (t)
723 | (TREE_STATIC (t) << 1)
724 | (TREE_PROTECTED (t) << 2)
725 | (TREE_DEPRECATED (t) << 3), v);
726 if (code != TREE_BINFO)
727 v = iterative_hash_host_wide_int (TREE_PRIVATE (t), v);
728 if (TYPE_P (t))
729 v = iterative_hash_host_wide_int (TYPE_SATURATING (t)
730 | (TYPE_ADDR_SPACE (t) << 1), v);
731 else if (code == SSA_NAME)
732 v = iterative_hash_host_wide_int (SSA_NAME_IS_DEFAULT_DEF (t), v);
733
734 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
735 {
736 int i;
737 v = iterative_hash_host_wide_int (TREE_INT_CST_NUNITS (t), v);
738 v = iterative_hash_host_wide_int (TREE_INT_CST_EXT_NUNITS (t), v);
739 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
740 v = iterative_hash_host_wide_int (TREE_INT_CST_ELT (t, i), v);
741 }
742
743 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
744 {
745 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
746 v = iterative_hash_host_wide_int (r.cl, v);
747 v = iterative_hash_host_wide_int (r.decimal
748 | (r.sign << 1)
749 | (r.signalling << 2)
750 | (r.canonical << 3), v);
751 v = iterative_hash_host_wide_int (r.uexp, v);
752 for (unsigned i = 0; i < SIGSZ; ++i)
753 v = iterative_hash_host_wide_int (r.sig[i], v);
754 }
755
756 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
757 {
758 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
759 v = iterative_hash_host_wide_int (f.mode, v);
760 v = iterative_hash_host_wide_int (f.data.low, v);
761 v = iterative_hash_host_wide_int (f.data.high, v);
762 }
763
764 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
765 {
766 v = iterative_hash_host_wide_int (DECL_MODE (t), v);
767 v = iterative_hash_host_wide_int (DECL_NONLOCAL (t)
768 | (DECL_VIRTUAL_P (t) << 1)
769 | (DECL_IGNORED_P (t) << 2)
770 | (DECL_ABSTRACT (t) << 3)
771 | (DECL_ARTIFICIAL (t) << 4)
772 | (DECL_USER_ALIGN (t) << 5)
773 | (DECL_PRESERVE_P (t) << 6)
774 | (DECL_EXTERNAL (t) << 7)
775 | (DECL_GIMPLE_REG_P (t) << 8), v);
776 v = iterative_hash_host_wide_int (DECL_ALIGN (t), v);
777 if (code == LABEL_DECL)
778 {
779 v = iterative_hash_host_wide_int (EH_LANDING_PAD_NR (t), v);
780 v = iterative_hash_host_wide_int (LABEL_DECL_UID (t), v);
781 }
782 else if (code == FIELD_DECL)
783 {
784 v = iterative_hash_host_wide_int (DECL_PACKED (t)
785 | (DECL_NONADDRESSABLE_P (t) << 1),
786 v);
787 v = iterative_hash_host_wide_int (DECL_OFFSET_ALIGN (t), v);
788 }
789 else if (code == VAR_DECL)
790 {
791 v = iterative_hash_host_wide_int (DECL_HAS_DEBUG_EXPR_P (t)
792 | (DECL_NONLOCAL_FRAME (t) << 1),
793 v);
794 }
795 if (code == RESULT_DECL
796 || code == PARM_DECL
797 || code == VAR_DECL)
798 {
799 v = iterative_hash_host_wide_int (DECL_BY_REFERENCE (t), v);
800 if (code == VAR_DECL
801 || code == PARM_DECL)
802 v = iterative_hash_host_wide_int (DECL_HAS_VALUE_EXPR_P (t), v);
803 }
804 }
805
806 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
807 v = iterative_hash_host_wide_int (DECL_REGISTER (t), v);
808
809 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
810 {
811 v = iterative_hash_host_wide_int ((DECL_COMMON (t))
812 | (DECL_DLLIMPORT_P (t) << 1)
813 | (DECL_WEAK (t) << 2)
814 | (DECL_SEEN_IN_BIND_EXPR_P (t) << 3)
815 | (DECL_COMDAT (t) << 4)
816 | (DECL_VISIBILITY_SPECIFIED (t) << 6),
817 v);
818 v = iterative_hash_host_wide_int (DECL_VISIBILITY (t), v);
819 if (code == VAR_DECL)
820 {
821 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
822 v = iterative_hash_host_wide_int (DECL_HARD_REGISTER (t)
823 | (DECL_IN_CONSTANT_POOL (t) << 1),
824 v);
825 v = iterative_hash_host_wide_int (DECL_TLS_MODEL (t), v);
826 }
827 if (TREE_CODE (t) == FUNCTION_DECL)
828 v = iterative_hash_host_wide_int (DECL_FINAL_P (t)
829 | (DECL_CXX_CONSTRUCTOR_P (t) << 1)
830 | (DECL_CXX_DESTRUCTOR_P (t) << 2),
831 v);
832 if (VAR_OR_FUNCTION_DECL_P (t))
833 v = iterative_hash_host_wide_int (DECL_INIT_PRIORITY (t), v);
834 }
835
836 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
837 {
838 v = iterative_hash_host_wide_int (DECL_BUILT_IN_CLASS (t), v);
839 v = iterative_hash_host_wide_int (DECL_STATIC_CONSTRUCTOR (t)
840 | (DECL_STATIC_DESTRUCTOR (t) << 1)
841 | (DECL_UNINLINABLE (t) << 2)
842 | (DECL_POSSIBLY_INLINED (t) << 3)
843 | (DECL_IS_NOVOPS (t) << 4)
844 | (DECL_IS_RETURNS_TWICE (t) << 5)
845 | (DECL_IS_MALLOC (t) << 6)
846 | (DECL_IS_OPERATOR_NEW (t) << 7)
847 | (DECL_DECLARED_INLINE_P (t) << 8)
848 | (DECL_STATIC_CHAIN (t) << 9)
849 | (DECL_NO_INLINE_WARNING_P (t) << 10)
850 | (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t) << 11)
851 | (DECL_NO_LIMIT_STACK (t) << 12)
852 | (DECL_DISREGARD_INLINE_LIMITS (t) << 13)
853 | (DECL_PURE_P (t) << 14)
854 | (DECL_LOOPING_CONST_OR_PURE_P (t) << 15), v);
855 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
856 v = iterative_hash_host_wide_int (DECL_FUNCTION_CODE (t), v);
857 if (DECL_STATIC_DESTRUCTOR (t))
858 v = iterative_hash_host_wide_int (DECL_FINI_PRIORITY (t), v);
859 }
860
861 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
862 {
863 v = iterative_hash_host_wide_int (TYPE_MODE (t), v);
864 v = iterative_hash_host_wide_int (TYPE_STRING_FLAG (t)
865 | (TYPE_NO_FORCE_BLK (t) << 1)
866 | (TYPE_NEEDS_CONSTRUCTING (t) << 2)
867 | (TYPE_PACKED (t) << 3)
868 | (TYPE_RESTRICT (t) << 4)
869 | (TYPE_USER_ALIGN (t) << 5)
870 | (TYPE_READONLY (t) << 6), v);
871 if (RECORD_OR_UNION_TYPE_P (t))
872 {
873 v = iterative_hash_host_wide_int (TYPE_TRANSPARENT_AGGR (t)
874 | (TYPE_FINAL_P (t) << 1), v);
875 }
876 else if (code == ARRAY_TYPE)
877 v = iterative_hash_host_wide_int (TYPE_NONALIASED_COMPONENT (t), v);
878 v = iterative_hash_host_wide_int (TYPE_PRECISION (t), v);
879 v = iterative_hash_host_wide_int (TYPE_ALIGN (t), v);
880 v = iterative_hash_host_wide_int ((TYPE_ALIAS_SET (t) == 0
881 || (!in_lto_p
882 && get_alias_set (t) == 0))
883 ? 0 : -1, v);
884 }
885
886 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
887 v = iterative_hash (TRANSLATION_UNIT_LANGUAGE (t),
888 strlen (TRANSLATION_UNIT_LANGUAGE (t)), v);
889
890 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
891 gcc_unreachable ();
892
893 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
894 v = iterative_hash (t, sizeof (struct cl_optimization), v);
895
896 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
897 v = iterative_hash_host_wide_int (IDENTIFIER_HASH_VALUE (t), v);
898
899 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
900 v = iterative_hash (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t), v);
901
902 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
903 {
904 if (POINTER_TYPE_P (t))
905 {
906 /* For pointers factor in the pointed-to type recursively as
907 we cannot recurse through only pointers.
908 ??? We can generalize this by keeping track of the
909 in-SCC edges for each tree (or arbitrarily the first
910 such edge) and hashing that in during a second stage
911 (instead of the quadratic mixing of the SCC we do now). */
912 hashval_t x;
913 unsigned ix;
914 if (streamer_tree_cache_lookup (cache, TREE_TYPE (t), &ix))
915 x = streamer_tree_cache_get_hash (cache, ix);
916 else
917 x = hash_tree (cache, TREE_TYPE (t));
918 v = iterative_hash_hashval_t (x, v);
919 }
920 else if (code != IDENTIFIER_NODE)
921 visit (TREE_TYPE (t));
922 }
923
924 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
925 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
926 visit (VECTOR_CST_ELT (t, i));
927
928 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
929 {
930 visit (TREE_REALPART (t));
931 visit (TREE_IMAGPART (t));
932 }
933
934 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
935 {
936 /* Drop names that were created for anonymous entities. */
937 if (DECL_NAME (t)
938 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
939 && ANON_AGGRNAME_P (DECL_NAME (t)))
940 ;
941 else
942 visit (DECL_NAME (t));
943 if (DECL_FILE_SCOPE_P (t))
944 ;
945 else
946 visit (DECL_CONTEXT (t));
947 }
948
949 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
950 {
951 visit (DECL_SIZE (t));
952 visit (DECL_SIZE_UNIT (t));
953 visit (DECL_ATTRIBUTES (t));
954 if ((code == VAR_DECL
955 || code == PARM_DECL)
956 && DECL_HAS_VALUE_EXPR_P (t))
957 visit (DECL_VALUE_EXPR (t));
958 if (code == VAR_DECL
959 && DECL_HAS_DEBUG_EXPR_P (t))
960 visit (DECL_DEBUG_EXPR (t));
961 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
962 be able to call get_symbol_initial_value. */
963 }
964
965 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
966 {
967 if (code == TYPE_DECL)
968 visit (DECL_ORIGINAL_TYPE (t));
969 visit (DECL_VINDEX (t));
970 }
971
972 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
973 {
974 if (DECL_ASSEMBLER_NAME_SET_P (t))
975 visit (DECL_ASSEMBLER_NAME (t));
976 visit (DECL_SECTION_NAME (t));
977 visit (DECL_COMDAT_GROUP (t));
978 }
979
980 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
981 {
982 visit (DECL_FIELD_OFFSET (t));
983 visit (DECL_BIT_FIELD_TYPE (t));
984 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
985 visit (DECL_FIELD_BIT_OFFSET (t));
986 visit (DECL_FCONTEXT (t));
987 }
988
989 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
990 {
991 visit (DECL_FUNCTION_PERSONALITY (t));
992 /* Do not follow DECL_FUNCTION_SPECIFIC_TARGET. */
993 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
994 }
995
996 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
997 {
998 visit (TYPE_SIZE (t));
999 visit (TYPE_SIZE_UNIT (t));
1000 visit (TYPE_ATTRIBUTES (t));
1001 visit (TYPE_NAME (t));
1002 visit (TYPE_MAIN_VARIANT (t));
1003 if (TYPE_FILE_SCOPE_P (t))
1004 ;
1005 else
1006 visit (TYPE_CONTEXT (t));
1007 visit (TYPE_STUB_DECL (t));
1008 }
1009
1010 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1011 {
1012 if (code == ENUMERAL_TYPE)
1013 visit (TYPE_VALUES (t));
1014 else if (code == ARRAY_TYPE)
1015 visit (TYPE_DOMAIN (t));
1016 else if (RECORD_OR_UNION_TYPE_P (t))
1017 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1018 visit (f);
1019 else if (code == FUNCTION_TYPE
1020 || code == METHOD_TYPE)
1021 visit (TYPE_ARG_TYPES (t));
1022 if (!POINTER_TYPE_P (t))
1023 visit (TYPE_MINVAL (t));
1024 visit (TYPE_MAXVAL (t));
1025 if (RECORD_OR_UNION_TYPE_P (t))
1026 visit (TYPE_BINFO (t));
1027 }
1028
1029 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1030 {
1031 visit (TREE_PURPOSE (t));
1032 visit (TREE_VALUE (t));
1033 visit (TREE_CHAIN (t));
1034 }
1035
1036 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1037 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1038 visit (TREE_VEC_ELT (t, i));
1039
1040 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1041 {
1042 v = iterative_hash_host_wide_int (TREE_OPERAND_LENGTH (t), v);
1043 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1044 visit (TREE_OPERAND (t, i));
1045 }
1046
1047 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1048 {
1049 unsigned i;
1050 tree b;
1051 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1052 visit (b);
1053 visit (BINFO_OFFSET (t));
1054 visit (BINFO_VTABLE (t));
1055 visit (BINFO_VPTR_FIELD (t));
1056 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1057 visit (b);
1058 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1059 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1060 }
1061
1062 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1063 {
1064 unsigned i;
1065 tree index, value;
1066 v = iterative_hash_host_wide_int (CONSTRUCTOR_NELTS (t), v);
1067 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1068 {
1069 visit (index);
1070 visit (value);
1071 }
1072 }
1073
1074 if (code == OMP_CLAUSE)
1075 {
1076 int i;
1077
1078 v = iterative_hash_host_wide_int (OMP_CLAUSE_CODE (t), v);
1079 switch (OMP_CLAUSE_CODE (t))
1080 {
1081 case OMP_CLAUSE_DEFAULT:
1082 v = iterative_hash_host_wide_int (OMP_CLAUSE_DEFAULT_KIND (t), v);
1083 break;
1084 case OMP_CLAUSE_SCHEDULE:
1085 v = iterative_hash_host_wide_int (OMP_CLAUSE_SCHEDULE_KIND (t), v);
1086 break;
1087 case OMP_CLAUSE_DEPEND:
1088 v = iterative_hash_host_wide_int (OMP_CLAUSE_DEPEND_KIND (t), v);
1089 break;
1090 case OMP_CLAUSE_MAP:
1091 v = iterative_hash_host_wide_int (OMP_CLAUSE_MAP_KIND (t), v);
1092 break;
1093 case OMP_CLAUSE_PROC_BIND:
1094 v = iterative_hash_host_wide_int (OMP_CLAUSE_PROC_BIND_KIND (t), v);
1095 break;
1096 case OMP_CLAUSE_REDUCTION:
1097 v = iterative_hash_host_wide_int (OMP_CLAUSE_REDUCTION_CODE (t), v);
1098 break;
1099 default:
1100 break;
1101 }
1102 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1103 visit (OMP_CLAUSE_OPERAND (t, i));
1104 visit (OMP_CLAUSE_CHAIN (t));
1105 }
1106
1107 return v;
1108
1109 #undef visit
1110 }
1111
1112 /* Compare two SCC entries by their hash value for qsorting them. */
1113
1114 static int
1115 scc_entry_compare (const void *p1_, const void *p2_)
1116 {
1117 const scc_entry *p1 = (const scc_entry *) p1_;
1118 const scc_entry *p2 = (const scc_entry *) p2_;
1119 if (p1->hash < p2->hash)
1120 return -1;
1121 else if (p1->hash > p2->hash)
1122 return 1;
1123 return 0;
1124 }
1125
1126 /* Return a hash value for the SCC on the SCC stack from FIRST with
1127 size SIZE. */
1128
1129 static hashval_t
1130 hash_scc (struct streamer_tree_cache_d *cache, unsigned first, unsigned size)
1131 {
1132 /* Compute hash values for the SCC members. */
1133 for (unsigned i = 0; i < size; ++i)
1134 sccstack[first+i].hash = hash_tree (cache, sccstack[first+i].t);
1135
1136 if (size == 1)
1137 return sccstack[first].hash;
1138
1139 /* Sort the SCC's (tree, hash) pairs so that when we mix in
1140 all members of the SCC the hash value becomes independent of
1141 the order in which we visited the SCC. Disregard hashes equal to
1142 the hash of the tree we mix into because we cannot guarantee
1143 a stable sort for those across different TUs. */
1144 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1145 hashval_t *tem = XALLOCAVEC (hashval_t, size);
1146 for (unsigned i = 0; i < size; ++i)
1147 {
1148 hashval_t hash = sccstack[first+i].hash;
1149 hashval_t orig_hash = hash;
1150 unsigned j;
1151 /* Skip same hashes. */
1152 for (j = i + 1;
1153 j < size && sccstack[first+j].hash == orig_hash; ++j)
1154 ;
1155 for (; j < size; ++j)
1156 hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
1157 for (j = 0; sccstack[first+j].hash != orig_hash; ++j)
1158 hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
1159 tem[i] = hash;
1160 }
1161 hashval_t scc_hash = 0;
1162 for (unsigned i = 0; i < size; ++i)
1163 {
1164 sccstack[first+i].hash = tem[i];
1165 scc_hash = iterative_hash_hashval_t (tem[i], scc_hash);
1166 }
1167 return scc_hash;
1168 }
1169
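/* Worked example for the mixing above (a sketch, using made-up hash
   values): suppose the sorted member hashes of an SCC are {3, 3, 7}.
   Each member mixes in every hash different from its own, starting just
   past its run of equal hashes and wrapping around: both entries with
   hash 3 mix in 7, and the entry with hash 7 mixes in 3 and 3.  Hashes
   equal to a member's own are skipped because their relative order is
   not stable across translation units.  The final SCC hash then combines
   the per-member results in their sorted order.  */
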
1170 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1171 already in the streamer cache. Main routine called for
1172 each visit of EXPR. */
1173
1174 static void
1175 DFS_write_tree (struct output_block *ob, sccs *from_state,
1176 tree expr, bool ref_p, bool this_ref_p)
1177 {
1178 unsigned ix;
1179 sccs **slot;
1180
1181 /* Handle special cases. */
1182 if (expr == NULL_TREE)
1183 return;
1184
1185 /* Do not DFS walk into indexable trees. */
1186 if (this_ref_p && tree_is_indexable (expr))
1187 return;
1188
1189 /* Check if we already streamed EXPR. */
1190 if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
1191 return;
1192
1193 slot = (sccs **)pointer_map_insert (sccstate, expr);
1194 sccs *cstate = *slot;
1195 if (!cstate)
1196 {
1197 scc_entry e = { expr, 0 };
1198 /* Not yet visited. DFS recurse and push it onto the stack. */
1199 *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
1200 sccstack.safe_push (e);
1201 cstate->dfsnum = next_dfs_num++;
1202 cstate->low = cstate->dfsnum;
1203
1204 if (streamer_handle_as_builtin_p (expr))
1205 ;
1206 else if (TREE_CODE (expr) == INTEGER_CST
1207 && !TREE_OVERFLOW (expr))
1208 DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
1209 else
1210 {
1211 DFS_write_tree_body (ob, expr, cstate, ref_p);
1212
1213 /* Walk any LTO-specific edges. */
1214 if (DECL_P (expr)
1215 && TREE_CODE (expr) != FUNCTION_DECL
1216 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
1217 {
1218 /* Handle DECL_INITIAL for symbols. */
1219 tree initial = get_symbol_initial_value (ob, expr);
1220 DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
1221 }
1222 }
1223
1224 /* See if we found an SCC. */
1225 if (cstate->low == cstate->dfsnum)
1226 {
1227 unsigned first, size;
1228 tree x;
1229
1230 /* Pop the SCC and compute its size. */
1231 first = sccstack.length ();
1232 do
1233 {
1234 x = sccstack[--first].t;
1235 }
1236 while (x != expr);
1237 size = sccstack.length () - first;
1238
1239 /* No need to compute hashes for LTRANS units, we don't perform
1240 any merging there. */
1241 hashval_t scc_hash = 0;
1242 unsigned scc_entry_len = 0;
1243 if (!flag_wpa)
1244 {
1245 scc_hash = hash_scc (ob->writer_cache, first, size);
1246
1247 /* Put the entries with the least number of collisions first. */
1248 unsigned entry_start = 0;
1249 scc_entry_len = size + 1;
1250 for (unsigned i = 0; i < size;)
1251 {
1252 unsigned from = i;
1253 for (i = i + 1; i < size
1254 && (sccstack[first + i].hash
1255 == sccstack[first + from].hash); ++i)
1256 ;
1257 if (i - from < scc_entry_len)
1258 {
1259 scc_entry_len = i - from;
1260 entry_start = from;
1261 }
1262 }
1263 for (unsigned i = 0; i < scc_entry_len; ++i)
1264 {
1265 scc_entry tem = sccstack[first + i];
1266 sccstack[first + i] = sccstack[first + entry_start + i];
1267 sccstack[first + entry_start + i] = tem;
1268 }
1269 }
1270
1271 /* Write LTO_tree_scc. */
1272 streamer_write_record_start (ob, LTO_tree_scc);
1273 streamer_write_uhwi (ob, size);
1274 streamer_write_uhwi (ob, scc_hash);
1275
1276 /* Write size-1 SCCs without wrapping them inside SCC bundles.
1277 All INTEGER_CSTs need to be handled this way as we need
1278 their type to materialize them. Also builtins are handled
1279 this way.
1280 ??? We still wrap these in LTO_tree_scc so at the
1281 input side we can properly identify the tree we want
1282 to ultimately return. */
1283 if (size == 1)
1284 lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
1285 else
1286 {
1287 /* Write the size of the SCC entry candidates. */
1288 streamer_write_uhwi (ob, scc_entry_len);
1289
1290 /* Write all headers and populate the streamer cache. */
1291 for (unsigned i = 0; i < size; ++i)
1292 {
1293 hashval_t hash = sccstack[first+i].hash;
1294 tree t = sccstack[first+i].t;
1295 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
1296 t, hash, &ix);
1297 gcc_assert (!exists_p);
1298
1299 if (!lto_is_streamable (t))
1300 internal_error ("tree code %qs is not supported "
1301 "in LTO streams",
1302 get_tree_code_name (TREE_CODE (t)));
1303
1304 gcc_checking_assert (!streamer_handle_as_builtin_p (t));
1305
1306 /* Write the header, containing everything needed to
1307 materialize EXPR on the reading side. */
1308 streamer_write_tree_header (ob, t);
1309 }
1310
1311 /* Write the bitpacks and tree references. */
1312 for (unsigned i = 0; i < size; ++i)
1313 {
1314 lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
1315
1316 /* Mark the end of the tree. */
1317 streamer_write_zero (ob);
1318 }
1319 }
1320
1321 /* Finally truncate the vector. */
1322 sccstack.truncate (first);
1323
1324 if (from_state)
1325 from_state->low = MIN (from_state->low, cstate->low);
1326 return;
1327 }
1328
1329 if (from_state)
1330 from_state->low = MIN (from_state->low, cstate->low);
1331 }
1332 gcc_checking_assert (from_state);
1333 if (cstate->dfsnum < from_state->dfsnum)
1334 from_state->low = MIN (cstate->dfsnum, from_state->low);
1335 }
1336
1337
1338 /* Emit the physical representation of tree node EXPR to output block
1339 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
1340 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1341
1342 void
1343 lto_output_tree (struct output_block *ob, tree expr,
1344 bool ref_p, bool this_ref_p)
1345 {
1346 unsigned ix;
1347 bool existed_p;
1348
1349 if (expr == NULL_TREE)
1350 {
1351 streamer_write_record_start (ob, LTO_null);
1352 return;
1353 }
1354
1355 if (this_ref_p && tree_is_indexable (expr))
1356 {
1357 lto_output_tree_ref (ob, expr);
1358 return;
1359 }
1360
1361 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1362 if (existed_p)
1363 {
1364 /* If a node has already been streamed out, make sure that
1365 we don't write it more than once. Otherwise, the reader
1366 will instantiate two different nodes for the same object. */
1367 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1368 streamer_write_uhwi (ob, ix);
1369 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1370 lto_tree_code_to_tag (TREE_CODE (expr)));
1371 lto_stats.num_pickle_refs_output++;
1372 }
1373 else
1374 {
1375 /* This is the first time we see EXPR, write all reachable
1376 trees to OB. */
1377 static bool in_dfs_walk;
1378
1379 /* Protect against recursion, which would mean a disconnect between
1380 the tree edges we walk in the DFS walk and the edges we
1381 stream out. */
1382 gcc_assert (!in_dfs_walk);
1383
1384 /* Start the DFS walk. */
1387 in_dfs_walk = true;
1388 sccstate = pointer_map_create ();
1389 gcc_obstack_init (&sccstate_obstack);
1390 next_dfs_num = 1;
1391 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
1392 sccstack.release ();
1393 pointer_map_destroy (sccstate);
1394 obstack_free (&sccstate_obstack, NULL);
1395 in_dfs_walk = false;
1396
1397 /* Finally append a reference to the tree we were writing.
1398 ??? If expr ended up as a singleton we could have
1399 inlined it here and avoided outputting a reference. */
1400 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1401 gcc_assert (existed_p);
1402 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1403 streamer_write_uhwi (ob, ix);
1404 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1405 lto_tree_code_to_tag (TREE_CODE (expr)));
1406 lto_stats.num_pickle_refs_output++;
1407 }
1408 }
1409
1410
1411 /* Output to OB a list of try/catch handlers starting with FIRST. */
1412
1413 static void
1414 output_eh_try_list (struct output_block *ob, eh_catch first)
1415 {
1416 eh_catch n;
1417
1418 for (n = first; n; n = n->next_catch)
1419 {
1420 streamer_write_record_start (ob, LTO_eh_catch);
1421 stream_write_tree (ob, n->type_list, true);
1422 stream_write_tree (ob, n->filter_list, true);
1423 stream_write_tree (ob, n->label, true);
1424 }
1425
1426 streamer_write_record_start (ob, LTO_null);
1427 }
1428
1429
1430 /* Output EH region R to OB. R's index (its slot in the enclosing
1431 function's EH region array) is emitted along with it; this is used
1432 to detect EH region sharing. */
1433
1434 static void
1435 output_eh_region (struct output_block *ob, eh_region r)
1436 {
1437 enum LTO_tags tag;
1438
1439 if (r == NULL)
1440 {
1441 streamer_write_record_start (ob, LTO_null);
1442 return;
1443 }
1444
1445 if (r->type == ERT_CLEANUP)
1446 tag = LTO_ert_cleanup;
1447 else if (r->type == ERT_TRY)
1448 tag = LTO_ert_try;
1449 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1450 tag = LTO_ert_allowed_exceptions;
1451 else if (r->type == ERT_MUST_NOT_THROW)
1452 tag = LTO_ert_must_not_throw;
1453 else
1454 gcc_unreachable ();
1455
1456 streamer_write_record_start (ob, tag);
1457 streamer_write_hwi (ob, r->index);
1458
1459 if (r->outer)
1460 streamer_write_hwi (ob, r->outer->index);
1461 else
1462 streamer_write_zero (ob);
1463
1464 if (r->inner)
1465 streamer_write_hwi (ob, r->inner->index);
1466 else
1467 streamer_write_zero (ob);
1468
1469 if (r->next_peer)
1470 streamer_write_hwi (ob, r->next_peer->index);
1471 else
1472 streamer_write_zero (ob);
1473
1474 if (r->type == ERT_TRY)
1475 {
1476 output_eh_try_list (ob, r->u.eh_try.first_catch);
1477 }
1478 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1479 {
1480 stream_write_tree (ob, r->u.allowed.type_list, true);
1481 stream_write_tree (ob, r->u.allowed.label, true);
1482 streamer_write_uhwi (ob, r->u.allowed.filter);
1483 }
1484 else if (r->type == ERT_MUST_NOT_THROW)
1485 {
1486 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1487 bitpack_d bp = bitpack_create (ob->main_stream);
1488 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1489 streamer_write_bitpack (&bp);
1490 }
1491
1492 if (r->landing_pads)
1493 streamer_write_hwi (ob, r->landing_pads->index);
1494 else
1495 streamer_write_zero (ob);
1496 }
1497
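/* Illustrative example (sketch, not a format specification): an
   ERT_MUST_NOT_THROW region with index 1 and no outer, inner or peer
   regions is streamed as the tag LTO_ert_must_not_throw, hwi 1 for the
   index, three zeros for the absent outer/inner/next_peer links, the
   failure_decl tree, a bitpack carrying the failure location, and a
   final zero because the region has no landing pads.  */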
1498
1499 /* Output landing pad LP to OB. */
1500
1501 static void
1502 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1503 {
1504 if (lp == NULL)
1505 {
1506 streamer_write_record_start (ob, LTO_null);
1507 return;
1508 }
1509
1510 streamer_write_record_start (ob, LTO_eh_landing_pad);
1511 streamer_write_hwi (ob, lp->index);
1512 if (lp->next_lp)
1513 streamer_write_hwi (ob, lp->next_lp->index);
1514 else
1515 streamer_write_zero (ob);
1516
1517 if (lp->region)
1518 streamer_write_hwi (ob, lp->region->index);
1519 else
1520 streamer_write_zero (ob);
1521
1522 stream_write_tree (ob, lp->post_landing_pad, true);
1523 }
1524
1525
1526 /* Output the existing eh_table to OB. */
1527
1528 static void
1529 output_eh_regions (struct output_block *ob, struct function *fn)
1530 {
1531 if (fn->eh && fn->eh->region_tree)
1532 {
1533 unsigned i;
1534 eh_region eh;
1535 eh_landing_pad lp;
1536 tree ttype;
1537
1538 streamer_write_record_start (ob, LTO_eh_table);
1539
1540 /* Emit the index of the root of the EH region tree. */
1541 streamer_write_hwi (ob, fn->eh->region_tree->index);
1542
1543 /* Emit all the EH regions in the region array. */
1544 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1545 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1546 output_eh_region (ob, eh);
1547
1548 /* Emit all landing pads. */
1549 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1550 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1551 output_eh_lp (ob, lp);
1552
1553 /* Emit all the runtime type data. */
1554 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1555 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1556 stream_write_tree (ob, ttype, true);
1557
1558 /* Emit the table of action chains. */
1559 if (targetm.arm_eabi_unwinder)
1560 {
1561 tree t;
1562 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1563 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1564 stream_write_tree (ob, t, true);
1565 }
1566 else
1567 {
1568 uchar c;
1569 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1570 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1571 streamer_write_char_stream (ob->main_stream, c);
1572 }
1573 }
1574
1575 /* The LTO_null either terminates the record or indicates that there
1576 are no eh_records at all. */
1577 streamer_write_record_start (ob, LTO_null);
1578 }
1579
1580
1581 /* Output all of the active ssa names to the ssa_names stream. */
1582
1583 static void
1584 output_ssa_names (struct output_block *ob, struct function *fn)
1585 {
1586 unsigned int i, len;
1587
1588 len = vec_safe_length (SSANAMES (fn));
1589 streamer_write_uhwi (ob, len);
1590
1591 for (i = 1; i < len; i++)
1592 {
1593 tree ptr = (*SSANAMES (fn))[i];
1594
1595 if (ptr == NULL_TREE
1596 || SSA_NAME_IN_FREE_LIST (ptr)
1597 || virtual_operand_p (ptr))
1598 continue;
1599
1600 streamer_write_uhwi (ob, i);
1601 streamer_write_char_stream (ob->main_stream,
1602 SSA_NAME_IS_DEFAULT_DEF (ptr));
1603 if (SSA_NAME_VAR (ptr))
1604 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1605 else
1606 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1607 stream_write_tree (ob, TREE_TYPE (ptr), true);
1608 }
1609
1610 streamer_write_zero (ob);
1611 }
1612
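/* Illustrative example (sketch only): for an SSA name x_5 that is the
   default definition of PARM_DECL x, the loop above emits uhwi 5 (its
   position in SSANAMES, i.e. its version), one byte with value 1 for
   SSA_NAME_IS_DEFAULT_DEF, and a reference to the PARM_DECL x.  Names on
   the free list and virtual operands are skipped entirely.  */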
1613
1614 /* Output a wide-int. */
1615
1616 static void
1617 streamer_write_wi (struct output_block *ob,
1618 const widest_int &w)
1619 {
1620 int len = w.get_len ();
1621
1622 streamer_write_uhwi (ob, w.get_precision ());
1623 streamer_write_uhwi (ob, len);
1624 for (int i = 0; i < len; i++)
1625 streamer_write_hwi (ob, w.elt (i));
1626 }
1627
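/* Illustrative example (sketch only): for a loop whose
   nb_iterations_upper_bound is the widest_int value 100, the function
   above emits uhwi <precision of widest_int>, uhwi 1 for the length, and
   a single hwi element 100.  The reader can rebuild the widest_int from
   the precision, the length and the elements alone.  */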
1628
1629 /* Output the cfg. */
1630
1631 static void
1632 output_cfg (struct output_block *ob, struct function *fn)
1633 {
1634 struct lto_output_stream *tmp_stream = ob->main_stream;
1635 basic_block bb;
1636
1637 ob->main_stream = ob->cfg_stream;
1638
1639 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1640 profile_status_for_fn (fn));
1641
1642 /* Output the number of the highest basic block. */
1643 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1644
1645 FOR_ALL_BB_FN (bb, fn)
1646 {
1647 edge_iterator ei;
1648 edge e;
1649
1650 streamer_write_hwi (ob, bb->index);
1651
1652 /* Output the successors and the edge flags. */
1653 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1654 FOR_EACH_EDGE (e, ei, bb->succs)
1655 {
1656 streamer_write_uhwi (ob, e->dest->index);
1657 streamer_write_hwi (ob, e->probability);
1658 streamer_write_gcov_count (ob, e->count);
1659 streamer_write_uhwi (ob, e->flags);
1660 }
1661 }
1662
1663 streamer_write_hwi (ob, -1);
1664
1665 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1666 while (bb->next_bb)
1667 {
1668 streamer_write_hwi (ob, bb->next_bb->index);
1669 bb = bb->next_bb;
1670 }
1671
1672 streamer_write_hwi (ob, -1);
1673
1674 /* ??? The cfgloop interface is tied to cfun. */
1675 gcc_assert (cfun == fn);
1676
1677 /* Output the number of loops. */
1678 streamer_write_uhwi (ob, number_of_loops (fn));
1679
1680 /* Output each loop, skipping the tree root which has number zero. */
1681 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1682 {
1683 struct loop *loop = get_loop (fn, i);
1684
1685 /* Write the index of the loop header. That's enough to rebuild
1686 the loop tree on the reader side. Stream -1 for an unused
1687 loop entry. */
1688 if (!loop)
1689 {
1690 streamer_write_hwi (ob, -1);
1691 continue;
1692 }
1693 else
1694 streamer_write_hwi (ob, loop->header->index);
1695
1696 /* Write everything copy_loop_info copies. */
1697 streamer_write_enum (ob->main_stream,
1698 loop_estimation, EST_LAST, loop->estimate_state);
1699 streamer_write_hwi (ob, loop->any_upper_bound);
1700 if (loop->any_upper_bound)
1701 streamer_write_wi (ob, loop->nb_iterations_upper_bound);
1702 streamer_write_hwi (ob, loop->any_estimate);
1703 if (loop->any_estimate)
1704 streamer_write_wi (ob, loop->nb_iterations_estimate);
1705
1706 /* Write OMP SIMD related info. */
1707 streamer_write_hwi (ob, loop->safelen);
1708 streamer_write_hwi (ob, loop->dont_vectorize);
1709 streamer_write_hwi (ob, loop->force_vectorize);
1710 stream_write_tree (ob, loop->simduid, true);
1711 }
1712
1713 ob->main_stream = tmp_stream;
1714 }
1715
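/* Illustrative example (sketch only): a basic block with index 2 and a
   single fallthrough edge to block 3 contributes hwi 2, uhwi 1 for the
   successor count, then uhwi 3, the edge probability, the edge count and
   the edge flags.  The block records are terminated by hwi -1, after
   which the next_bb chain, another -1 terminator and the loop tree
   follow.  */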
1716
1717 /* Create the header in the file using OB. If the section type is for
1718 a function, FN is the decl for that function. */
1719
1720 void
1721 produce_asm (struct output_block *ob, tree fn)
1722 {
1723 enum lto_section_type section_type = ob->section_type;
1724 struct lto_function_header header;
1725 char *section_name;
1726 struct lto_output_stream *header_stream;
1727
1728 if (section_type == LTO_section_function_body)
1729 {
1730 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1731 section_name = lto_get_section_name (section_type, name, NULL);
1732 }
1733 else
1734 section_name = lto_get_section_name (section_type, NULL, NULL);
1735
1736 lto_begin_section (section_name, !flag_wpa);
1737 free (section_name);
1738
1739 /* The entire header is computed here and streamed below. */
1740 memset (&header, 0, sizeof (struct lto_function_header));
1741
1742 /* Write the header. */
1743 header.lto_header.major_version = LTO_major_version;
1744 header.lto_header.minor_version = LTO_minor_version;
1745
1746 header.compressed_size = 0;
1747
1748 if (section_type == LTO_section_function_body)
1749 header.cfg_size = ob->cfg_stream->total_size;
1750 header.main_size = ob->main_stream->total_size;
1751 header.string_size = ob->string_stream->total_size;
1752
1753 header_stream = XCNEW (struct lto_output_stream);
1754 lto_output_data_stream (header_stream, &header, sizeof header);
1755 lto_write_stream (header_stream);
1756 free (header_stream);
1757
1758 /* Put all of the gimple and the string table out to the asm file as a
1759 block of text. */
1760 if (section_type == LTO_section_function_body)
1761 lto_write_stream (ob->cfg_stream);
1762 lto_write_stream (ob->main_stream);
1763 lto_write_stream (ob->string_stream);
1764
1765 lto_end_section ();
1766 }
1767
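/* Illustrative layout (sketch only) of a function-body section as written
   above: the lto_function_header (version numbers and the
   compressed/cfg/main/string sizes) comes first, followed by the CFG
   stream, the main stream and the string stream, so a reader can slice
   the section using only the sizes recorded in the header.  */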
1768
1769 /* Output the base body of struct function FN using output block OB. */
1770
1771 static void
1772 output_struct_function_base (struct output_block *ob, struct function *fn)
1773 {
1774 struct bitpack_d bp;
1775 unsigned i;
1776 tree t;
1777
1778 /* Output the static chain and non-local goto save area. */
1779 stream_write_tree (ob, fn->static_chain_decl, true);
1780 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
1781
1782 /* Output all the local variables in the function. */
1783 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
1784 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
1785 stream_write_tree (ob, t, true);
1786
1787 /* Output current IL state of the function. */
1788 streamer_write_uhwi (ob, fn->curr_properties);
1789
1790 /* Write all the attributes for FN. */
1791 bp = bitpack_create (ob->main_stream);
1792 bp_pack_value (&bp, fn->is_thunk, 1);
1793 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
1794 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
1795 bp_pack_value (&bp, fn->returns_struct, 1);
1796 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
1797 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
1798 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
1799 bp_pack_value (&bp, fn->after_inlining, 1);
1800 bp_pack_value (&bp, fn->stdarg, 1);
1801 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
1802 bp_pack_value (&bp, fn->calls_alloca, 1);
1803 bp_pack_value (&bp, fn->calls_setjmp, 1);
1804 bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
1805 bp_pack_value (&bp, fn->has_simduid_loops, 1);
1806 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
1807 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
1808
1809 /* Output the function start and end loci. */
1810 stream_output_location (ob, &bp, fn->function_start_locus);
1811 stream_output_location (ob, &bp, fn->function_end_locus);
1812
1813 streamer_write_bitpack (&bp);
1814 }
1815
1816
1817 /* Output the body of function NODE->DECL. */
1818
1819 static void
1820 output_function (struct cgraph_node *node)
1821 {
1822 tree function;
1823 struct function *fn;
1824 basic_block bb;
1825 struct output_block *ob;
1826
1827 function = node->decl;
1828 fn = DECL_STRUCT_FUNCTION (function);
1829 ob = create_output_block (LTO_section_function_body);
1830
1831 clear_line_info (ob);
1832 ob->cgraph_node = node;
1833
1834 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
1835
1836 /* Set current_function_decl and cfun. */
1837 push_cfun (fn);
1838
1839 /* Make string 0 be a NULL string. */
1840 streamer_write_char_stream (ob->string_stream, 0);
1841
1842 streamer_write_record_start (ob, LTO_function);
1843
1844 /* Output decls for parameters and args. */
1845 stream_write_tree (ob, DECL_RESULT (function), true);
1846 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
1847
1848 /* Output DECL_INITIAL for the function, which contains the tree of
1849 lexical scopes. */
1850 stream_write_tree (ob, DECL_INITIAL (function), true);
1851
1852 /* We also stream abstract functions, for which we stream only the parts
1853 needed for debug info. */
1854 if (gimple_has_body_p (function))
1855 {
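/* Non-zero marks that a full GIMPLE body follows. */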
1856 streamer_write_uhwi (ob, 1);
1857 output_struct_function_base (ob, fn);
1858
1859 /* Output all the SSA names used in the function. */
1860 output_ssa_names (ob, fn);
1861
1862 /* Output any exception handling regions. */
1863 output_eh_regions (ob, fn);
1864
1865
1866 /* We will renumber the statements. The code that does this uses
1867 the same ordering that we use for serializing them, so we can use
1868 the same code on the other end and not have to write out the
1869 statement numbers. We do not assign UIDs to PHIs here because
1870 virtual PHIs get recomputed on the fly, which would make the numbers
1871 inconsistent. */
1872 set_gimple_stmt_max_uid (cfun, 0);
1873 FOR_ALL_BB_FN (bb, cfun)
1874 {
1875 gimple_stmt_iterator gsi;
1876 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1877 {
1878 gimple stmt = gsi_stmt (gsi);
1879
1880 /* Virtual PHIs are not going to be streamed. */
1881 if (!virtual_operand_p (gimple_phi_result (stmt)))
1882 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1883 }
1884 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1885 {
1886 gimple stmt = gsi_stmt (gsi);
1887 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1888 }
1889 }
1890 /* To avoid keeping duplicate gimple IDs in the statements, renumber
1891 virtual phis now. */
1892 FOR_ALL_BB_FN (bb, cfun)
1893 {
1894 gimple_stmt_iterator gsi;
1895 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1896 {
1897 gimple stmt = gsi_stmt (gsi);
1898 if (virtual_operand_p (gimple_phi_result (stmt)))
1899 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1900 }
1901 }
1902
1903 /* Output the code for the function. */
1904 FOR_ALL_BB_FN (bb, fn)
1905 output_bb (ob, bb, fn);
1906
1907 /* The terminator for this function. */
1908 streamer_write_record_start (ob, LTO_null);
1909
1910 output_cfg (ob, fn);
1911
1912 pop_cfun ();
1913 }
1914 else
1915 streamer_write_uhwi (ob, 0);
1916
1917 /* Create a section to hold the pickled output of this function. */
1918 produce_asm (ob, function);
1919
1920 destroy_output_block (ob);
1921 }
1922
1923
1924 /* Emit toplevel asms. */
1925
1926 void
1927 lto_output_toplevel_asms (void)
1928 {
1929 struct output_block *ob;
1930 struct asm_node *can;
1931 char *section_name;
1932 struct lto_output_stream *header_stream;
1933 struct lto_asm_header header;
1934
1935 if (! asm_nodes)
1936 return;
1937
1938 ob = create_output_block (LTO_section_asm);
1939
1940 /* Make string 0 be a NULL string. */
1941 streamer_write_char_stream (ob->string_stream, 0);
1942
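/* Stream each toplevel asm string together with its order. */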
1943 for (can = asm_nodes; can; can = can->next)
1944 {
1945 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
1946 streamer_write_hwi (ob, can->order);
1947 }
1948
1949 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
1950
1951 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
1952 lto_begin_section (section_name, !flag_wpa);
1953 free (section_name);
1954
1955 /* The entire header stream is computed here. */
1956 memset (&header, 0, sizeof (header));
1957
1958 /* Write the header. */
1959 header.lto_header.major_version = LTO_major_version;
1960 header.lto_header.minor_version = LTO_minor_version;
1961
1962 header.main_size = ob->main_stream->total_size;
1963 header.string_size = ob->string_stream->total_size;
1964
1965 header_stream = XCNEW (struct lto_output_stream);
1966 lto_output_data_stream (header_stream, &header, sizeof (header));
1967 lto_write_stream (header_stream);
1968 free (header_stream);
1969
1970 /* Put all of the gimple and the string table out to the asm file as a
1971 block of text. */
1972 lto_write_stream (ob->main_stream);
1973 lto_write_stream (ob->string_stream);
1974
1975 lto_end_section ();
1976
1977 destroy_output_block (ob);
1978 }
1979
1980
1981 /* Copy the function body of NODE without deserializing. */
1982
1983 static void
1984 copy_function (struct cgraph_node *node)
1985 {
1986 tree function = node->decl;
1987 struct lto_file_decl_data *file_data = node->lto_file_data;
1988 struct lto_output_stream *output_stream = XCNEW (struct lto_output_stream);
1989 const char *data;
1990 size_t len;
1991 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
1992 char *section_name =
1993 lto_get_section_name (LTO_section_function_body, name, NULL);
1994 size_t i, j;
1995 struct lto_in_decl_state *in_state;
1996 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
1997
1998 lto_begin_section (section_name, !flag_wpa);
1999 free (section_name);
2000
2001 /* We may have renamed the declaration, e.g., a static function. */
2002 name = lto_get_decl_name_mapping (file_data, name);
2003
2004 data = lto_get_section_data (file_data, LTO_section_function_body,
2005 name, &len);
2006 gcc_assert (data);
2007
2008 /* Do a bit copy of the function body. */
2009 lto_output_data_stream (output_stream, data, len);
2010 lto_write_stream (output_stream);
2011
2012 /* Copy decls. */
2013 in_state =
2014 lto_get_function_in_decl_state (node->lto_file_data, function);
2015 gcc_assert (in_state);
2016
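/* Copy the decl streams from the in state into the out state so that
references in the copied body resolve to the same indices. */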
2017 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2018 {
2019 size_t n = in_state->streams[i].size;
2020 tree *trees = in_state->streams[i].trees;
2021 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2022
2023 /* The out state must have the same indices as the in state,
2024 so just copy the vector. All the encoders in the out state
2025 must be empty when we reach here. */
2026 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2027 encoder->trees.reserve_exact (n);
2028 for (j = 0; j < n; j++)
2029 encoder->trees.safe_push (trees[j]);
2030 }
2031
2032 lto_free_section_data (file_data, LTO_section_function_body, name,
2033 data, len);
2034 free (output_stream);
2035 lto_end_section ();
2036 }
2037
2038 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2039
2040 static tree
2041 wrap_refs (tree *tp, int *ws, void *)
2042 {
2043 tree t = *tp;
2044 if (handled_component_p (t)
2045 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL)
2046 {
2047 tree decl = TREE_OPERAND (t, 0);
2048 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2049 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2050 build1 (ADDR_EXPR, ptrtype, decl),
2051 build_int_cst (ptrtype, 0));
2052 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2053 *ws = 0;
2054 }
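/* Descend into CONSTRUCTOR elements so references inside them get
wrapped as well. */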
2055 else if (TREE_CODE (t) == CONSTRUCTOR)
2056 ;
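/* Do not walk into subtrees of anything that is not an expression. */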
2057 else if (!EXPR_P (t))
2058 *ws = 0;
2059 return NULL_TREE;
2060 }
2061
2062 /* Main entry point from the pass manager. */
2063
2064 void
2065 lto_output (void)
2066 {
2067 struct lto_out_decl_state *decl_state;
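/* With checking enabled, track which function bodies have already been
written so that each one is output at most once. */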
2068 #ifdef ENABLE_CHECKING
2069 bitmap output = lto_bitmap_alloc ();
2070 #endif
2071 int i, n_nodes;
2072 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2073
2074 /* Initialize the streamer. */
2075 lto_streamer_init ();
2076
2077 n_nodes = lto_symtab_encoder_size (encoder);
2078 /* Process only the functions with bodies. */
2079 for (i = 0; i < n_nodes; i++)
2080 {
2081 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2082 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2083 {
2084 if (lto_symtab_encoder_encode_body_p (encoder, node)
2085 && !node->alias)
2086 {
2087 #ifdef ENABLE_CHECKING
2088 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2089 bitmap_set_bit (output, DECL_UID (node->decl));
2090 #endif
2091 decl_state = lto_new_out_decl_state ();
2092 lto_push_out_decl_state (decl_state);
2093 if (gimple_has_body_p (node->decl) || !flag_wpa)
2094 output_function (node);
2095 else
2096 copy_function (node);
2097 gcc_assert (lto_get_out_decl_state () == decl_state);
2098 lto_pop_out_decl_state ();
2099 lto_record_function_out_decl_state (node->decl, decl_state);
2100 }
2101 }
2102 else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2103 {
2104 /* Wrap symbol references inside the ctor in a type
2105 preserving MEM_REF. */
2106 tree ctor = DECL_INITIAL (node->decl);
2107 if (ctor && !in_lto_p)
2108 walk_tree (&ctor, wrap_refs, NULL, NULL);
2109 }
2110 }
2111
2112 /* Emit the callgraph after emitting function bodies. This needs to
2113 be done now to make sure that all the statements in every function
2114 have been renumbered so that edges can be associated with call
2115 statements using the statement UIDs. */
2116 output_symtab ();
2117
2118 #ifdef ENABLE_CHECKING
2119 lto_bitmap_free (output);
2120 #endif
2121 }
2122
2123 /* Write each node encoded by ENCODER to OB, as well as those reachable
2124 from it and required for correct representation of its semantics.
2125 Each node in ENCODER must be a global declaration or a type. A node
2126 is written only once, even if it appears multiple times in the
2127 vector. Certain transitively-reachable nodes, such as those
2128 representing expressions, may be duplicated, but such nodes
2129 must not appear in ENCODER itself. */
2130
2131 static void
2132 write_global_stream (struct output_block *ob,
2133 struct lto_tree_ref_encoder *encoder)
2134 {
2135 tree t;
2136 size_t index;
2137 const size_t size = lto_tree_ref_encoder_size (encoder);
2138
2139 for (index = 0; index < size; index++)
2140 {
2141 t = lto_tree_ref_encoder_get_tree (encoder, index);
2142 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2143 stream_write_tree (ob, t, false);
2144 }
2145 }
2146
2147
2148 /* Write a sequence of indices into the globals vector corresponding
2149 to the trees in ENCODER. These are used by the reader to map the
2150 indices used to refer to global entities within function bodies to
2151 their referents. */
2152
2153 static void
2154 write_global_references (struct output_block *ob,
2155 struct lto_output_stream *ref_stream,
2156 struct lto_tree_ref_encoder *encoder)
2157 {
2158 tree t;
2159 uint32_t index;
2160 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2161
2162 /* Write size as 32-bit unsigned. */
2163 lto_output_data_stream (ref_stream, &size, sizeof (int32_t));
2164
2165 for (index = 0; index < size; index++)
2166 {
2167 uint32_t slot_num;
2168
2169 t = lto_tree_ref_encoder_get_tree (encoder, index);
2170 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2171 gcc_assert (slot_num != (unsigned)-1);
2172 lto_output_data_stream (ref_stream, &slot_num, sizeof slot_num);
2173 }
2174 }
2175
2176
2177 /* Write all the streams in an lto_out_decl_state STATE using
2178 output block OB. */
2179
2180 void
2181 lto_output_decl_state_streams (struct output_block *ob,
2182 struct lto_out_decl_state *state)
2183 {
2184 int i;
2185
2186 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2187 write_global_stream (ob, &state->streams[i]);
2188 }
2189
2190
2191 /* Write all the references in an lto_out_decl_state STATE using
2192 output block OB and output stream OUT_STREAM. */
2193
2194 void
2195 lto_output_decl_state_refs (struct output_block *ob,
2196 struct lto_output_stream *out_stream,
2197 struct lto_out_decl_state *state)
2198 {
2199 unsigned i;
2200 uint32_t ref;
2201 tree decl;
2202
2203 /* Write the reference to the FUNCTION_DECL. If there is no function,
2204 write a reference to void_type_node. */
2205 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2206 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2207 gcc_assert (ref != (unsigned)-1);
2208 lto_output_data_stream (out_stream, &ref, sizeof (uint32_t));
2209
2210 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2211 write_global_references (ob, out_stream, &state->streams[i]);
2212 }
2213
2214
2215 /* Return the written size of STATE. */
2216
2217 static size_t
2218 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2219 {
2220 int i;
2221 size_t size;
2222
2223 size = sizeof (int32_t); /* fn_ref. */
2224 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2225 {
2226 size += sizeof (int32_t); /* vector size. */
2227 size += (lto_tree_ref_encoder_size (&state->streams[i])
2228 * sizeof (int32_t));
2229 }
2230 return size;
2231 }
2232
2233
2234 /* Write symbol T into STREAM using CACHE. SEEN specifies the symbols we
2235 have written so far. */
2236
2237 static void
2238 write_symbol (struct streamer_tree_cache_d *cache,
2239 struct lto_output_stream *stream,
2240 tree t, struct pointer_set_t *seen, bool alias)
2241 {
2242 const char *name;
2243 enum gcc_plugin_symbol_kind kind;
2244 enum gcc_plugin_symbol_visibility visibility;
2245 unsigned slot_num;
2246 unsigned HOST_WIDEST_INT size;
2247 const char *comdat;
2248 unsigned char c;
2249
2250 /* None of the following kinds of symbols are needed in the
2251 symbol table. */
2252 if (!TREE_PUBLIC (t)
2253 || is_builtin_fn (t)
2254 || DECL_ABSTRACT (t)
2255 || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
2256 return;
2257 gcc_assert (TREE_CODE (t) != RESULT_DECL);
2258
2259 gcc_assert (TREE_CODE (t) == VAR_DECL
2260 || TREE_CODE (t) == FUNCTION_DECL);
2261
2262 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2263
2264 /* This behaves like assemble_name_raw in varasm.c, performing the
2265 same name manipulations that ASM_OUTPUT_LABELREF does. */
2266 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2267
2268 if (pointer_set_contains (seen, name))
2269 return;
2270 pointer_set_insert (seen, name);
2271
2272 streamer_tree_cache_lookup (cache, t, &slot_num);
2273 gcc_assert (slot_num != (unsigned)-1);
2274
2275 if (DECL_EXTERNAL (t))
2276 {
2277 if (DECL_WEAK (t))
2278 kind = GCCPK_WEAKUNDEF;
2279 else
2280 kind = GCCPK_UNDEF;
2281 }
2282 else
2283 {
2284 if (DECL_WEAK (t))
2285 kind = GCCPK_WEAKDEF;
2286 else if (DECL_COMMON (t))
2287 kind = GCCPK_COMMON;
2288 else
2289 kind = GCCPK_DEF;
2290
2291 /* When something is defined, it should have a node attached. */
2292 gcc_assert (alias || TREE_CODE (t) != VAR_DECL
2293 || varpool_get_node (t)->definition);
2294 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2295 || (cgraph_get_node (t)
2296 && cgraph_get_node (t)->definition));
2297 }
2298
2299 /* Imitate what default_elf_asm_output_external does.
2300 When a symbol is external, we need to output it with DEFAULT visibility
2301 when compiling with -fvisibility=default, but with HIDDEN visibility
2302 when the symbol has attribute (visibility("hidden")) specified.
2303 targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
2304 right. */
2305
2306 if (DECL_EXTERNAL (t)
2307 && !targetm.binds_local_p (t))
2308 visibility = GCCPV_DEFAULT;
2309 else
2310 switch (DECL_VISIBILITY (t))
2311 {
2312 case VISIBILITY_DEFAULT:
2313 visibility = GCCPV_DEFAULT;
2314 break;
2315 case VISIBILITY_PROTECTED:
2316 visibility = GCCPV_PROTECTED;
2317 break;
2318 case VISIBILITY_HIDDEN:
2319 visibility = GCCPV_HIDDEN;
2320 break;
2321 case VISIBILITY_INTERNAL:
2322 visibility = GCCPV_INTERNAL;
2323 break;
2324 }
2325
2326 if (kind == GCCPK_COMMON
2327 && DECL_SIZE_UNIT (t)
2328 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2329 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2330 else
2331 size = 0;
2332
2333 if (DECL_ONE_ONLY (t))
2334 comdat = IDENTIFIER_POINTER (DECL_COMDAT_GROUP (t));
2335 else
2336 comdat = "";
2337
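/* Emit the entry: NUL-terminated name and comdat group, one byte each
for kind and visibility, 8 bytes of size and 4 bytes of slot number. */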
2338 lto_output_data_stream (stream, name, strlen (name) + 1);
2339 lto_output_data_stream (stream, comdat, strlen (comdat) + 1);
2340 c = (unsigned char) kind;
2341 lto_output_data_stream (stream, &c, 1);
2342 c = (unsigned char) visibility;
2343 lto_output_data_stream (stream, &c, 1);
2344 lto_output_data_stream (stream, &size, 8);
2345 lto_output_data_stream (stream, &slot_num, 4);
2346 }
2347
2348 /* Return true if NODE should appear in the plugin symbol table. */
2349
2350 bool
2351 output_symbol_p (symtab_node *node)
2352 {
2353 struct cgraph_node *cnode;
2354 if (!symtab_real_symbol_p (node))
2355 return false;
2356 /* We keep external functions in the symtab for the sake of inlining
2357 and devirtualization. We do not want to see them in the symbol table as
2358 references unless they are really used. */
2359 cnode = dyn_cast <cgraph_node *> (node);
2360 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2361 && cnode->callers)
2362 return true;
2363
2364 /* Ignore all references from external vars' initializers - they are not really
2365 part of the compilation unit until they are used by folding. Some symbols,
2366 like references to external construction vtables, cannot be referred to at all.
2367 We decide this in can_refer_decl_in_current_unit_p. */
2368 if (!node->definition || DECL_EXTERNAL (node->decl))
2369 {
2370 int i;
2371 struct ipa_ref *ref;
2372 for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list,
2373 i, ref); i++)
2374 {
2375 if (ref->use == IPA_REF_ALIAS)
2376 continue;
2377 if (is_a <cgraph_node *> (ref->referring))
2378 return true;
2379 if (!DECL_EXTERNAL (ref->referring->decl))
2380 return true;
2381 }
2382 return false;
2383 }
2384 return true;
2385 }
2386
2387
2388 /* Write an IL symbol table to OB. The symbols written are those
2389 recorded in the symtab node encoder of OB's decl state. */
2390
2391 static void
2392 produce_symtab (struct output_block *ob)
2393 {
2394 struct streamer_tree_cache_d *cache = ob->writer_cache;
2395 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2396 struct pointer_set_t *seen;
2397 struct lto_output_stream stream;
2398 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2399 lto_symtab_encoder_iterator lsei;
2400
2401 lto_begin_section (section_name, false);
2402 free (section_name);
2403
2404 seen = pointer_set_create ();
2405 memset (&stream, 0, sizeof (stream));
2406
2407 /* Write the symbol table.
2408 First write everything defined and then all declarations.
2409 This is necessary to handle cases where we have duplicated symbols. */
2410 for (lsei = lsei_start (encoder);
2411 !lsei_end_p (lsei); lsei_next (&lsei))
2412 {
2413 symtab_node *node = lsei_node (lsei);
2414
2415 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2416 continue;
2417 write_symbol (cache, &stream, node->decl, seen, false);
2418 }
2419 for (lsei = lsei_start (encoder);
2420 !lsei_end_p (lsei); lsei_next (&lsei))
2421 {
2422 symtab_node *node = lsei_node (lsei);
2423
2424 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2425 continue;
2426 write_symbol (cache, &stream, node->decl, seen, false);
2427 }
2428
2429 lto_write_stream (&stream);
2430 pointer_set_destroy (seen);
2431
2432 lto_end_section ();
2433 }
2434
2435
2436 /* This pass is run after all of the functions are serialized and all
2437 of the IPA passes have written their serialized forms. This pass
2438 causes the vector of all of the global decls and types used from
2439 this file to be written into a section that can then be read back to
2440 recover these on the other side. */
2441
2442 void
2443 produce_asm_for_decls (void)
2444 {
2445 struct lto_out_decl_state *out_state;
2446 struct lto_out_decl_state *fn_out_state;
2447 struct lto_decl_header header;
2448 char *section_name;
2449 struct output_block *ob;
2450 struct lto_output_stream *header_stream, *decl_state_stream;
2451 unsigned idx, num_fns;
2452 size_t decl_state_size;
2453 int32_t num_decl_states;
2454
2455 ob = create_output_block (LTO_section_decls);
2456 ob->global = true;
2457
2458 memset (&header, 0, sizeof (struct lto_decl_header));
2459
2460 section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
2461 lto_begin_section (section_name, !flag_wpa);
2462 free (section_name);
2463
2464 /* Make string 0 be a NULL string. */
2465 streamer_write_char_stream (ob->string_stream, 0);
2466
2467 gcc_assert (!alias_pairs);
2468
2469 /* Get rid of the global decl state hash tables to save some memory. */
2470 out_state = lto_get_out_decl_state ();
2471 for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
2472 if (out_state->streams[i].tree_hash_table)
2473 {
2474 delete out_state->streams[i].tree_hash_table;
2475 out_state->streams[i].tree_hash_table = NULL;
2476 }
2477
2478 /* Write the global symbols. */
2479 lto_output_decl_state_streams (ob, out_state);
2480 num_fns = lto_function_decl_states.length ();
2481 for (idx = 0; idx < num_fns; idx++)
2482 {
2483 fn_out_state =
2484 lto_function_decl_states[idx];
2485 lto_output_decl_state_streams (ob, fn_out_state);
2486 }
2487
2488 header.lto_header.major_version = LTO_major_version;
2489 header.lto_header.minor_version = LTO_minor_version;
2490
2491 /* Currently not used. This field would allow us to preallocate
2492 the globals vector, so that it need not be resized as it is extended. */
2493 header.num_nodes = -1;
2494
2495 /* Compute the total size of all decl out states. */
2496 decl_state_size = sizeof (int32_t);
2497 decl_state_size += lto_out_decl_state_written_size (out_state);
2498 for (idx = 0; idx < num_fns; idx++)
2499 {
2500 fn_out_state =
2501 lto_function_decl_states[idx];
2502 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
2503 }
2504 header.decl_state_size = decl_state_size;
2505
2506 header.main_size = ob->main_stream->total_size;
2507 header.string_size = ob->string_stream->total_size;
2508
2509 header_stream = XCNEW (struct lto_output_stream);
2510 lto_output_data_stream (header_stream, &header, sizeof header);
2511 lto_write_stream (header_stream);
2512 free (header_stream);
2513
2514 /* Write the main out-decl state, followed by out-decl states of
2515 functions. */
2516 decl_state_stream = XCNEW (struct lto_output_stream);
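/* One decl state for the global scope plus one per function. */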
2517 num_decl_states = num_fns + 1;
2518 lto_output_data_stream (decl_state_stream, &num_decl_states,
2519 sizeof (num_decl_states));
2520 lto_output_decl_state_refs (ob, decl_state_stream, out_state);
2521 for (idx = 0; idx < num_fns; idx++)
2522 {
2523 fn_out_state =
2524 lto_function_decl_states[idx];
2525 lto_output_decl_state_refs (ob, decl_state_stream, fn_out_state);
2526 }
2527 lto_write_stream (decl_state_stream);
2528 free (decl_state_stream);
2529
2530 lto_write_stream (ob->main_stream);
2531 lto_write_stream (ob->string_stream);
2532
2533 lto_end_section ();
2534
2535 /* Write the symbol table. It is used by the linker to determine
2536 dependencies, and thus we can skip it for WPA. */
2537 if (!flag_wpa)
2538 produce_symtab (ob);
2539
2540 /* Write command line opts. */
2541 lto_write_options ();
2542
2543 /* Deallocate memory and clean up. */
2544 for (idx = 0; idx < num_fns; idx++)
2545 {
2546 fn_out_state =
2547 lto_function_decl_states[idx];
2548 lto_delete_out_decl_state (fn_out_state);
2549 }
2550 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
2551 lto_function_decl_states.release ();
2552 destroy_output_block (ob);
2553 }