gcc/lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2014 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "stor-layout.h"
29 #include "stringpool.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "hashtab.h"
35 #include "basic-block.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-expr.h"
39 #include "is-a.h"
40 #include "gimple.h"
41 #include "gimple-iterator.h"
42 #include "gimple-ssa.h"
43 #include "tree-ssanames.h"
44 #include "tree-pass.h"
45 #include "function.h"
46 #include "diagnostic-core.h"
47 #include "inchash.h"
48 #include "except.h"
49 #include "lto-symtab.h"
50 #include "lto-streamer.h"
51 #include "data-streamer.h"
52 #include "gimple-streamer.h"
53 #include "tree-streamer.h"
54 #include "streamer-hooks.h"
55 #include "cfgloop.h"
56 #include "builtins.h"
57
58
59 static void lto_write_tree (struct output_block*, tree, bool);
60
61 /* Clear the line info stored in output block OB. */
62
63 static void
64 clear_line_info (struct output_block *ob)
65 {
66 ob->current_file = NULL;
67 ob->current_line = 0;
68 ob->current_col = 0;
69 }
70
71
72 /* Create the output block and return it. SECTION_TYPE is
73 LTO_section_function_body or LTO_section_static_initializer. */
74
75 struct output_block *
76 create_output_block (enum lto_section_type section_type)
77 {
78 struct output_block *ob = XCNEW (struct output_block);
79
80 ob->section_type = section_type;
81 ob->decl_state = lto_get_out_decl_state ();
82 ob->main_stream = XCNEW (struct lto_output_stream);
83 ob->string_stream = XCNEW (struct lto_output_stream);
84 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
85
86 if (section_type == LTO_section_function_body)
87 ob->cfg_stream = XCNEW (struct lto_output_stream);
88
89 clear_line_info (ob);
90
91 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
92 gcc_obstack_init (&ob->obstack);
93
94 return ob;
95 }
96
97
98 /* Destroy the output block OB. */
99
100 void
101 destroy_output_block (struct output_block *ob)
102 {
103 enum lto_section_type section_type = ob->section_type;
104
105 delete ob->string_hash_table;
106 ob->string_hash_table = NULL;
107
108 free (ob->main_stream);
109 free (ob->string_stream);
110 if (section_type == LTO_section_function_body)
111 free (ob->cfg_stream);
112
113 streamer_tree_cache_delete (ob->writer_cache);
114 obstack_free (&ob->obstack, NULL);
115
116 free (ob);
117 }
118
119
120 /* Look up NODE in the type table and write the index for it to OB. */
121
122 static void
123 output_type_ref (struct output_block *ob, tree node)
124 {
125 streamer_write_record_start (ob, LTO_type_ref);
126 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
127 }
128
129
130 /* Return true if tree node T is written to various tables. For these
131 nodes, we sometimes want to write their physical representation
132 (via lto_output_tree), and sometimes we need to emit an index
133 reference into a table (via lto_output_tree_ref). */
134
135 static bool
136 tree_is_indexable (tree t)
137 {
138 /* Parameters and return values of functions of variably modified types
139 must go to global stream, because they may be used in the type
140 definition. */
141 if (TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
142 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
143 /* IMPORTED_DECL is put into BLOCK and thus it never can be shared. */
144 else if (TREE_CODE (t) == IMPORTED_DECL)
145 return false;
146 else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
147 || TREE_CODE (t) == TYPE_DECL
148 || TREE_CODE (t) == CONST_DECL
149 || TREE_CODE (t) == NAMELIST_DECL)
150 && decl_function_context (t))
151 return false;
152 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
153 return false;
154 /* Variably modified types need to be streamed alongside function
155 bodies because they can refer to local entities. Together with
156 them we have to localize their members as well.
157 ??? In theory that includes non-FIELD_DECLs as well. */
158 else if (TYPE_P (t)
159 && variably_modified_type_p (t, NULL_TREE))
160 return false;
161 else if (TREE_CODE (t) == FIELD_DECL
162 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
163 return false;
164 else
165 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
166 }
167
168
169 /* Output info about new location into bitpack BP.
170 After outputting bitpack, lto_output_location_data has
171 to be done to output actual data. */
172
173 void
174 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
175 location_t loc)
176 {
177 expanded_location xloc;
178
179 loc = LOCATION_LOCUS (loc);
180 bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
181 if (loc == UNKNOWN_LOCATION)
182 return;
183
184 xloc = expand_location (loc);
185
186 bp_pack_value (bp, ob->current_file != xloc.file, 1);
187 bp_pack_value (bp, ob->current_line != xloc.line, 1);
188 bp_pack_value (bp, ob->current_col != xloc.column, 1);
189
190 if (ob->current_file != xloc.file)
191 bp_pack_var_len_unsigned (bp,
192 streamer_string_index (ob, xloc.file,
193 strlen (xloc.file) + 1,
194 true));
195 ob->current_file = xloc.file;
196
197 if (ob->current_line != xloc.line)
198 bp_pack_var_len_unsigned (bp, xloc.line);
199 ob->current_line = xloc.line;
200
201 if (ob->current_col != xloc.column)
202 bp_pack_var_len_unsigned (bp, xloc.column);
203 ob->current_col = xloc.column;
204 }
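
/* Illustrative sketch, for exposition only (hence the #if 0): the function
   above delta-encodes locations against the file/line/column cached in the
   output block, so an unchanged field costs a single bit.  The same idea in
   stand-alone C, with printf standing in for the bitpack/stream writers:  */
#if 0
#include <stdio.h>

struct loc_state { const char *file; int line, col; };

static void
emit_location (struct loc_state *st, const char *file, int line, int col)
{
  int file_changed = st->file != file;
  int line_changed = st->line != line;
  int col_changed = st->col != col;
  /* First the three "changed" bits ...  */
  printf ("%d%d%d", file_changed, line_changed, col_changed);
  /* ... then only the fields that actually changed.  */
  if (file_changed)
    printf (" file=%s", file);
  if (line_changed)
    printf (" line=%d", line);
  if (col_changed)
    printf (" col=%d", col);
  printf ("\n");
  /* Update the cached state for the next location.  */
  st->file = file;
  st->line = line;
  st->col = col;
}
#endif
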
205
206
207 /* If EXPR is an indexable tree node, output a reference to it to
208 output block OB. Otherwise, output the physical representation of
209 EXPR to OB. */
210
211 static void
212 lto_output_tree_ref (struct output_block *ob, tree expr)
213 {
214 enum tree_code code;
215
216 if (TYPE_P (expr))
217 {
218 output_type_ref (ob, expr);
219 return;
220 }
221
222 code = TREE_CODE (expr);
223 switch (code)
224 {
225 case SSA_NAME:
226 streamer_write_record_start (ob, LTO_ssa_name_ref);
227 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
228 break;
229
230 case FIELD_DECL:
231 streamer_write_record_start (ob, LTO_field_decl_ref);
232 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
233 break;
234
235 case FUNCTION_DECL:
236 streamer_write_record_start (ob, LTO_function_decl_ref);
237 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
238 break;
239
240 case VAR_DECL:
241 case DEBUG_EXPR_DECL:
242 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
243 case PARM_DECL:
244 streamer_write_record_start (ob, LTO_global_decl_ref);
245 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
246 break;
247
248 case CONST_DECL:
249 streamer_write_record_start (ob, LTO_const_decl_ref);
250 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
251 break;
252
253 case IMPORTED_DECL:
254 gcc_assert (decl_function_context (expr) == NULL);
255 streamer_write_record_start (ob, LTO_imported_decl_ref);
256 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
257 break;
258
259 case TYPE_DECL:
260 streamer_write_record_start (ob, LTO_type_decl_ref);
261 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
262 break;
263
264 case NAMELIST_DECL:
265 streamer_write_record_start (ob, LTO_namelist_decl_ref);
266 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
267 break;
268
269 case NAMESPACE_DECL:
270 streamer_write_record_start (ob, LTO_namespace_decl_ref);
271 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
272 break;
273
274 case LABEL_DECL:
275 streamer_write_record_start (ob, LTO_label_decl_ref);
276 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
277 break;
278
279 case RESULT_DECL:
280 streamer_write_record_start (ob, LTO_result_decl_ref);
281 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
282 break;
283
284 case TRANSLATION_UNIT_DECL:
285 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
286 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
287 break;
288
289 default:
290 /* No other node is indexable, so it should have been handled by
291 lto_output_tree. */
292 gcc_unreachable ();
293 }
294 }
295
296
297 /* Return true if EXPR is a tree node that can be written to disk. */
298
299 static inline bool
300 lto_is_streamable (tree expr)
301 {
302 enum tree_code code = TREE_CODE (expr);
303
304 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
305 name version in lto_output_tree_ref (see output_ssa_names). */
306 return !is_lang_specific (expr)
307 && code != SSA_NAME
308 && code != CALL_EXPR
309 && code != LANG_TYPE
310 && code != MODIFY_EXPR
311 && code != INIT_EXPR
312 && code != TARGET_EXPR
313 && code != BIND_EXPR
314 && code != WITH_CLEANUP_EXPR
315 && code != STATEMENT_LIST
316 && (code == CASE_LABEL_EXPR
317 || code == DECL_EXPR
318 || TREE_CODE_CLASS (code) != tcc_statement);
319 }
320
321
322 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
323
324 static tree
325 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
326 {
327 gcc_checking_assert (DECL_P (expr)
328 && TREE_CODE (expr) != FUNCTION_DECL
329 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
330
331 /* Handle DECL_INITIAL for symbols. */
332 tree initial = DECL_INITIAL (expr);
333 if (TREE_CODE (expr) == VAR_DECL
334 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
335 && !DECL_IN_CONSTANT_POOL (expr)
336 && initial)
337 {
338 varpool_node *vnode;
339 /* Extra section needs about 30 bytes; do not produce it for simple
340 scalar values. */
341 if (TREE_CODE (DECL_INITIAL (expr)) == CONSTRUCTOR
342 || !(vnode = varpool_node::get (expr))
343 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
344 initial = error_mark_node;
345 }
346
347 return initial;
348 }
349
350
351 /* Write a physical representation of tree node EXPR to output block
352 OB. If REF_P is true, the leaves of EXPR are emitted as references
353 via lto_output_tree_ref. EXPR has already been entered into
354 the streamer cache by our callers. */
355
356 static void
357 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
358 {
359 /* Pack all the non-pointer fields in EXPR into a bitpack and write
360 the resulting bitpack. */
361 bitpack_d bp = bitpack_create (ob->main_stream);
362 streamer_pack_tree_bitfields (ob, &bp, expr);
363 streamer_write_bitpack (&bp);
364
365 /* Write all the pointer fields in EXPR. */
366 streamer_write_tree_body (ob, expr, ref_p);
367
368 /* Write any LTO-specific data to OB. */
369 if (DECL_P (expr)
370 && TREE_CODE (expr) != FUNCTION_DECL
371 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
372 {
373 /* Handle DECL_INITIAL for symbols. */
374 tree initial = get_symbol_initial_value
375 (ob->decl_state->symtab_node_encoder, expr);
376 stream_write_tree (ob, initial, ref_p);
377 }
378 }
379
380 /* Write a physical representation of tree node EXPR to output block
381 OB. If REF_P is true, the leaves of EXPR are emitted as references
382 via lto_output_tree_ref. EXPR has already been entered into
383 the streamer cache by our callers. */
384
385 static void
386 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
387 {
388 if (!lto_is_streamable (expr))
389 internal_error ("tree code %qs is not supported in LTO streams",
390 get_tree_code_name (TREE_CODE (expr)));
391
392 /* Write the header, containing everything needed to materialize
393 EXPR on the reading side. */
394 streamer_write_tree_header (ob, expr);
395
396 lto_write_tree_1 (ob, expr, ref_p);
397
398 /* Mark the end of EXPR. */
399 streamer_write_zero (ob);
400 }
401
402 /* Emit the physical representation of tree node EXPR to output block
403 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
404 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
405
406 static void
407 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
408 bool ref_p, bool this_ref_p)
409 {
410 unsigned ix;
411
412 gcc_checking_assert (expr != NULL_TREE
413 && !(this_ref_p && tree_is_indexable (expr)));
414
415 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
416 expr, hash, &ix);
417 gcc_assert (!exists_p);
418 if (streamer_handle_as_builtin_p (expr))
419 {
420 /* MD and NORMAL builtins do not need to be written out
421 completely as they are always instantiated by the
422 compiler on startup. The only builtins that need to
423 be written out are BUILT_IN_FRONTEND. For all other
424 builtins, we simply write the class and code. */
425 streamer_write_builtin (ob, expr);
426 }
427 else if (TREE_CODE (expr) == INTEGER_CST
428 && !TREE_OVERFLOW (expr))
429 {
430 /* Shared INTEGER_CST nodes are special because they need their
431 original type to be materialized by the reader (to implement
432 TYPE_CACHED_VALUES). */
433 streamer_write_integer_cst (ob, expr, ref_p);
434 }
435 else
436 {
437 /* This is the first time we see EXPR, write its fields
438 to OB. */
439 lto_write_tree (ob, expr, ref_p);
440 }
441 }
442
443 struct sccs
444 {
445 unsigned int dfsnum;
446 unsigned int low;
447 };
448
449 struct scc_entry
450 {
451 tree t;
452 hashval_t hash;
453 };
454
455 static unsigned int next_dfs_num;
456 static vec<scc_entry> sccstack;
457 static struct pointer_map_t *sccstate;
458 static struct obstack sccstate_obstack;
459
460 static void
461 DFS_write_tree (struct output_block *ob, sccs *from_state,
462 tree expr, bool ref_p, bool this_ref_p);
463
464 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
465 DFS recurse for all tree edges originating from it. */
466
467 static void
468 DFS_write_tree_body (struct output_block *ob,
469 tree expr, sccs *expr_state, bool ref_p)
470 {
471 #define DFS_follow_tree_edge(DEST) \
472 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
473
474 enum tree_code code;
475
476 code = TREE_CODE (expr);
477
478 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
479 {
480 if (TREE_CODE (expr) != IDENTIFIER_NODE)
481 DFS_follow_tree_edge (TREE_TYPE (expr));
482 }
483
484 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
485 {
486 for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
487 DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
488 }
489
490 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
491 {
492 DFS_follow_tree_edge (TREE_REALPART (expr));
493 DFS_follow_tree_edge (TREE_IMAGPART (expr));
494 }
495
496 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
497 {
498 /* Drop names that were created for anonymous entities. */
499 if (DECL_NAME (expr)
500 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
501 && ANON_AGGRNAME_P (DECL_NAME (expr)))
502 ;
503 else
504 DFS_follow_tree_edge (DECL_NAME (expr));
505 DFS_follow_tree_edge (DECL_CONTEXT (expr));
506 }
507
508 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
509 {
510 DFS_follow_tree_edge (DECL_SIZE (expr));
511 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
512
513 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
514 special handling in LTO, it must be handled by streamer hooks. */
515
516 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
517
518 /* Do not follow DECL_ABSTRACT_ORIGIN. We cannot handle debug information
519 for early inlining so drop it on the floor instead of ICEing in
520 dwarf2out.c. */
521
522 if ((TREE_CODE (expr) == VAR_DECL
523 || TREE_CODE (expr) == PARM_DECL)
524 && DECL_HAS_VALUE_EXPR_P (expr))
525 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
526 if (TREE_CODE (expr) == VAR_DECL)
527 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
528 }
529
530 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
531 {
532 if (TREE_CODE (expr) == TYPE_DECL)
533 DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
534 }
535
536 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
537 {
538 /* Make sure we don't inadvertently set the assembler name. */
539 if (DECL_ASSEMBLER_NAME_SET_P (expr))
540 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
541 }
542
543 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
544 {
545 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
546 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
547 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
548 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
549 DFS_follow_tree_edge (DECL_FCONTEXT (expr));
550 }
551
552 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
553 {
554 DFS_follow_tree_edge (DECL_VINDEX (expr));
555 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
556 /* Do not follow DECL_FUNCTION_SPECIFIC_TARGET; it will be regenerated. */
557 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
558 }
559
560 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
561 {
562 DFS_follow_tree_edge (TYPE_SIZE (expr));
563 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
564 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
565 DFS_follow_tree_edge (TYPE_NAME (expr));
566 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
567 reconstructed during fixup. */
568 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
569 during fixup. */
570 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
571 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
572 /* TYPE_CANONICAL is re-computed during type merging, so no need
573 to follow it here. */
574 DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
575 }
576
577 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
578 {
579 if (TREE_CODE (expr) == ENUMERAL_TYPE)
580 DFS_follow_tree_edge (TYPE_VALUES (expr));
581 else if (TREE_CODE (expr) == ARRAY_TYPE)
582 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
583 else if (RECORD_OR_UNION_TYPE_P (expr))
584 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
585 DFS_follow_tree_edge (t);
586 else if (TREE_CODE (expr) == FUNCTION_TYPE
587 || TREE_CODE (expr) == METHOD_TYPE)
588 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
589
590 if (!POINTER_TYPE_P (expr))
591 DFS_follow_tree_edge (TYPE_MINVAL (expr));
592 DFS_follow_tree_edge (TYPE_MAXVAL (expr));
593 if (RECORD_OR_UNION_TYPE_P (expr))
594 DFS_follow_tree_edge (TYPE_BINFO (expr));
595 }
596
597 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
598 {
599 DFS_follow_tree_edge (TREE_PURPOSE (expr));
600 DFS_follow_tree_edge (TREE_VALUE (expr));
601 DFS_follow_tree_edge (TREE_CHAIN (expr));
602 }
603
604 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
605 {
606 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
607 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
608 }
609
610 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
611 {
612 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
613 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
614 DFS_follow_tree_edge (TREE_BLOCK (expr));
615 }
616
617 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
618 {
619 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
620 /* ??? FIXME. See also streamer_write_chain. */
621 if (!(VAR_OR_FUNCTION_DECL_P (t)
622 && DECL_EXTERNAL (t)))
623 DFS_follow_tree_edge (t);
624
625 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
626
627 /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
628 handle - those that represent inlined function scopes.
629 For the rest, drop them on the floor instead of ICEing
630 in dwarf2out.c. */
631 if (inlined_function_outer_scope_p (expr))
632 {
633 tree ultimate_origin = block_ultimate_origin (expr);
634 DFS_follow_tree_edge (ultimate_origin);
635 }
636 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
637 information for early inlined BLOCKs so drop it on the floor instead
638 of ICEing in dwarf2out.c. */
639
640 /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN are not live at LTO
641 streaming time. */
642
643 /* Do not output BLOCK_SUBBLOCKS. Instead, on streaming-in, this
644 list is re-constructed from BLOCK_SUPERCONTEXT. */
645 }
646
647 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
648 {
649 unsigned i;
650 tree t;
651
652 /* Note that the number of BINFO slots has already been emitted in
653 EXPR's header (see streamer_write_tree_header) because this length
654 is needed to build the empty BINFO node on the reader side. */
655 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
656 DFS_follow_tree_edge (t);
657 DFS_follow_tree_edge (BINFO_OFFSET (expr));
658 DFS_follow_tree_edge (BINFO_VTABLE (expr));
659 DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));
660
661 /* The number of BINFO_BASE_ACCESSES has already been emitted in
662 EXPR's bitfield section. */
663 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
664 DFS_follow_tree_edge (t);
665
666 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
667 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
668 }
669
670 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
671 {
672 unsigned i;
673 tree index, value;
674
675 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
676 {
677 DFS_follow_tree_edge (index);
678 DFS_follow_tree_edge (value);
679 }
680 }
681
682 if (code == OMP_CLAUSE)
683 {
684 int i;
685 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
686 DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
687 DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
688 }
689
690 #undef DFS_follow_tree_edge
691 }
692
693 /* Return a hash value for the tree T. */
694
695 static hashval_t
696 hash_tree (struct streamer_tree_cache_d *cache, tree t)
697 {
698 #define visit(SIBLING) \
699 do { \
700 unsigned ix; \
701 if (SIBLING && streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
702 v = iterative_hash_hashval_t (streamer_tree_cache_get_hash (cache, ix), v); \
703 } while (0)
704
705 /* Hash TS_BASE. */
706 enum tree_code code = TREE_CODE (t);
707 hashval_t v = iterative_hash_host_wide_int (code, 0);
708 if (!TYPE_P (t))
709 {
710 v = iterative_hash_host_wide_int (TREE_SIDE_EFFECTS (t)
711 | (TREE_CONSTANT (t) << 1)
712 | (TREE_READONLY (t) << 2)
713 | (TREE_PUBLIC (t) << 3), v);
714 }
715 v = iterative_hash_host_wide_int (TREE_ADDRESSABLE (t)
716 | (TREE_THIS_VOLATILE (t) << 1), v);
717 if (DECL_P (t))
718 v = iterative_hash_host_wide_int (DECL_UNSIGNED (t), v);
719 else if (TYPE_P (t))
720 v = iterative_hash_host_wide_int (TYPE_UNSIGNED (t), v);
721 if (TYPE_P (t))
722 v = iterative_hash_host_wide_int (TYPE_ARTIFICIAL (t), v);
723 else
724 v = iterative_hash_host_wide_int (TREE_NO_WARNING (t), v);
725 v = iterative_hash_host_wide_int (TREE_NOTHROW (t)
726 | (TREE_STATIC (t) << 1)
727 | (TREE_PROTECTED (t) << 2)
728 | (TREE_DEPRECATED (t) << 3), v);
729 if (code != TREE_BINFO)
730 v = iterative_hash_host_wide_int (TREE_PRIVATE (t), v);
731 if (TYPE_P (t))
732 v = iterative_hash_host_wide_int (TYPE_SATURATING (t)
733 | (TYPE_ADDR_SPACE (t) << 1), v);
734 else if (code == SSA_NAME)
735 v = iterative_hash_host_wide_int (SSA_NAME_IS_DEFAULT_DEF (t), v);
736
737 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
738 {
739 int i;
740 v = iterative_hash_host_wide_int (TREE_INT_CST_NUNITS (t), v);
741 v = iterative_hash_host_wide_int (TREE_INT_CST_EXT_NUNITS (t), v);
742 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
743 v = iterative_hash_host_wide_int (TREE_INT_CST_ELT (t, i), v);
744 }
745
746 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
747 {
748 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
749 v = iterative_hash_host_wide_int (r.cl, v);
750 v = iterative_hash_host_wide_int (r.decimal
751 | (r.sign << 1)
752 | (r.signalling << 2)
753 | (r.canonical << 3), v);
754 v = iterative_hash_host_wide_int (r.uexp, v);
755 for (unsigned i = 0; i < SIGSZ; ++i)
756 v = iterative_hash_host_wide_int (r.sig[i], v);
757 }
758
759 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
760 {
761 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
762 v = iterative_hash_host_wide_int (f.mode, v);
763 v = iterative_hash_host_wide_int (f.data.low, v);
764 v = iterative_hash_host_wide_int (f.data.high, v);
765 }
766
767 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
768 {
769 v = iterative_hash_host_wide_int (DECL_MODE (t), v);
770 v = iterative_hash_host_wide_int (DECL_NONLOCAL (t)
771 | (DECL_VIRTUAL_P (t) << 1)
772 | (DECL_IGNORED_P (t) << 2)
773 | (DECL_ABSTRACT (t) << 3)
774 | (DECL_ARTIFICIAL (t) << 4)
775 | (DECL_USER_ALIGN (t) << 5)
776 | (DECL_PRESERVE_P (t) << 6)
777 | (DECL_EXTERNAL (t) << 7)
778 | (DECL_GIMPLE_REG_P (t) << 8), v);
779 v = iterative_hash_host_wide_int (DECL_ALIGN (t), v);
780 if (code == LABEL_DECL)
781 {
782 v = iterative_hash_host_wide_int (EH_LANDING_PAD_NR (t), v);
783 v = iterative_hash_host_wide_int (LABEL_DECL_UID (t), v);
784 }
785 else if (code == FIELD_DECL)
786 {
787 v = iterative_hash_host_wide_int (DECL_PACKED (t)
788 | (DECL_NONADDRESSABLE_P (t) << 1),
789 v);
790 v = iterative_hash_host_wide_int (DECL_OFFSET_ALIGN (t), v);
791 }
792 else if (code == VAR_DECL)
793 {
794 v = iterative_hash_host_wide_int (DECL_HAS_DEBUG_EXPR_P (t)
795 | (DECL_NONLOCAL_FRAME (t) << 1),
796 v);
797 }
798 if (code == RESULT_DECL
799 || code == PARM_DECL
800 || code == VAR_DECL)
801 {
802 v = iterative_hash_host_wide_int (DECL_BY_REFERENCE (t), v);
803 if (code == VAR_DECL
804 || code == PARM_DECL)
805 v = iterative_hash_host_wide_int (DECL_HAS_VALUE_EXPR_P (t), v);
806 }
807 }
808
809 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
810 v = iterative_hash_host_wide_int (DECL_REGISTER (t), v);
811
812 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
813 {
814 v = iterative_hash_host_wide_int ((DECL_COMMON (t))
815 | (DECL_DLLIMPORT_P (t) << 1)
816 | (DECL_WEAK (t) << 2)
817 | (DECL_SEEN_IN_BIND_EXPR_P (t) << 3)
818 | (DECL_COMDAT (t) << 4)
819 | (DECL_VISIBILITY_SPECIFIED (t) << 6),
820 v);
821 v = iterative_hash_host_wide_int (DECL_VISIBILITY (t), v);
822 if (code == VAR_DECL)
823 {
824 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
825 v = iterative_hash_host_wide_int (DECL_HARD_REGISTER (t)
826 | (DECL_IN_CONSTANT_POOL (t) << 1),
827 v);
828 }
829 if (TREE_CODE (t) == FUNCTION_DECL)
830 v = iterative_hash_host_wide_int (DECL_FINAL_P (t)
831 | (DECL_CXX_CONSTRUCTOR_P (t) << 1)
832 | (DECL_CXX_DESTRUCTOR_P (t) << 2),
833 v);
834 }
835
836 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
837 {
838 v = iterative_hash_host_wide_int (DECL_BUILT_IN_CLASS (t), v);
839 v = iterative_hash_host_wide_int (DECL_STATIC_CONSTRUCTOR (t)
840 | (DECL_STATIC_DESTRUCTOR (t) << 1)
841 | (DECL_UNINLINABLE (t) << 2)
842 | (DECL_POSSIBLY_INLINED (t) << 3)
843 | (DECL_IS_NOVOPS (t) << 4)
844 | (DECL_IS_RETURNS_TWICE (t) << 5)
845 | (DECL_IS_MALLOC (t) << 6)
846 | (DECL_IS_OPERATOR_NEW (t) << 7)
847 | (DECL_DECLARED_INLINE_P (t) << 8)
848 | (DECL_STATIC_CHAIN (t) << 9)
849 | (DECL_NO_INLINE_WARNING_P (t) << 10)
850 | (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t) << 11)
851 | (DECL_NO_LIMIT_STACK (t) << 12)
852 | (DECL_DISREGARD_INLINE_LIMITS (t) << 13)
853 | (DECL_PURE_P (t) << 14)
854 | (DECL_LOOPING_CONST_OR_PURE_P (t) << 15), v);
855 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
856 v = iterative_hash_host_wide_int (DECL_FUNCTION_CODE (t), v);
857 }
858
859 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
860 {
861 v = iterative_hash_host_wide_int (TYPE_MODE (t), v);
862 v = iterative_hash_host_wide_int (TYPE_STRING_FLAG (t)
863 | (TYPE_NO_FORCE_BLK (t) << 1)
864 | (TYPE_NEEDS_CONSTRUCTING (t) << 2)
865 | (TYPE_PACKED (t) << 3)
866 | (TYPE_RESTRICT (t) << 4)
867 | (TYPE_USER_ALIGN (t) << 5)
868 | (TYPE_READONLY (t) << 6), v);
869 if (RECORD_OR_UNION_TYPE_P (t))
870 {
871 v = iterative_hash_host_wide_int (TYPE_TRANSPARENT_AGGR (t)
872 | (TYPE_FINAL_P (t) << 1), v);
873 }
874 else if (code == ARRAY_TYPE)
875 v = iterative_hash_host_wide_int (TYPE_NONALIASED_COMPONENT (t), v);
876 v = iterative_hash_host_wide_int (TYPE_PRECISION (t), v);
877 v = iterative_hash_host_wide_int (TYPE_ALIGN (t), v);
878 v = iterative_hash_host_wide_int ((TYPE_ALIAS_SET (t) == 0
879 || (!in_lto_p
880 && get_alias_set (t) == 0))
881 ? 0 : -1, v);
882 }
883
884 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
885 v = iterative_hash (TRANSLATION_UNIT_LANGUAGE (t),
886 strlen (TRANSLATION_UNIT_LANGUAGE (t)), v);
887
888 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
889 gcc_unreachable ();
890
891 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
892 v = iterative_hash (t, sizeof (struct cl_optimization), v);
893
894 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
895 v = iterative_hash_host_wide_int (IDENTIFIER_HASH_VALUE (t), v);
896
897 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
898 v = iterative_hash (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t), v);
899
900 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
901 {
902 if (POINTER_TYPE_P (t))
903 {
904 /* For pointers factor in the pointed-to type recursively as
905 we cannot recurse through only pointers.
906 ??? We can generalize this by keeping track of the
907 in-SCC edges for each tree (or arbitrarily the first
908 such edge) and hashing that in during a second stage
909 (instead of the quadratic mixing of the SCC we do now). */
910 hashval_t x;
911 unsigned ix;
912 if (streamer_tree_cache_lookup (cache, TREE_TYPE (t), &ix))
913 x = streamer_tree_cache_get_hash (cache, ix);
914 else
915 x = hash_tree (cache, TREE_TYPE (t));
916 v = iterative_hash_hashval_t (x, v);
917 }
918 else if (code != IDENTIFIER_NODE)
919 visit (TREE_TYPE (t));
920 }
921
922 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
923 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
924 visit (VECTOR_CST_ELT (t, i));
925
926 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
927 {
928 visit (TREE_REALPART (t));
929 visit (TREE_IMAGPART (t));
930 }
931
932 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
933 {
934 /* Drop names that were created for anonymous entities. */
935 if (DECL_NAME (t)
936 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
937 && ANON_AGGRNAME_P (DECL_NAME (t)))
938 ;
939 else
940 visit (DECL_NAME (t));
941 if (DECL_FILE_SCOPE_P (t))
942 ;
943 else
944 visit (DECL_CONTEXT (t));
945 }
946
947 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
948 {
949 visit (DECL_SIZE (t));
950 visit (DECL_SIZE_UNIT (t));
951 visit (DECL_ATTRIBUTES (t));
952 if ((code == VAR_DECL
953 || code == PARM_DECL)
954 && DECL_HAS_VALUE_EXPR_P (t))
955 visit (DECL_VALUE_EXPR (t));
956 if (code == VAR_DECL
957 && DECL_HAS_DEBUG_EXPR_P (t))
958 visit (DECL_DEBUG_EXPR (t));
959 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
960 be able to call get_symbol_initial_value. */
961 }
962
963 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
964 {
965 if (code == TYPE_DECL)
966 visit (DECL_ORIGINAL_TYPE (t));
967 }
968
969 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
970 {
971 if (DECL_ASSEMBLER_NAME_SET_P (t))
972 visit (DECL_ASSEMBLER_NAME (t));
973 }
974
975 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
976 {
977 visit (DECL_FIELD_OFFSET (t));
978 visit (DECL_BIT_FIELD_TYPE (t));
979 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
980 visit (DECL_FIELD_BIT_OFFSET (t));
981 visit (DECL_FCONTEXT (t));
982 }
983
984 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
985 {
986 visit (DECL_VINDEX (t));
987 visit (DECL_FUNCTION_PERSONALITY (t));
988 /* Do not follow DECL_FUNCTION_SPECIFIC_TARGET. */
989 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
990 }
991
992 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
993 {
994 visit (TYPE_SIZE (t));
995 visit (TYPE_SIZE_UNIT (t));
996 visit (TYPE_ATTRIBUTES (t));
997 visit (TYPE_NAME (t));
998 visit (TYPE_MAIN_VARIANT (t));
999 if (TYPE_FILE_SCOPE_P (t))
1000 ;
1001 else
1002 visit (TYPE_CONTEXT (t));
1003 visit (TYPE_STUB_DECL (t));
1004 }
1005
1006 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1007 {
1008 if (code == ENUMERAL_TYPE)
1009 visit (TYPE_VALUES (t));
1010 else if (code == ARRAY_TYPE)
1011 visit (TYPE_DOMAIN (t));
1012 else if (RECORD_OR_UNION_TYPE_P (t))
1013 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1014 visit (f);
1015 else if (code == FUNCTION_TYPE
1016 || code == METHOD_TYPE)
1017 visit (TYPE_ARG_TYPES (t));
1018 if (!POINTER_TYPE_P (t))
1019 visit (TYPE_MINVAL (t));
1020 visit (TYPE_MAXVAL (t));
1021 if (RECORD_OR_UNION_TYPE_P (t))
1022 visit (TYPE_BINFO (t));
1023 }
1024
1025 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1026 {
1027 visit (TREE_PURPOSE (t));
1028 visit (TREE_VALUE (t));
1029 visit (TREE_CHAIN (t));
1030 }
1031
1032 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1033 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1034 visit (TREE_VEC_ELT (t, i));
1035
1036 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1037 {
1038 v = iterative_hash_host_wide_int (TREE_OPERAND_LENGTH (t), v);
1039 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1040 visit (TREE_OPERAND (t, i));
1041 }
1042
1043 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1044 {
1045 unsigned i;
1046 tree b;
1047 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1048 visit (b);
1049 visit (BINFO_OFFSET (t));
1050 visit (BINFO_VTABLE (t));
1051 visit (BINFO_VPTR_FIELD (t));
1052 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1053 visit (b);
1054 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1055 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1056 }
1057
1058 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1059 {
1060 unsigned i;
1061 tree index, value;
1062 v = iterative_hash_host_wide_int (CONSTRUCTOR_NELTS (t), v);
1063 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1064 {
1065 visit (index);
1066 visit (value);
1067 }
1068 }
1069
1070 if (code == OMP_CLAUSE)
1071 {
1072 int i;
1073
1074 v = iterative_hash_host_wide_int (OMP_CLAUSE_CODE (t), v);
1075 switch (OMP_CLAUSE_CODE (t))
1076 {
1077 case OMP_CLAUSE_DEFAULT:
1078 v = iterative_hash_host_wide_int (OMP_CLAUSE_DEFAULT_KIND (t), v);
1079 break;
1080 case OMP_CLAUSE_SCHEDULE:
1081 v = iterative_hash_host_wide_int (OMP_CLAUSE_SCHEDULE_KIND (t), v);
1082 break;
1083 case OMP_CLAUSE_DEPEND:
1084 v = iterative_hash_host_wide_int (OMP_CLAUSE_DEPEND_KIND (t), v);
1085 break;
1086 case OMP_CLAUSE_MAP:
1087 v = iterative_hash_host_wide_int (OMP_CLAUSE_MAP_KIND (t), v);
1088 break;
1089 case OMP_CLAUSE_PROC_BIND:
1090 v = iterative_hash_host_wide_int (OMP_CLAUSE_PROC_BIND_KIND (t), v);
1091 break;
1092 case OMP_CLAUSE_REDUCTION:
1093 v = iterative_hash_host_wide_int (OMP_CLAUSE_REDUCTION_CODE (t), v);
1094 break;
1095 default:
1096 break;
1097 }
1098 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1099 visit (OMP_CLAUSE_OPERAND (t, i));
1100 visit (OMP_CLAUSE_CHAIN (t));
1101 }
1102
1103 return v;
1104
1105 #undef visit
1106 }
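
/* Illustrative sketch, for exposition only: hash_tree repeatedly packs a
   group of related flag bits into one word and folds whole words into the
   running hash value.  The same pattern in stand-alone C; mix is a made-up
   combiner standing in for iterative_hash_host_wide_int:  */
#if 0
static unsigned int
mix (long val, unsigned int hash)
{
  return ((unsigned int) val * 2654435761u) ^ (hash << 5) ^ (hash >> 27);
}

static unsigned int
hash_some_flags (int is_static, int is_public, int mode, unsigned int hash)
{
  /* Pack the single-bit flags into one word and mix that in ...  */
  hash = mix (is_static | (is_public << 1), hash);
  /* ... and mix multi-bit fields in as separate words.  */
  return mix (mode, hash);
}
#endif
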
1107
1108 /* Compare two SCC entries by their hash value for qsorting them. */
1109
1110 static int
1111 scc_entry_compare (const void *p1_, const void *p2_)
1112 {
1113 const scc_entry *p1 = (const scc_entry *) p1_;
1114 const scc_entry *p2 = (const scc_entry *) p2_;
1115 if (p1->hash < p2->hash)
1116 return -1;
1117 else if (p1->hash > p2->hash)
1118 return 1;
1119 return 0;
1120 }
1121
1122 /* Return a hash value for the SCC on the SCC stack from FIRST with
1123 size SIZE. */
1124
1125 static hashval_t
1126 hash_scc (struct streamer_tree_cache_d *cache, unsigned first, unsigned size)
1127 {
1128 /* Compute hash values for the SCC members. */
1129 for (unsigned i = 0; i < size; ++i)
1130 sccstack[first+i].hash = hash_tree (cache, sccstack[first+i].t);
1131
1132 if (size == 1)
1133 return sccstack[first].hash;
1134
1135 /* Sort the (tree, hash) pairs of the SCC so that, once we mix in
1136 all members of the SCC, the hash value becomes independent of
1137 the order in which we visited the SCC. Produce the hash of the whole
1138 SCC as a combination of the hashes of its individual elements. Then
1139 combine that hash back into the hash of each element, so otherwise
1140 identical-looking elements from two different SCCs are distinguished. */
1141 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1142
1143 hashval_t scc_hash = sccstack[first].hash;
1144 for (unsigned i = 1; i < size; ++i)
1145 scc_hash = iterative_hash_hashval_t (scc_hash,
1146 sccstack[first+i].hash);
1147 for (unsigned i = 0; i < size; ++i)
1148 sccstack[first+i].hash = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
1149 return scc_hash;
1150 }
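
/* Illustrative sketch, for exposition only: the order-independent SCC
   hashing above, in isolation.  HASHES[] holds the per-member hashes of one
   SCC of size N (N >= 1); mix is a made-up combiner standing in for
   iterative_hash_hashval_t:  */
#if 0
#include <stdlib.h>

static unsigned int
mix (unsigned int val, unsigned int hash)
{
  return (val * 2654435761u) ^ (hash << 5) ^ (hash >> 27);
}

static int
cmp_uint (const void *p1, const void *p2)
{
  unsigned int a = *(const unsigned int *) p1;
  unsigned int b = *(const unsigned int *) p2;
  return a < b ? -1 : a > b ? 1 : 0;
}

static unsigned int
combine_scc_hashes (unsigned int *hashes, unsigned int n)
{
  /* Sorting makes the combined value independent of the visiting order.  */
  qsort (hashes, n, sizeof (unsigned int), cmp_uint);
  unsigned int scc_hash = hashes[0];
  for (unsigned int i = 1; i < n; ++i)
    scc_hash = mix (hashes[i], scc_hash);
  /* Feed the whole-SCC hash back into every member so otherwise
     identical-looking members of different SCCs hash differently.  */
  for (unsigned int i = 0; i < n; ++i)
    hashes[i] = mix (scc_hash, hashes[i]);
  return scc_hash;
}
#endif
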
1151
1152 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1153 already in the streamer cache. Main routine called for
1154 each visit of EXPR. */
1155
1156 static void
1157 DFS_write_tree (struct output_block *ob, sccs *from_state,
1158 tree expr, bool ref_p, bool this_ref_p)
1159 {
1160 unsigned ix;
1161 sccs **slot;
1162
1163 /* Handle special cases. */
1164 if (expr == NULL_TREE)
1165 return;
1166
1167 /* Do not DFS walk into indexable trees. */
1168 if (this_ref_p && tree_is_indexable (expr))
1169 return;
1170
1171 /* Check if we already streamed EXPR. */
1172 if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
1173 return;
1174
1175 slot = (sccs **)pointer_map_insert (sccstate, expr);
1176 sccs *cstate = *slot;
1177 if (!cstate)
1178 {
1179 scc_entry e = { expr, 0 };
1180 /* Not yet visited. DFS recurse and push it onto the stack. */
1181 *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
1182 sccstack.safe_push (e);
1183 cstate->dfsnum = next_dfs_num++;
1184 cstate->low = cstate->dfsnum;
1185
1186 if (streamer_handle_as_builtin_p (expr))
1187 ;
1188 else if (TREE_CODE (expr) == INTEGER_CST
1189 && !TREE_OVERFLOW (expr))
1190 DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
1191 else
1192 {
1193 DFS_write_tree_body (ob, expr, cstate, ref_p);
1194
1195 /* Walk any LTO-specific edges. */
1196 if (DECL_P (expr)
1197 && TREE_CODE (expr) != FUNCTION_DECL
1198 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
1199 {
1200 /* Handle DECL_INITIAL for symbols. */
1201 tree initial = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
1202 expr);
1203 DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
1204 }
1205 }
1206
1207 /* See if we found an SCC. */
1208 if (cstate->low == cstate->dfsnum)
1209 {
1210 unsigned first, size;
1211 tree x;
1212
1213 /* Pop the SCC and compute its size. */
1214 first = sccstack.length ();
1215 do
1216 {
1217 x = sccstack[--first].t;
1218 }
1219 while (x != expr);
1220 size = sccstack.length () - first;
1221
1222 /* No need to compute hashes for LTRANS units, we don't perform
1223 any merging there. */
1224 hashval_t scc_hash = 0;
1225 unsigned scc_entry_len = 0;
1226 if (!flag_wpa)
1227 {
1228 scc_hash = hash_scc (ob->writer_cache, first, size);
1229
1230 /* Put the entries with the least number of collisions first. */
1231 unsigned entry_start = 0;
1232 scc_entry_len = size + 1;
1233 for (unsigned i = 0; i < size;)
1234 {
1235 unsigned from = i;
1236 for (i = i + 1; i < size
1237 && (sccstack[first + i].hash
1238 == sccstack[first + from].hash); ++i)
1239 ;
1240 if (i - from < scc_entry_len)
1241 {
1242 scc_entry_len = i - from;
1243 entry_start = from;
1244 }
1245 }
1246 for (unsigned i = 0; i < scc_entry_len; ++i)
1247 {
1248 scc_entry tem = sccstack[first + i];
1249 sccstack[first + i] = sccstack[first + entry_start + i];
1250 sccstack[first + entry_start + i] = tem;
1251 }
1252 }
1253
1254 /* Write LTO_tree_scc. */
1255 streamer_write_record_start (ob, LTO_tree_scc);
1256 streamer_write_uhwi (ob, size);
1257 streamer_write_uhwi (ob, scc_hash);
1258
1259 /* Write size-1 SCCs without wrapping them inside SCC bundles.
1260 All INTEGER_CSTs need to be handled this way as we need
1261 their type to materialize them. Also builtins are handled
1262 this way.
1263 ??? We still wrap these in LTO_tree_scc so at the
1264 input side we can properly identify the tree we want
1265 to ultimately return. */
1266 if (size == 1)
1267 lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
1268 else
1269 {
1270 /* Write the size of the SCC entry candidates. */
1271 streamer_write_uhwi (ob, scc_entry_len);
1272
1273 /* Write all headers and populate the streamer cache. */
1274 for (unsigned i = 0; i < size; ++i)
1275 {
1276 hashval_t hash = sccstack[first+i].hash;
1277 tree t = sccstack[first+i].t;
1278 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
1279 t, hash, &ix);
1280 gcc_assert (!exists_p);
1281
1282 if (!lto_is_streamable (t))
1283 internal_error ("tree code %qs is not supported "
1284 "in LTO streams",
1285 get_tree_code_name (TREE_CODE (t)));
1286
1287 gcc_checking_assert (!streamer_handle_as_builtin_p (t));
1288
1289 /* Write the header, containing everything needed to
1290 materialize EXPR on the reading side. */
1291 streamer_write_tree_header (ob, t);
1292 }
1293
1294 /* Write the bitpacks and tree references. */
1295 for (unsigned i = 0; i < size; ++i)
1296 {
1297 lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
1298
1299 /* Mark the end of the tree. */
1300 streamer_write_zero (ob);
1301 }
1302 }
1303
1304 /* Finally truncate the vector. */
1305 sccstack.truncate (first);
1306
1307 if (from_state)
1308 from_state->low = MIN (from_state->low, cstate->low);
1309 return;
1310 }
1311
1312 if (from_state)
1313 from_state->low = MIN (from_state->low, cstate->low);
1314 }
1315 gcc_checking_assert (from_state);
1316 if (cstate->dfsnum < from_state->dfsnum)
1317 from_state->low = MIN (cstate->dfsnum, from_state->low);
1318 }
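
/* Illustrative sketch, for exposition only: DFS_write_tree is essentially
   Tarjan's SCC algorithm.  A textbook version over an integer graph is shown
   below (it assumes dfsnum[] is zero-initialized and *num and *sp start at
   zero).  In the function above the writer-cache lookup at the top
   effectively plays the role of the on_stack test, since members of
   already-emitted SCCs are already in the cache, and each popped SCC is
   streamed as an LTO_tree_scc record instead of merely being collected.  */
#if 0
static void
scc_dfs (int v, int **succ, int *nsucc, int *dfsnum, int *low,
         int *on_stack, int *stack, int *sp, int *num)
{
  dfsnum[v] = low[v] = ++*num;
  stack[(*sp)++] = v;
  on_stack[v] = 1;
  for (int i = 0; i < nsucc[v]; ++i)
    {
      int w = succ[v][i];
      if (dfsnum[w] == 0)
        {
          /* Tree edge: recurse and propagate the low-link upwards.  */
          scc_dfs (w, succ, nsucc, dfsnum, low, on_stack, stack, sp, num);
          if (low[w] < low[v])
            low[v] = low[w];
        }
      else if (on_stack[w] && dfsnum[w] < low[v])
        /* Back or cross edge into the current stack.  */
        low[v] = dfsnum[w];
    }
  if (low[v] == dfsnum[v])
    {
      /* V is the root of an SCC: pop it and everything above it.  */
      int w;
      do
        {
          w = stack[--*sp];
          on_stack[w] = 0;
        }
      while (w != v);
    }
}
#endif
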
1319
1320
1321 /* Emit the physical representation of tree node EXPR to output block
1322 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
1323 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1324
1325 void
1326 lto_output_tree (struct output_block *ob, tree expr,
1327 bool ref_p, bool this_ref_p)
1328 {
1329 unsigned ix;
1330 bool existed_p;
1331
1332 if (expr == NULL_TREE)
1333 {
1334 streamer_write_record_start (ob, LTO_null);
1335 return;
1336 }
1337
1338 if (this_ref_p && tree_is_indexable (expr))
1339 {
1340 lto_output_tree_ref (ob, expr);
1341 return;
1342 }
1343
1344 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1345 if (existed_p)
1346 {
1347 /* If a node has already been streamed out, make sure that
1348 we don't write it more than once. Otherwise, the reader
1349 will instantiate two different nodes for the same object. */
1350 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1351 streamer_write_uhwi (ob, ix);
1352 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1353 lto_tree_code_to_tag (TREE_CODE (expr)));
1354 lto_stats.num_pickle_refs_output++;
1355 }
1356 else
1357 {
1358 /* This is the first time we see EXPR, write all reachable
1359 trees to OB. */
1360 static bool in_dfs_walk;
1361
1362 /* Protect against recursion which means disconnect between
1363 what tree edges we walk in the DFS walk and what edges
1364 we stream out. */
1365 gcc_assert (!in_dfs_walk);
1366
1367 /* Start the DFS walk. */
1370 in_dfs_walk = true;
1371 sccstate = pointer_map_create ();
1372 gcc_obstack_init (&sccstate_obstack);
1373 next_dfs_num = 1;
1374 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
1375 sccstack.release ();
1376 pointer_map_destroy (sccstate);
1377 obstack_free (&sccstate_obstack, NULL);
1378 in_dfs_walk = false;
1379
1380 /* Finally append a reference to the tree we were writing.
1381 ??? If expr ended up as a singleton we could have
1382 inlined it here and avoid outputting a reference. */
1383 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1384 gcc_assert (existed_p);
1385 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1386 streamer_write_uhwi (ob, ix);
1387 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1388 lto_tree_code_to_tag (TREE_CODE (expr)));
1389 lto_stats.num_pickle_refs_output++;
1390 }
1391 }
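
/* In terms of the resulting stream, the first time a tree is seen the DFS
   above emits one LTO_tree_scc record for every not yet streamed SCC
   reachable from EXPR, followed by an LTO_tree_pickle_reference naming
   EXPR's slot in the writer cache; any later occurrence of EXPR emits only
   the pickle reference.  */
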
1392
1393
1394 /* Output to OB a list of try/catch handlers starting with FIRST. */
1395
1396 static void
1397 output_eh_try_list (struct output_block *ob, eh_catch first)
1398 {
1399 eh_catch n;
1400
1401 for (n = first; n; n = n->next_catch)
1402 {
1403 streamer_write_record_start (ob, LTO_eh_catch);
1404 stream_write_tree (ob, n->type_list, true);
1405 stream_write_tree (ob, n->filter_list, true);
1406 stream_write_tree (ob, n->label, true);
1407 }
1408
1409 streamer_write_record_start (ob, LTO_null);
1410 }
1411
1412
1413 /* Output EH region R to OB, or an LTO_null record if R is NULL.
1414 Related regions (outer, inner, next peer) are referenced by their
1415 indices in the enclosing function's EH region array. */
1416
1417 static void
1418 output_eh_region (struct output_block *ob, eh_region r)
1419 {
1420 enum LTO_tags tag;
1421
1422 if (r == NULL)
1423 {
1424 streamer_write_record_start (ob, LTO_null);
1425 return;
1426 }
1427
1428 if (r->type == ERT_CLEANUP)
1429 tag = LTO_ert_cleanup;
1430 else if (r->type == ERT_TRY)
1431 tag = LTO_ert_try;
1432 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1433 tag = LTO_ert_allowed_exceptions;
1434 else if (r->type == ERT_MUST_NOT_THROW)
1435 tag = LTO_ert_must_not_throw;
1436 else
1437 gcc_unreachable ();
1438
1439 streamer_write_record_start (ob, tag);
1440 streamer_write_hwi (ob, r->index);
1441
1442 if (r->outer)
1443 streamer_write_hwi (ob, r->outer->index);
1444 else
1445 streamer_write_zero (ob);
1446
1447 if (r->inner)
1448 streamer_write_hwi (ob, r->inner->index);
1449 else
1450 streamer_write_zero (ob);
1451
1452 if (r->next_peer)
1453 streamer_write_hwi (ob, r->next_peer->index);
1454 else
1455 streamer_write_zero (ob);
1456
1457 if (r->type == ERT_TRY)
1458 {
1459 output_eh_try_list (ob, r->u.eh_try.first_catch);
1460 }
1461 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1462 {
1463 stream_write_tree (ob, r->u.allowed.type_list, true);
1464 stream_write_tree (ob, r->u.allowed.label, true);
1465 streamer_write_uhwi (ob, r->u.allowed.filter);
1466 }
1467 else if (r->type == ERT_MUST_NOT_THROW)
1468 {
1469 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1470 bitpack_d bp = bitpack_create (ob->main_stream);
1471 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1472 streamer_write_bitpack (&bp);
1473 }
1474
1475 if (r->landing_pads)
1476 streamer_write_hwi (ob, r->landing_pads->index);
1477 else
1478 streamer_write_zero (ob);
1479 }
1480
1481
1482 /* Output landing pad LP to OB. */
1483
1484 static void
1485 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1486 {
1487 if (lp == NULL)
1488 {
1489 streamer_write_record_start (ob, LTO_null);
1490 return;
1491 }
1492
1493 streamer_write_record_start (ob, LTO_eh_landing_pad);
1494 streamer_write_hwi (ob, lp->index);
1495 if (lp->next_lp)
1496 streamer_write_hwi (ob, lp->next_lp->index);
1497 else
1498 streamer_write_zero (ob);
1499
1500 if (lp->region)
1501 streamer_write_hwi (ob, lp->region->index);
1502 else
1503 streamer_write_zero (ob);
1504
1505 stream_write_tree (ob, lp->post_landing_pad, true);
1506 }
1507
1508
1509 /* Output the existing eh_table to OB. */
1510
1511 static void
1512 output_eh_regions (struct output_block *ob, struct function *fn)
1513 {
1514 if (fn->eh && fn->eh->region_tree)
1515 {
1516 unsigned i;
1517 eh_region eh;
1518 eh_landing_pad lp;
1519 tree ttype;
1520
1521 streamer_write_record_start (ob, LTO_eh_table);
1522
1523 /* Emit the index of the root of the EH region tree. */
1524 streamer_write_hwi (ob, fn->eh->region_tree->index);
1525
1526 /* Emit all the EH regions in the region array. */
1527 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1528 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1529 output_eh_region (ob, eh);
1530
1531 /* Emit all landing pads. */
1532 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1533 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1534 output_eh_lp (ob, lp);
1535
1536 /* Emit all the runtime type data. */
1537 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1538 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1539 stream_write_tree (ob, ttype, true);
1540
1541 /* Emit the table of action chains. */
1542 if (targetm.arm_eabi_unwinder)
1543 {
1544 tree t;
1545 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1546 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1547 stream_write_tree (ob, t, true);
1548 }
1549 else
1550 {
1551 uchar c;
1552 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1553 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1554 streamer_write_char_stream (ob->main_stream, c);
1555 }
1556 }
1557
1558 /* The LTO_null either terminates the record or indicates that there
1559 are no eh_records at all. */
1560 streamer_write_record_start (ob, LTO_null);
1561 }
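
/* The record written above is, when FN has an EH tree: LTO_eh_table, the
   index of the region tree root, the region array (its length followed by
   one record per slot, NULL slots included), the landing pad array (length
   plus one record per slot), the runtime ttype data, the action/exception
   specification data and a terminating LTO_null.  Without an EH tree only
   the LTO_null is written.  */
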
1562
1563
1564 /* Output all of the active ssa names to the ssa_names stream. */
1565
1566 static void
1567 output_ssa_names (struct output_block *ob, struct function *fn)
1568 {
1569 unsigned int i, len;
1570
1571 len = vec_safe_length (SSANAMES (fn));
1572 streamer_write_uhwi (ob, len);
1573
1574 for (i = 1; i < len; i++)
1575 {
1576 tree ptr = (*SSANAMES (fn))[i];
1577
1578 if (ptr == NULL_TREE
1579 || SSA_NAME_IN_FREE_LIST (ptr)
1580 || virtual_operand_p (ptr))
1581 continue;
1582
1583 streamer_write_uhwi (ob, i);
1584 streamer_write_char_stream (ob->main_stream,
1585 SSA_NAME_IS_DEFAULT_DEF (ptr));
1586 if (SSA_NAME_VAR (ptr))
1587 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1588 else
1589 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1590 stream_write_tree (ob, TREE_TYPE (ptr), true);
1591 }
1592
1593 streamer_write_zero (ob);
1594 }
1595
1596
1597 /* Output a wide-int. */
1598
1599 static void
1600 streamer_write_wi (struct output_block *ob,
1601 const widest_int &w)
1602 {
1603 int len = w.get_len ();
1604
1605 streamer_write_uhwi (ob, w.get_precision ());
1606 streamer_write_uhwi (ob, len);
1607 for (int i = 0; i < len; i++)
1608 streamer_write_hwi (ob, w.elt (i));
1609 }
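
/* So the on-disk form of a widest_int is its precision, the number of
   significant HOST_WIDE_INT elements, and then the elements themselves in
   the order w.elt (0) .. w.elt (len - 1).  */
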
1610
1611
1612 /* Output the cfg. */
1613
1614 static void
1615 output_cfg (struct output_block *ob, struct function *fn)
1616 {
1617 struct lto_output_stream *tmp_stream = ob->main_stream;
1618 basic_block bb;
1619
1620 ob->main_stream = ob->cfg_stream;
1621
1622 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1623 profile_status_for_fn (fn));
1624
1625 /* Output the number of the highest basic block. */
1626 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1627
1628 FOR_ALL_BB_FN (bb, fn)
1629 {
1630 edge_iterator ei;
1631 edge e;
1632
1633 streamer_write_hwi (ob, bb->index);
1634
1635 /* Output the successors and the edge flags. */
1636 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1637 FOR_EACH_EDGE (e, ei, bb->succs)
1638 {
1639 streamer_write_uhwi (ob, e->dest->index);
1640 streamer_write_hwi (ob, e->probability);
1641 streamer_write_gcov_count (ob, e->count);
1642 streamer_write_uhwi (ob, e->flags);
1643 }
1644 }
1645
1646 streamer_write_hwi (ob, -1);
1647
1648 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1649 while (bb->next_bb)
1650 {
1651 streamer_write_hwi (ob, bb->next_bb->index);
1652 bb = bb->next_bb;
1653 }
1654
1655 streamer_write_hwi (ob, -1);
1656
1657 /* ??? The cfgloop interface is tied to cfun. */
1658 gcc_assert (cfun == fn);
1659
1660 /* Output the number of loops. */
1661 streamer_write_uhwi (ob, number_of_loops (fn));
1662
1663 /* Output each loop, skipping the tree root which has number zero. */
1664 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1665 {
1666 struct loop *loop = get_loop (fn, i);
1667
1668 /* Write the index of the loop header. That's enough to rebuild
1669 the loop tree on the reader side. Stream -1 for an unused
1670 loop entry. */
1671 if (!loop)
1672 {
1673 streamer_write_hwi (ob, -1);
1674 continue;
1675 }
1676 else
1677 streamer_write_hwi (ob, loop->header->index);
1678
1679 /* Write everything copy_loop_info copies. */
1680 streamer_write_enum (ob->main_stream,
1681 loop_estimation, EST_LAST, loop->estimate_state);
1682 streamer_write_hwi (ob, loop->any_upper_bound);
1683 if (loop->any_upper_bound)
1684 streamer_write_wi (ob, loop->nb_iterations_upper_bound);
1685 streamer_write_hwi (ob, loop->any_estimate);
1686 if (loop->any_estimate)
1687 streamer_write_wi (ob, loop->nb_iterations_estimate);
1688
1689 /* Write OMP SIMD related info. */
1690 streamer_write_hwi (ob, loop->safelen);
1691 streamer_write_hwi (ob, loop->dont_vectorize);
1692 streamer_write_hwi (ob, loop->force_vectorize);
1693 stream_write_tree (ob, loop->simduid, true);
1694 }
1695
1696 ob->main_stream = tmp_stream;
1697 }
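
/* The CFG therefore goes to the separate cfg stream as: the profile status,
   the highest basic block number, then for each basic block its index and
   its successor edges (count, then destination index, probability, count
   and flags per edge), a -1 marker, the bb->next_bb chain as a list of
   indices ending in -1, and finally the loop tree as described above.  */
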
1698
1699
1700 /* Create the header in the file using OB. If the section type is for
1701 a function, set FN to the decl for that function. */
1702
1703 void
1704 produce_asm (struct output_block *ob, tree fn)
1705 {
1706 enum lto_section_type section_type = ob->section_type;
1707 struct lto_function_header header;
1708 char *section_name;
1709 struct lto_output_stream *header_stream;
1710
1711 if (section_type == LTO_section_function_body)
1712 {
1713 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1714 section_name = lto_get_section_name (section_type, name, NULL);
1715 }
1716 else
1717 section_name = lto_get_section_name (section_type, NULL, NULL);
1718
1719 lto_begin_section (section_name, !flag_wpa);
1720 free (section_name);
1721
1722 /* The entire header is computed here and streamed out below. */
1723 memset (&header, 0, sizeof (struct lto_function_header));
1724
1725 /* Write the header. */
1726 header.lto_header.major_version = LTO_major_version;
1727 header.lto_header.minor_version = LTO_minor_version;
1728
1729 header.compressed_size = 0;
1730
1731 if (section_type == LTO_section_function_body)
1732 header.cfg_size = ob->cfg_stream->total_size;
1733 header.main_size = ob->main_stream->total_size;
1734 header.string_size = ob->string_stream->total_size;
1735
1736 header_stream = XCNEW (struct lto_output_stream);
1737 lto_output_data_stream (header_stream, &header, sizeof header);
1738 lto_write_stream (header_stream);
1739 free (header_stream);
1740
1741 /* Put all of the gimple and the string table out to the asm file as a
1742 block of text. */
1743 if (section_type == LTO_section_function_body)
1744 lto_write_stream (ob->cfg_stream);
1745 lto_write_stream (ob->main_stream);
1746 lto_write_stream (ob->string_stream);
1747
1748 lto_end_section ();
1749 }
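
/* Each section produced this way is laid out as the lto_function_header
   (version numbers plus the sizes of the individual streams), followed by
   the cfg stream (function-body sections only), the main stream and the
   string stream, in that order.  */
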
1750
1751
1752 /* Output the base body of struct function FN using output block OB. */
1753
1754 static void
1755 output_struct_function_base (struct output_block *ob, struct function *fn)
1756 {
1757 struct bitpack_d bp;
1758 unsigned i;
1759 tree t;
1760
1761 /* Output the static chain and non-local goto save area. */
1762 stream_write_tree (ob, fn->static_chain_decl, true);
1763 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
1764
1765 /* Output all the local variables in the function. */
1766 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
1767 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
1768 stream_write_tree (ob, t, true);
1769
1770 /* Output current IL state of the function. */
1771 streamer_write_uhwi (ob, fn->curr_properties);
1772
1773 /* Write all the attributes for FN. */
1774 bp = bitpack_create (ob->main_stream);
1775 bp_pack_value (&bp, fn->is_thunk, 1);
1776 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
1777 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
1778 bp_pack_value (&bp, fn->returns_struct, 1);
1779 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
1780 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
1781 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
1782 bp_pack_value (&bp, fn->after_inlining, 1);
1783 bp_pack_value (&bp, fn->stdarg, 1);
1784 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
1785 bp_pack_value (&bp, fn->calls_alloca, 1);
1786 bp_pack_value (&bp, fn->calls_setjmp, 1);
1787 bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
1788 bp_pack_value (&bp, fn->has_simduid_loops, 1);
1789 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
1790 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
1791
1792 /* Output the function start and end loci. */
1793 stream_output_location (ob, &bp, fn->function_start_locus);
1794 stream_output_location (ob, &bp, fn->function_end_locus);
1795
1796 streamer_write_bitpack (&bp);
1797 }
1798
1799
1800 /* Output the body of function NODE->DECL. */
1801
1802 static void
1803 output_function (struct cgraph_node *node)
1804 {
1805 tree function;
1806 struct function *fn;
1807 basic_block bb;
1808 struct output_block *ob;
1809
1810 function = node->decl;
1811 fn = DECL_STRUCT_FUNCTION (function);
1812 ob = create_output_block (LTO_section_function_body);
1813
1814 clear_line_info (ob);
1815 ob->symbol = node;
1816
1817 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
1818
1819 /* Set current_function_decl and cfun. */
1820 push_cfun (fn);
1821
1822 /* Make string 0 be a NULL string. */
1823 streamer_write_char_stream (ob->string_stream, 0);
1824
1825 streamer_write_record_start (ob, LTO_function);
1826
1827 /* Output decls for the return value and the arguments. */
1828 stream_write_tree (ob, DECL_RESULT (function), true);
1829 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
1830
1831 /* Output DECL_INITIAL for the function, which contains the tree of
1832 lexical scopes. */
1833 stream_write_tree (ob, DECL_INITIAL (function), true);
1834
1835 /* We also stream abstract functions, for which we only stream what is
1836 needed for debug info. */
1837 if (gimple_has_body_p (function))
1838 {
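/* A nonzero flag tells the reader that a full GIMPLE body follows;
   the else branch below writes zero instead.  */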
1839 streamer_write_uhwi (ob, 1);
1840 output_struct_function_base (ob, fn);
1841
1842 /* Output all the SSA names used in the function. */
1843 output_ssa_names (ob, fn);
1844
1845 /* Output any exception handling regions. */
1846 output_eh_regions (ob, fn);
1847
1848
1849 /* We will renumber the statements. The code that does this uses
1850 the same ordering that we use for serializing them so we can use
1851 the same code on the other end and not have to write out the
1852 statement numbers. We do not assign UIDs to PHIs here because
1853 virtual PHIs get re-computed on the fly, which would make the numbers
1854 inconsistent. */
1855 set_gimple_stmt_max_uid (cfun, 0);
1856 FOR_ALL_BB_FN (bb, cfun)
1857 {
1858 gimple_stmt_iterator gsi;
1859 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1860 {
1861 gimple stmt = gsi_stmt (gsi);
1862
1863 /* Virtual PHIs are not going to be streamed. */
1864 if (!virtual_operand_p (gimple_phi_result (stmt)))
1865 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1866 }
1867 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1868 {
1869 gimple stmt = gsi_stmt (gsi);
1870 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1871 }
1872 }
1873 /* To avoid leaving duplicate gimple UIDs in the statements, number the
1874 virtual PHIs now. */
1875 FOR_ALL_BB_FN (bb, cfun)
1876 {
1877 gimple_stmt_iterator gsi;
1878 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1879 {
1880 gimple stmt = gsi_stmt (gsi);
1881 if (virtual_operand_p (gimple_phi_result (stmt)))
1882 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1883 }
1884 }
1885
1886 /* Output the code for the function. */
1887 FOR_ALL_BB_FN (bb, fn)
1888 output_bb (ob, bb, fn);
1889
1890 /* The terminator for this function. */
1891 streamer_write_record_start (ob, LTO_null);
1892
1893 output_cfg (ob, fn);
1894
1895 pop_cfun ();
1896 }
1897 else
1898 streamer_write_uhwi (ob, 0);
1899
1900 /* Create a section to hold the pickled output of this function. */
1901 produce_asm (ob, function);
1902
1903 destroy_output_block (ob);
1904 }
1905
1906 /* Output the initializer of variable NODE->DECL. */
1907
1908 static void
1909 output_constructor (struct varpool_node *node)
1910 {
1911 tree var = node->decl;
1912 struct output_block *ob;
1913
1914 ob = create_output_block (LTO_section_function_body);
1915
1916 clear_line_info (ob);
1917 ob->symbol = node;
1918
1919 /* Make string 0 be a NULL string. */
1920 streamer_write_char_stream (ob->string_stream, 0);
1921
1922 /* Output DECL_INITIAL for the variable, which holds the
1923 initializer. */
1924 stream_write_tree (ob, DECL_INITIAL (var), true);
1925
1926 /* Create a section to hold the pickled output of this variable. */
1927 produce_asm (ob, var);
1928
1929 destroy_output_block (ob);
1930 }
1931
1932
1933 /* Emit toplevel asms. */
1934
1935 void
1936 lto_output_toplevel_asms (void)
1937 {
1938 struct output_block *ob;
1939 struct asm_node *can;
1940 char *section_name;
1941 struct lto_output_stream *header_stream;
1942 struct lto_asm_header header;
1943
1944 if (! asm_nodes)
1945 return;
1946
1947 ob = create_output_block (LTO_section_asm);
1948
1949 /* Make string 0 be a NULL string. */
1950 streamer_write_char_stream (ob->string_stream, 0);
1951
1952 for (can = asm_nodes; can; can = can->next)
1953 {
1954 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
1955 streamer_write_hwi (ob, can->order);
1956 }
1957
1958 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
1959
1960 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
1961 lto_begin_section (section_name, !flag_wpa);
1962 free (section_name);
1963
1964 /* The entire header stream is computed here. */
1965 memset (&header, 0, sizeof (header));
1966
1967 /* Write the header. */
1968 header.lto_header.major_version = LTO_major_version;
1969 header.lto_header.minor_version = LTO_minor_version;
1970
1971 header.main_size = ob->main_stream->total_size;
1972 header.string_size = ob->string_stream->total_size;
1973
1974 header_stream = XCNEW (struct lto_output_stream);
1975 lto_output_data_stream (header_stream, &header, sizeof (header));
1976 lto_write_stream (header_stream);
1977 free (header_stream);
1978
1979 /* Put all of the gimple and the string table out to the asm file
1980 as a block of text. */
1981 lto_write_stream (ob->main_stream);
1982 lto_write_stream (ob->string_stream);
1983
1984 lto_end_section ();
1985
1986 destroy_output_block (ob);
1987 }
1988
1989
1990 /* Copy the function body or variable constructor of NODE without deserializing. */
1991
1992 static void
1993 copy_function_or_variable (struct symtab_node *node)
1994 {
1995 tree function = node->decl;
1996 struct lto_file_decl_data *file_data = node->lto_file_data;
1997 struct lto_output_stream *output_stream = XCNEW (struct lto_output_stream);
1998 const char *data;
1999 size_t len;
2000 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2001 char *section_name =
2002 lto_get_section_name (LTO_section_function_body, name, NULL);
2003 size_t i, j;
2004 struct lto_in_decl_state *in_state;
2005 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2006
2007 lto_begin_section (section_name, !flag_wpa);
2008 free (section_name);
2009
2010 /* We may have renamed the declaration, e.g., a static function. */
2011 name = lto_get_decl_name_mapping (file_data, name);
2012
2013 data = lto_get_section_data (file_data, LTO_section_function_body,
2014 name, &len);
2015 gcc_assert (data);
2016
2017 /* Do a bit copy of the function body. */
2018 lto_output_data_stream (output_stream, data, len);
2019 lto_write_stream (output_stream);
2020
2021 /* Copy decls. */
2022 in_state =
2023 lto_get_function_in_decl_state (node->lto_file_data, function);
2024 gcc_assert (in_state);
2025
2026 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2027 {
2028 size_t n = in_state->streams[i].size;
2029 tree *trees = in_state->streams[i].trees;
2030 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2031
2032 /* The out state must have the same indices as the in state, so just
2033 copy the vector. All the encoders in the out state must still be
2034 empty when we reach here. */
2035 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2036 encoder->trees.reserve_exact (n);
2037 for (j = 0; j < n; j++)
2038 encoder->trees.safe_push (trees[j]);
2039 }
2040
2041 lto_free_section_data (file_data, LTO_section_function_body, name,
2042 data, len);
2043 free (output_stream);
2044 lto_end_section ();
2045 }
2046
2047 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2048
2049 static tree
2050 wrap_refs (tree *tp, int *ws, void *)
2051 {
2052 tree t = *tp;
2053 if (handled_component_p (t)
2054 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL)
2055 {
2056 tree decl = TREE_OPERAND (t, 0);
2057 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2058 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2059 build1 (ADDR_EXPR, ptrtype, decl),
2060 build_int_cst (ptrtype, 0));
2061 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2062 *ws = 0;
2063 }
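/* Keep walking into CONSTRUCTOR elements, but do not recurse into
   other non-expression leaves.  */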
2064 else if (TREE_CODE (t) == CONSTRUCTOR)
2065 ;
2066 else if (!EXPR_P (t))
2067 *ws = 0;
2068 return NULL_TREE;
2069 }
2070
2071 /* Main entry point from the pass manager. */
2072
2073 void
2074 lto_output (void)
2075 {
2076 struct lto_out_decl_state *decl_state;
2077 #ifdef ENABLE_CHECKING
2078 bitmap output = lto_bitmap_alloc ();
2079 #endif
2080 int i, n_nodes;
2081 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2082
2083 /* Initialize the streamer. */
2084 lto_streamer_init ();
2085
2086 n_nodes = lto_symtab_encoder_size (encoder);
2087 /* Process the functions with bodies and the variable initializers to output. */
2088 for (i = 0; i < n_nodes; i++)
2089 {
2090 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2091 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2092 {
2093 if (lto_symtab_encoder_encode_body_p (encoder, node)
2094 && !node->alias)
2095 {
2096 #ifdef ENABLE_CHECKING
2097 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2098 bitmap_set_bit (output, DECL_UID (node->decl));
2099 #endif
2100 decl_state = lto_new_out_decl_state ();
2101 lto_push_out_decl_state (decl_state);
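/* At WPA time the GIMPLE body has usually not been read in; in that
   case re-emit the original input section unchanged rather than
   re-streaming the body.  */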
2102 if (gimple_has_body_p (node->decl) || !flag_wpa)
2103 output_function (node);
2104 else
2105 copy_function_or_variable (node);
2106 gcc_assert (lto_get_out_decl_state () == decl_state);
2107 lto_pop_out_decl_state ();
2108 lto_record_function_out_decl_state (node->decl, decl_state);
2109 }
2110 }
2111 else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2112 {
2113 /* Wrap symbol references inside the ctor in a type
2114 preserving MEM_REF. */
2115 tree ctor = DECL_INITIAL (node->decl);
2116 if (ctor && !in_lto_p)
2117 walk_tree (&ctor, wrap_refs, NULL, NULL);
2118 if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2119 && lto_symtab_encoder_encode_initializer_p (encoder, node)
2120 && !node->alias)
2121 {
2122 timevar_push (TV_IPA_LTO_CTORS_OUT);
2123 #ifdef ENABLE_CHECKING
2124 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2125 bitmap_set_bit (output, DECL_UID (node->decl));
2126 #endif
2127 decl_state = lto_new_out_decl_state ();
2128 lto_push_out_decl_state (decl_state);
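/* Likewise for initializers: an initializer that was not read in is
   still error_mark_node at WPA time, so copy the original section
   instead of re-streaming it.  */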
2129 if (DECL_INITIAL (node->decl) != error_mark_node
2130 || !flag_wpa)
2131 output_constructor (node);
2132 else
2133 copy_function_or_variable (node);
2134 gcc_assert (lto_get_out_decl_state () == decl_state);
2135 lto_pop_out_decl_state ();
2136 lto_record_function_out_decl_state (node->decl, decl_state);
2137 timevar_pop (TV_IPA_LTO_CTORS_OUT);
2138 }
2139 }
2140 }
2141
2142 /* Emit the callgraph after emitting function bodies. This needs to
2143 be done now to make sure that all the statements in every function
2144 have been renumbered so that edges can be associated with call
2145 statements using the statement UIDs. */
2146 output_symtab ();
2147
2148 #ifdef ENABLE_CHECKING
2149 lto_bitmap_free (output);
2150 #endif
2151 }
2152
2153 /* Write each node encoded by ENCODER to OB, as well as those reachable
2154 from it and required for correct representation of its semantics.
2155 Each node in ENCODER must be a global declaration or a type. A node
2156 is written only once, even if it appears multiple times in the
2157 vector. Certain transitively-reachable nodes, such as those
2158 representing expressions, may be duplicated, but such nodes
2159 must not appear in ENCODER itself. */
2160
2161 static void
2162 write_global_stream (struct output_block *ob,
2163 struct lto_tree_ref_encoder *encoder)
2164 {
2165 tree t;
2166 size_t index;
2167 const size_t size = lto_tree_ref_encoder_size (encoder);
2168
2169 for (index = 0; index < size; index++)
2170 {
2171 t = lto_tree_ref_encoder_get_tree (encoder, index);
2172 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2173 stream_write_tree (ob, t, false);
2174 }
2175 }
2176
2177
2178 /* Write a sequence of indices into the globals vector corresponding
2179 to the trees in ENCODER. These are used by the reader to map the
2180 indices used to refer to global entities within function bodies to
2181 their referents. */
2182
2183 static void
2184 write_global_references (struct output_block *ob,
2185 struct lto_output_stream *ref_stream,
2186 struct lto_tree_ref_encoder *encoder)
2187 {
2188 tree t;
2189 uint32_t index;
2190 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2191
2192 /* Write size as 32-bit unsigned. */
2193 lto_output_data_stream (ref_stream, &size, sizeof (int32_t));
2194
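/* Then write the writer-cache slot number of each tree, also as a
   32-bit unsigned integer.  */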
2195 for (index = 0; index < size; index++)
2196 {
2197 uint32_t slot_num;
2198
2199 t = lto_tree_ref_encoder_get_tree (encoder, index);
2200 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2201 gcc_assert (slot_num != (unsigned)-1);
2202 lto_output_data_stream (ref_stream, &slot_num, sizeof slot_num);
2203 }
2204 }
2205
2206
2207 /* Write all the streams in an lto_out_decl_state STATE using
2208 output block OB. */
2209
2210 void
2211 lto_output_decl_state_streams (struct output_block *ob,
2212 struct lto_out_decl_state *state)
2213 {
2214 int i;
2215
2216 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2217 write_global_stream (ob, &state->streams[i]);
2218 }
2219
2220
2221 /* Write all the references in an lto_out_decl_state STATE using
2222 output block OB and output stream OUT_STREAM. */
2223
2224 void
2225 lto_output_decl_state_refs (struct output_block *ob,
2226 struct lto_output_stream *out_stream,
2227 struct lto_out_decl_state *state)
2228 {
2229 unsigned i;
2230 uint32_t ref;
2231 tree decl;
2232
2233 /* Write a reference to FUNCTION_DECL. If there is no function,
2234 write a reference to void_type_node. */
2235 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2236 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2237 gcc_assert (ref != (unsigned)-1);
2238 lto_output_data_stream (out_stream, &ref, sizeof (uint32_t));
2239
2240 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2241 write_global_references (ob, out_stream, &state->streams[i]);
2242 }
2243
2244
2245 /* Return the written size of STATE. */
2246
2247 static size_t
2248 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2249 {
2250 int i;
2251 size_t size;
2252
2253 size = sizeof (int32_t); /* fn_ref. */
2254 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2255 {
2256 size += sizeof (int32_t); /* vector size. */
2257 size += (lto_tree_ref_encoder_size (&state->streams[i])
2258 * sizeof (int32_t));
2259 }
2260 return size;
2261 }
2262
2263
2264 /* Write symbol T into STREAM, using CACHE to look up its slot number.
2265 SEEN specifies the symbol names written so far. */
2266
2267 static void
2268 write_symbol (struct streamer_tree_cache_d *cache,
2269 struct lto_output_stream *stream,
2270 tree t, struct pointer_set_t *seen, bool alias)
2271 {
2272 const char *name;
2273 enum gcc_plugin_symbol_kind kind;
2274 enum gcc_plugin_symbol_visibility visibility;
2275 unsigned slot_num;
2276 uint64_t size;
2277 const char *comdat;
2278 unsigned char c;
2279
2280 /* None of the following kinds of symbols are needed in the
2281 symbol table. */
2282 if (!TREE_PUBLIC (t)
2283 || is_builtin_fn (t)
2284 || DECL_ABSTRACT (t)
2285 || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
2286 return;
2287 gcc_assert (TREE_CODE (t) != RESULT_DECL);
2288
2289 gcc_assert (TREE_CODE (t) == VAR_DECL
2290 || TREE_CODE (t) == FUNCTION_DECL);
2291
2292 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2293
2294 /* This behaves like assemble_name_raw in varasm.c, performing the
2295 same name manipulations that ASM_OUTPUT_LABELREF does. */
2296 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2297
2298 if (pointer_set_contains (seen, name))
2299 return;
2300 pointer_set_insert (seen, name);
2301
2302 streamer_tree_cache_lookup (cache, t, &slot_num);
2303 gcc_assert (slot_num != (unsigned)-1);
2304
2305 if (DECL_EXTERNAL (t))
2306 {
2307 if (DECL_WEAK (t))
2308 kind = GCCPK_WEAKUNDEF;
2309 else
2310 kind = GCCPK_UNDEF;
2311 }
2312 else
2313 {
2314 if (DECL_WEAK (t))
2315 kind = GCCPK_WEAKDEF;
2316 else if (DECL_COMMON (t))
2317 kind = GCCPK_COMMON;
2318 else
2319 kind = GCCPK_DEF;
2320
2321 /* When something is defined, it should have a node attached. */
2322 gcc_assert (alias || TREE_CODE (t) != VAR_DECL
2323 || varpool_node::get (t)->definition);
2324 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2325 || (cgraph_node::get (t)
2326 && cgraph_node::get (t)->definition));
2327 }
2328
2329 /* Imitate what default_elf_asm_output_external does.
2330 When a symbol is external, we need to output it with DEFAULT visibility
2331 when compiling with -fvisibility=default, but with HIDDEN visibility
2332 when the symbol has the visibility("hidden") attribute specified.
2333 targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
2334 right. */
2335
2336 if (DECL_EXTERNAL (t)
2337 && !targetm.binds_local_p (t))
2338 visibility = GCCPV_DEFAULT;
2339 else
2340 switch (DECL_VISIBILITY (t))
2341 {
2342 case VISIBILITY_DEFAULT:
2343 visibility = GCCPV_DEFAULT;
2344 break;
2345 case VISIBILITY_PROTECTED:
2346 visibility = GCCPV_PROTECTED;
2347 break;
2348 case VISIBILITY_HIDDEN:
2349 visibility = GCCPV_HIDDEN;
2350 break;
2351 case VISIBILITY_INTERNAL:
2352 visibility = GCCPV_INTERNAL;
2353 break;
2354 }
2355
2356 if (kind == GCCPK_COMMON
2357 && DECL_SIZE_UNIT (t)
2358 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2359 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2360 else
2361 size = 0;
2362
2363 if (DECL_ONE_ONLY (t))
2364 comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
2365 else
2366 comdat = "";
2367
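/* Emit one symbol table entry: the NUL-terminated name and comdat
   group, one byte each for kind and visibility, a 64-bit size and a
   32-bit slot number.  */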
2368 lto_output_data_stream (stream, name, strlen (name) + 1);
2369 lto_output_data_stream (stream, comdat, strlen (comdat) + 1);
2370 c = (unsigned char) kind;
2371 lto_output_data_stream (stream, &c, 1);
2372 c = (unsigned char) visibility;
2373 lto_output_data_stream (stream, &c, 1);
2374 lto_output_data_stream (stream, &size, 8);
2375 lto_output_data_stream (stream, &slot_num, 4);
2376 }
2377
2378 /* Return true if NODE should appear in the plugin symbol table. */
2379
2380 bool
2381 output_symbol_p (symtab_node *node)
2382 {
2383 struct cgraph_node *cnode;
2384 if (!node->real_symbol_p ())
2385 return false;
2386 /* We keep external functions in the symtab for the sake of inlining
2387 and devirtualization. We do not want to see them in the symbol table
2388 as references unless they are really used. */
2389 cnode = dyn_cast <cgraph_node *> (node);
2390 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2391 && cnode->callers)
2392 return true;
2393
2394 /* Ignore all references from the initializers of external variables - they
2395 are not really part of the compilation unit until they are used by folding.
2396 Some symbols, like references to external construction vtables, cannot be
2397 referred to at all. We decide this in can_refer_decl_in_current_unit_p. */
2398 if (!node->definition || DECL_EXTERNAL (node->decl))
2399 {
2400 int i;
2401 struct ipa_ref *ref;
2402 for (i = 0; node->iterate_referring (i, ref); i++)
2403 {
2404 if (ref->use == IPA_REF_ALIAS)
2405 continue;
2406 if (is_a <cgraph_node *> (ref->referring))
2407 return true;
2408 if (!DECL_EXTERNAL (ref->referring->decl))
2409 return true;
2410 }
2411 return false;
2412 }
2413 return true;
2414 }
2415
2416
2417 /* Write an IL symbol table to OB. The symbols written are those
2418 recorded in the symtab node encoder of OB's decl state. */
2419
2420 static void
2421 produce_symtab (struct output_block *ob)
2422 {
2423 struct streamer_tree_cache_d *cache = ob->writer_cache;
2424 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2425 struct pointer_set_t *seen;
2426 struct lto_output_stream stream;
2427 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2428 lto_symtab_encoder_iterator lsei;
2429
2430 lto_begin_section (section_name, false);
2431 free (section_name);
2432
2433 seen = pointer_set_create ();
2434 memset (&stream, 0, sizeof (stream));
2435
2436 /* Write the symbol table.
2437 First write everything defined and then all declarations.
2438 This is necessary to handle cases where we have duplicated symbols. */
2439 for (lsei = lsei_start (encoder);
2440 !lsei_end_p (lsei); lsei_next (&lsei))
2441 {
2442 symtab_node *node = lsei_node (lsei);
2443
2444 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2445 continue;
2446 write_symbol (cache, &stream, node->decl, seen, false);
2447 }
2448 for (lsei = lsei_start (encoder);
2449 !lsei_end_p (lsei); lsei_next (&lsei))
2450 {
2451 symtab_node *node = lsei_node (lsei);
2452
2453 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2454 continue;
2455 write_symbol (cache, &stream, node->decl, seen, false);
2456 }
2457
2458 lto_write_stream (&stream);
2459 pointer_set_destroy (seen);
2460
2461 lto_end_section ();
2462 }
2463
2464
2465 /* This pass is run after all of the functions are serialized and all
2466 of the IPA passes have written their serialized forms. This pass
2467 causes the vector of all of the global decls and types used from
2468 this file to be written into a section that can then be read back
2469 in to recover them on the other side. */
2470
2471 void
2472 produce_asm_for_decls (void)
2473 {
2474 struct lto_out_decl_state *out_state;
2475 struct lto_out_decl_state *fn_out_state;
2476 struct lto_decl_header header;
2477 char *section_name;
2478 struct output_block *ob;
2479 struct lto_output_stream *header_stream, *decl_state_stream;
2480 unsigned idx, num_fns;
2481 size_t decl_state_size;
2482 int32_t num_decl_states;
2483
2484 ob = create_output_block (LTO_section_decls);
2485 ob->global = true;
2486
2487 memset (&header, 0, sizeof (struct lto_decl_header));
2488
2489 section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
2490 lto_begin_section (section_name, !flag_wpa);
2491 free (section_name);
2492
2493 /* Make string 0 be a NULL string. */
2494 streamer_write_char_stream (ob->string_stream, 0);
2495
2496 gcc_assert (!alias_pairs);
2497
2498 /* Get rid of the global decl state hash tables to save some memory. */
2499 out_state = lto_get_out_decl_state ();
2500 for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
2501 if (out_state->streams[i].tree_hash_table)
2502 {
2503 delete out_state->streams[i].tree_hash_table;
2504 out_state->streams[i].tree_hash_table = NULL;
2505 }
2506
2507 /* Write the global symbols. */
2508 lto_output_decl_state_streams (ob, out_state);
2509 num_fns = lto_function_decl_states.length ();
2510 for (idx = 0; idx < num_fns; idx++)
2511 {
2512 fn_out_state =
2513 lto_function_decl_states[idx];
2514 lto_output_decl_state_streams (ob, fn_out_state);
2515 }
2516
2517 header.lto_header.major_version = LTO_major_version;
2518 header.lto_header.minor_version = LTO_minor_version;
2519
2520 /* Currently not used. This field would allow us to preallocate
2521 the globals vector, so that it need not be resized as it is extended. */
2522 header.num_nodes = -1;
2523
2524 /* Compute the total size of all decl out states. */
2525 decl_state_size = sizeof (int32_t);
2526 decl_state_size += lto_out_decl_state_written_size (out_state);
2527 for (idx = 0; idx < num_fns; idx++)
2528 {
2529 fn_out_state =
2530 lto_function_decl_states[idx];
2531 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
2532 }
2533 header.decl_state_size = decl_state_size;
2534
2535 header.main_size = ob->main_stream->total_size;
2536 header.string_size = ob->string_stream->total_size;
2537
2538 header_stream = XCNEW (struct lto_output_stream);
2539 lto_output_data_stream (header_stream, &header, sizeof header);
2540 lto_write_stream (header_stream);
2541 free (header_stream);
2542
2543 /* Write the main out-decl state, followed by out-decl states of
2544 functions. */
2545 decl_state_stream = XCNEW (struct lto_output_stream);
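/* One decl state for the global scope plus one per function.  */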
2546 num_decl_states = num_fns + 1;
2547 lto_output_data_stream (decl_state_stream, &num_decl_states,
2548 sizeof (num_decl_states));
2549 lto_output_decl_state_refs (ob, decl_state_stream, out_state);
2550 for (idx = 0; idx < num_fns; idx++)
2551 {
2552 fn_out_state =
2553 lto_function_decl_states[idx];
2554 lto_output_decl_state_refs (ob, decl_state_stream, fn_out_state);
2555 }
2556 lto_write_stream (decl_state_stream);
2557 free (decl_state_stream);
2558
2559 lto_write_stream (ob->main_stream);
2560 lto_write_stream (ob->string_stream);
2561
2562 lto_end_section ();
2563
2564 /* Write the symbol table. It is used by the linker to determine
2565 dependencies, and thus we can skip it for WPA. */
2566 if (!flag_wpa)
2567 produce_symtab (ob);
2568
2569 /* Write command line opts. */
2570 lto_write_options ();
2571
2572 /* Deallocate memory and clean up. */
2573 for (idx = 0; idx < num_fns; idx++)
2574 {
2575 fn_out_state =
2576 lto_function_decl_states[idx];
2577 lto_delete_out_decl_state (fn_out_state);
2578 }
2579 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
2580 lto_function_decl_states.release ();
2581 destroy_output_block (ob);
2582 }