tree-core.h (tree_code_name): Remove.
[gcc.git] / gcc / lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2013 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "expr.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "hashtab.h"
33 #include "basic-block.h"
34 #include "tree-ssa.h"
35 #include "tree-pass.h"
36 #include "cgraph.h"
37 #include "function.h"
38 #include "ggc.h"
39 #include "diagnostic-core.h"
40 #include "except.h"
41 #include "vec.h"
42 #include "lto-symtab.h"
43 #include "lto-streamer.h"
44 #include "data-streamer.h"
45 #include "gimple-streamer.h"
46 #include "tree-streamer.h"
47 #include "streamer-hooks.h"
48 #include "cfgloop.h"
49
50
51 /* Clear the line info stored in DATA_IN. */
52
53 static void
54 clear_line_info (struct output_block *ob)
55 {
56 ob->current_file = NULL;
57 ob->current_line = 0;
58 ob->current_col = 0;
59 }
60
61
62 /* Create the output block and return it. SECTION_TYPE is
63 LTO_section_function_body or LTO_static_initializer. */
64
65 struct output_block *
66 create_output_block (enum lto_section_type section_type)
67 {
68 struct output_block *ob = XCNEW (struct output_block);
69
70 ob->section_type = section_type;
71 ob->decl_state = lto_get_out_decl_state ();
72 ob->main_stream = XCNEW (struct lto_output_stream);
73 ob->string_stream = XCNEW (struct lto_output_stream);
74 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true);
75
76 if (section_type == LTO_section_function_body)
77 ob->cfg_stream = XCNEW (struct lto_output_stream);
78
79 clear_line_info (ob);
80
81 ob->string_hash_table.create (37);
82 gcc_obstack_init (&ob->obstack);
83
84 return ob;
85 }
86
87
88 /* Destroy the output block OB. */
89
90 void
91 destroy_output_block (struct output_block *ob)
92 {
93 enum lto_section_type section_type = ob->section_type;
94
95 ob->string_hash_table.dispose ();
96
97 free (ob->main_stream);
98 free (ob->string_stream);
99 if (section_type == LTO_section_function_body)
100 free (ob->cfg_stream);
101
102 streamer_tree_cache_delete (ob->writer_cache);
103 obstack_free (&ob->obstack, NULL);
104
105 free (ob);
106 }
107
108
/* Look up NODE in the type table and write the index for it to OB.  */

static void
output_type_ref (struct output_block *ob, tree node)
{
  /* Emit the record tag first so the reader knows a type-table index
     follows, then the index itself.  */
  streamer_write_record_start (ob, LTO_type_ref);
  lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
}
117
118
119 /* Return true if tree node T is written to various tables. For these
120 nodes, we sometimes want to write their phyiscal representation
121 (via lto_output_tree), and sometimes we need to emit an index
122 reference into a table (via lto_output_tree_ref). */
123
124 static bool
125 tree_is_indexable (tree t)
126 {
127 /* Parameters and return values of functions of variably modified types
128 must go to global stream, because they may be used in the type
129 definition. */
130 if (TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
131 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
132 else if (TREE_CODE (t) == VAR_DECL && decl_function_context (t)
133 && !TREE_STATIC (t))
134 return false;
135 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
136 return false;
137 /* Variably modified types need to be streamed alongside function
138 bodies because they can refer to local entities. Together with
139 them we have to localize their members as well.
140 ??? In theory that includes non-FIELD_DECLs as well. */
141 else if (TYPE_P (t)
142 && variably_modified_type_p (t, NULL_TREE))
143 return false;
144 else if (TREE_CODE (t) == FIELD_DECL
145 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
146 return false;
147 else
148 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
149 }
150
151
/* Output info about new location into bitpack BP.
   After outputting bitpack, lto_output_location_data has
   to be done to output actual data.  */

void
lto_output_location (struct output_block *ob, struct bitpack_d *bp,
		     location_t loc)
{
  expanded_location xloc;

  /* Strip any ad-hoc (block) information; only the locus matters.  */
  loc = LOCATION_LOCUS (loc);
  bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
  if (loc == UNKNOWN_LOCATION)
    return;

  xloc = expand_location (loc);

  /* Pack one "changed" bit per component (file, line, column) so that
     components equal to the previously streamed location need not be
     re-emitted.  */
  bp_pack_value (bp, ob->current_file != xloc.file, 1);
  bp_pack_value (bp, ob->current_line != xloc.line, 1);
  bp_pack_value (bp, ob->current_col != xloc.column, 1);

  if (ob->current_file != xloc.file)
    bp_pack_var_len_unsigned (bp,
			      streamer_string_index (ob, xloc.file,
						     strlen (xloc.file) + 1,
						     true));
  ob->current_file = xloc.file;

  if (ob->current_line != xloc.line)
    bp_pack_var_len_unsigned (bp, xloc.line);
  ob->current_line = xloc.line;

  if (ob->current_col != xloc.column)
    bp_pack_var_len_unsigned (bp, xloc.column);
  ob->current_col = xloc.column;
}
188
189
/* If EXPR is an indexable tree node, output a reference to it to
   output block OB.  Otherwise, output the physical representation of
   EXPR to OB.  */

static void
lto_output_tree_ref (struct output_block *ob, tree expr)
{
  enum tree_code code;

  /* All types are referenced through the type table.  */
  if (TYPE_P (expr))
    {
      output_type_ref (ob, expr);
      return;
    }

  /* Each indexable tree code gets its own record tag followed by an
     index into the appropriate decl table.  */
  code = TREE_CODE (expr);
  switch (code)
    {
    case SSA_NAME:
      streamer_write_record_start (ob, LTO_ssa_name_ref);
      streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
      break;

    case FIELD_DECL:
      streamer_write_record_start (ob, LTO_field_decl_ref);
      lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case FUNCTION_DECL:
      streamer_write_record_start (ob, LTO_function_decl_ref);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case VAR_DECL:
    case DEBUG_EXPR_DECL:
      gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
      /* Fall through.  Globals and PARM_DECLs share the global decl
	 reference record.  */
    case PARM_DECL:
      streamer_write_record_start (ob, LTO_global_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case CONST_DECL:
      streamer_write_record_start (ob, LTO_const_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case IMPORTED_DECL:
      gcc_assert (decl_function_context (expr) == NULL);
      streamer_write_record_start (ob, LTO_imported_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TYPE_DECL:
      streamer_write_record_start (ob, LTO_type_decl_ref);
      lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMESPACE_DECL:
      streamer_write_record_start (ob, LTO_namespace_decl_ref);
      lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case LABEL_DECL:
      streamer_write_record_start (ob, LTO_label_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case RESULT_DECL:
      streamer_write_record_start (ob, LTO_result_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TRANSLATION_UNIT_DECL:
      streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    default:
      /* No other node is indexable, so it should have been handled by
	 lto_output_tree.  */
      gcc_unreachable ();
    }
}
273
274
275 /* Return true if EXPR is a tree node that can be written to disk. */
276
277 static inline bool
278 lto_is_streamable (tree expr)
279 {
280 enum tree_code code = TREE_CODE (expr);
281
282 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
283 name version in lto_output_tree_ref (see output_ssa_names). */
284 return !is_lang_specific (expr)
285 && code != SSA_NAME
286 && code != CALL_EXPR
287 && code != LANG_TYPE
288 && code != MODIFY_EXPR
289 && code != INIT_EXPR
290 && code != TARGET_EXPR
291 && code != BIND_EXPR
292 && code != WITH_CLEANUP_EXPR
293 && code != STATEMENT_LIST
294 && code != OMP_CLAUSE
295 && (code == CASE_LABEL_EXPR
296 || code == DECL_EXPR
297 || TREE_CODE_CLASS (code) != tcc_statement);
298 }
299
300
301 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
302
303 static tree
304 get_symbol_initial_value (struct output_block *ob, tree expr)
305 {
306 gcc_checking_assert (DECL_P (expr)
307 && TREE_CODE (expr) != FUNCTION_DECL
308 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
309
310 /* Handle DECL_INITIAL for symbols. */
311 tree initial = DECL_INITIAL (expr);
312 if (TREE_CODE (expr) == VAR_DECL
313 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
314 && !DECL_IN_CONSTANT_POOL (expr)
315 && initial)
316 {
317 lto_symtab_encoder_t encoder;
318 struct varpool_node *vnode;
319
320 encoder = ob->decl_state->symtab_node_encoder;
321 vnode = varpool_get_node (expr);
322 if (!vnode
323 || !lto_symtab_encoder_encode_initializer_p (encoder,
324 vnode))
325 initial = error_mark_node;
326 }
327
328 return initial;
329 }
330
331
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  */

static void
lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
{
  /* Pack all the non-pointer fields in EXPR into a bitpack and write
     the resulting bitpack.  */
  bitpack_d bp = bitpack_create (ob->main_stream);
  streamer_pack_tree_bitfields (ob, &bp, expr);
  streamer_write_bitpack (&bp);

  /* Write all the pointer fields in EXPR.  */
  streamer_write_tree_body (ob, expr, ref_p);

  /* Write any LTO-specific data to OB.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    {
      /* Handle DECL_INITIAL for symbols.  Initializers we do not want
	 to stream are replaced by error_mark_node in
	 get_symbol_initial_value.  */
      tree initial = get_symbol_initial_value (ob, expr);
      stream_write_tree (ob, initial, ref_p);
    }
}
359
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  */

static void
lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
{
  if (!lto_is_streamable (expr))
    internal_error ("tree code %qs is not supported in LTO streams",
		    get_tree_code_name (TREE_CODE (expr)));

  /* Write the header, containing everything needed to materialize
     EXPR on the reading side.  */
  streamer_write_tree_header (ob, expr);

  /* Then the bitfields, pointer fields and any DECL_INITIAL.  */
  lto_write_tree_1 (ob, expr, ref_p);

  /* Mark the end of EXPR.  */
  streamer_write_zero (ob);
}
381
/* Emit the physical representation of tree node EXPR to output block
   OB.  If THIS_REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.
   HASH is EXPR's streaming hash, recorded alongside it in the writer
   cache.  */

static void
lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
		   bool ref_p, bool this_ref_p)
{
  unsigned ix;

  gcc_checking_assert (expr != NULL_TREE
		       && !(this_ref_p && tree_is_indexable (expr)));

  /* EXPR must not have been streamed before; record it in the writer
     cache so later occurrences become back-references.  */
  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
					      expr, hash, &ix);
  gcc_assert (!exists_p);
  if (streamer_handle_as_builtin_p (expr))
    {
      /* MD and NORMAL builtins do not need to be written out
	 completely as they are always instantiated by the
	 compiler on startup.  The only builtins that need to
	 be written out are BUILT_IN_FRONTEND.  For all other
	 builtins, we simply write the class and code.  */
      streamer_write_builtin (ob, expr);
    }
  else if (TREE_CODE (expr) == INTEGER_CST
	   && !TREE_OVERFLOW (expr))
    {
      /* Shared INTEGER_CST nodes are special because they need their
	 original type to be materialized by the reader (to implement
	 TYPE_CACHED_VALUES).  */
      streamer_write_integer_cst (ob, expr, ref_p);
    }
  else
    {
      /* This is the first time we see EXPR, write its fields
	 to OB.  */
      lto_write_tree (ob, expr, ref_p);
    }
}
422
/* Per-tree state used during the SCC discovery walk.  */

struct sccs
{
  /* DFS pre-order number assigned when the tree is first visited.  */
  unsigned int dfsnum;
  /* Smallest dfsnum reachable from this node; NOTE(review): looks like
     Tarjan-style SCC detection, but the walk (DFS_write_tree) is
     defined elsewhere — confirm there.  */
  unsigned int low;
};

/* A tree paired with its streaming hash, as kept on the SCC stack.  */

struct scc_entry
{
  tree t;
  hashval_t hash;
};

/* Next DFS number to hand out during the walk.  */
static unsigned int next_dfs_num;
/* Stack of entries belonging to SCCs that are still open.  */
static vec<scc_entry> sccstack;
/* Map from tree to its sccs state.  */
static struct pointer_map_t *sccstate;
/* Obstack the sccs states are allocated on.  */
static struct obstack sccstate_obstack;

static void
DFS_write_tree (struct output_block *ob, sccs *from_state,
		tree expr, bool ref_p, bool this_ref_p);
443
/* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
   DFS recurse for all tree edges originating from it.  */

static void
DFS_write_tree_body (struct output_block *ob,
		     tree expr, sccs *expr_state, bool ref_p)
{
/* Recurse into one outgoing edge of EXPR.  Edges followed here must
   mirror the pointer fields streamed by streamer_write_tree_body.  */
#define DFS_follow_tree_edge(DEST) \
  DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)

  enum tree_code code;

  code = TREE_CODE (expr);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (TREE_CODE (expr) != IDENTIFIER_NODE)
	DFS_follow_tree_edge (TREE_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
	DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      DFS_follow_tree_edge (TREE_REALPART (expr));
      DFS_follow_tree_edge (TREE_IMAGPART (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (expr)
	  && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
	  && ANON_AGGRNAME_P (DECL_NAME (expr)))
	;
      else
	DFS_follow_tree_edge (DECL_NAME (expr));
      DFS_follow_tree_edge (DECL_CONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      DFS_follow_tree_edge (DECL_SIZE (expr));
      DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));

      /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
	 special handling in LTO, it must be handled by streamer hooks.  */

      DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));

      /* Do not follow DECL_ABSTRACT_ORIGIN.  We cannot handle debug information
	 for early inlining so drop it on the floor instead of ICEing in
	 dwarf2out.c.  */

      if ((TREE_CODE (expr) == VAR_DECL
	   || TREE_CODE (expr) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (expr))
	DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
      if (TREE_CODE (expr) == VAR_DECL)
	DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (TREE_CODE (expr) == TYPE_DECL)
	DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
      DFS_follow_tree_edge (DECL_VINDEX (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* Make sure we don't inadvertently set the assembler name.  */
      if (DECL_ASSEMBLER_NAME_SET_P (expr))
	DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
      DFS_follow_tree_edge (DECL_SECTION_NAME (expr));
      DFS_follow_tree_edge (DECL_COMDAT_GROUP (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
      DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
      DFS_follow_tree_edge (DECL_FCONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      DFS_follow_tree_edge (TYPE_SIZE (expr));
      DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
      DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
      DFS_follow_tree_edge (TYPE_NAME (expr));
      /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
	 reconstructed during fixup.  */
      /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
	 during fixup.  */
      DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
      DFS_follow_tree_edge (TYPE_CONTEXT (expr));
      /* TYPE_CANONICAL is re-computed during type merging, so no need
	 to follow it here.  */
      DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	DFS_follow_tree_edge (TYPE_VALUES (expr));
      else if (TREE_CODE (expr) == ARRAY_TYPE)
	DFS_follow_tree_edge (TYPE_DOMAIN (expr));
      else if (RECORD_OR_UNION_TYPE_P (expr))
	for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
	  DFS_follow_tree_edge (t);
      else if (TREE_CODE (expr) == FUNCTION_TYPE
	       || TREE_CODE (expr) == METHOD_TYPE)
	DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));

      if (!POINTER_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_MINVAL (expr));
      DFS_follow_tree_edge (TYPE_MAXVAL (expr));
      if (RECORD_OR_UNION_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_BINFO (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      DFS_follow_tree_edge (TREE_PURPOSE (expr));
      DFS_follow_tree_edge (TREE_VALUE (expr));
      DFS_follow_tree_edge (TREE_CHAIN (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    {
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_OPERAND (expr, i));
      DFS_follow_tree_edge (TREE_BLOCK (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    {
      for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
	/* ??? FIXME.  See also streamer_write_chain.  */
	if (!(VAR_OR_FUNCTION_DECL_P (t)
	      && DECL_EXTERNAL (t)))
	  DFS_follow_tree_edge (t);

      DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));

      /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
	 handle - those that represent inlined function scopes.
	 For the rest, drop them on the floor instead of ICEing
	 in dwarf2out.c.  */
      if (inlined_function_outer_scope_p (expr))
	{
	  tree ultimate_origin = block_ultimate_origin (expr);
	  DFS_follow_tree_edge (ultimate_origin);
	}
      /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
	 information for early inlined BLOCKs so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
	 streaming time.  */

      /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
	 list is re-constructed from BLOCK_SUPERCONTEXT.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree t;

      /* Note that the number of BINFO slots has already been emitted in
	 EXPR's header (see streamer_write_tree_header) because this length
	 is needed to build the empty BINFO node on the reader side.  */
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
	DFS_follow_tree_edge (t);
      DFS_follow_tree_edge (BINFO_OFFSET (expr));
      DFS_follow_tree_edge (BINFO_VTABLE (expr));
      DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));

      /* The number of BINFO_BASE_ACCESSES has already been emitted in
	 EXPR's bitfield section.  */
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
	DFS_follow_tree_edge (t);

      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
	{
	  DFS_follow_tree_edge (index);
	  DFS_follow_tree_edge (value);
	}
    }

#undef DFS_follow_tree_edge
}
666
/* Return a hash value for the tree T.  The hash mixes T's own
   non-pointer fields with the cached hashes (looked up in CACHE) of
   the trees it references.  The exact mixing order is part of the
   streamed LTO format; do not reorder.  */

static hashval_t
hash_tree (struct streamer_tree_cache_d *cache, tree t)
{
/* Mix the cached hash of SIBLING into V, if SIBLING is non-NULL and
   already present in CACHE.  */
#define visit(SIBLING) \
  do { \
    unsigned ix; \
    if (SIBLING && streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
      v = iterative_hash_hashval_t (streamer_tree_cache_get_hash (cache, ix), v); \
  } while (0)

  /* Hash TS_BASE.  */
  enum tree_code code = TREE_CODE (t);
  hashval_t v = iterative_hash_host_wide_int (code, 0);
  if (!TYPE_P (t))
    {
      v = iterative_hash_host_wide_int (TREE_SIDE_EFFECTS (t)
					| (TREE_CONSTANT (t) << 1)
					| (TREE_READONLY (t) << 2)
					| (TREE_PUBLIC (t) << 3), v);
    }
  v = iterative_hash_host_wide_int (TREE_ADDRESSABLE (t)
				    | (TREE_THIS_VOLATILE (t) << 1), v);
  if (DECL_P (t))
    v = iterative_hash_host_wide_int (DECL_UNSIGNED (t), v);
  else if (TYPE_P (t))
    v = iterative_hash_host_wide_int (TYPE_UNSIGNED (t), v);
  if (TYPE_P (t))
    v = iterative_hash_host_wide_int (TYPE_ARTIFICIAL (t), v);
  else
    v = iterative_hash_host_wide_int (TREE_NO_WARNING (t), v);
  v = iterative_hash_host_wide_int (TREE_NOTHROW (t)
				    | (TREE_STATIC (t) << 1)
				    | (TREE_PROTECTED (t) << 2)
				    | (TREE_DEPRECATED (t) << 3), v);
  if (code != TREE_BINFO)
    v = iterative_hash_host_wide_int (TREE_PRIVATE (t), v);
  if (TYPE_P (t))
    v = iterative_hash_host_wide_int (TYPE_SATURATING (t)
				      | (TYPE_ADDR_SPACE (t) << 1), v);
  else if (code == SSA_NAME)
    v = iterative_hash_host_wide_int (SSA_NAME_IS_DEFAULT_DEF (t), v);

  if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
    {
      v = iterative_hash_host_wide_int (TREE_INT_CST_LOW (t), v);
      v = iterative_hash_host_wide_int (TREE_INT_CST_HIGH (t), v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
    {
      REAL_VALUE_TYPE r = TREE_REAL_CST (t);
      v = iterative_hash_host_wide_int (r.cl, v);
      v = iterative_hash_host_wide_int (r.decimal
					| (r.sign << 1)
					| (r.signalling << 2)
					| (r.canonical << 3), v);
      v = iterative_hash_host_wide_int (r.uexp, v);
      for (unsigned i = 0; i < SIGSZ; ++i)
	v = iterative_hash_host_wide_int (r.sig[i], v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
    {
      FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
      v = iterative_hash_host_wide_int (f.mode, v);
      v = iterative_hash_host_wide_int (f.data.low, v);
      v = iterative_hash_host_wide_int (f.data.high, v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      v = iterative_hash_host_wide_int (DECL_MODE (t), v);
      v = iterative_hash_host_wide_int (DECL_NONLOCAL (t)
					| (DECL_VIRTUAL_P (t) << 1)
					| (DECL_IGNORED_P (t) << 2)
					| (DECL_ABSTRACT (t) << 3)
					| (DECL_ARTIFICIAL (t) << 4)
					| (DECL_USER_ALIGN (t) << 5)
					| (DECL_PRESERVE_P (t) << 6)
					| (DECL_EXTERNAL (t) << 7)
					| (DECL_GIMPLE_REG_P (t) << 8), v);
      v = iterative_hash_host_wide_int (DECL_ALIGN (t), v);
      if (code == LABEL_DECL)
	{
	  v = iterative_hash_host_wide_int (EH_LANDING_PAD_NR (t), v);
	  v = iterative_hash_host_wide_int (LABEL_DECL_UID (t), v);
	}
      else if (code == FIELD_DECL)
	{
	  v = iterative_hash_host_wide_int (DECL_PACKED (t)
					    | (DECL_NONADDRESSABLE_P (t) << 1),
					    v);
	  v = iterative_hash_host_wide_int (DECL_OFFSET_ALIGN (t), v);
	}
      else if (code == VAR_DECL)
	{
	  v = iterative_hash_host_wide_int (DECL_HAS_DEBUG_EXPR_P (t)
					    | (DECL_NONLOCAL_FRAME (t) << 1),
					    v);
	}
      if (code == RESULT_DECL
	  || code == PARM_DECL
	  || code == VAR_DECL)
	{
	  v = iterative_hash_host_wide_int (DECL_BY_REFERENCE (t), v);
	  if (code == VAR_DECL
	      || code == PARM_DECL)
	    v = iterative_hash_host_wide_int (DECL_HAS_VALUE_EXPR_P (t), v);
	}
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
    v = iterative_hash_host_wide_int (DECL_REGISTER (t), v);

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      v = iterative_hash_host_wide_int ((DECL_COMMON (t))
					| (DECL_DLLIMPORT_P (t) << 1)
					| (DECL_WEAK (t) << 2)
					| (DECL_SEEN_IN_BIND_EXPR_P (t) << 3)
					| (DECL_COMDAT (t) << 4)
					| (DECL_VISIBILITY_SPECIFIED (t) << 6),
					v);
      v = iterative_hash_host_wide_int (DECL_VISIBILITY (t), v);
      if (code == VAR_DECL)
	{
	  /* DECL_IN_TEXT_SECTION is set during final asm output only.  */
	  v = iterative_hash_host_wide_int (DECL_HARD_REGISTER (t)
					    | (DECL_IN_CONSTANT_POOL (t) << 1),
					    v);
	  v = iterative_hash_host_wide_int (DECL_TLS_MODEL (t), v);
	}
      if (TREE_CODE (t) == FUNCTION_DECL)
	v = iterative_hash_host_wide_int (DECL_FINAL_P (t)
					  | (DECL_CXX_CONSTRUCTOR_P (t) << 1)
					  | (DECL_CXX_DESTRUCTOR_P (t) << 2),
					  v);
      if (VAR_OR_FUNCTION_DECL_P (t))
	v = iterative_hash_host_wide_int (DECL_INIT_PRIORITY (t), v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      v = iterative_hash_host_wide_int (DECL_BUILT_IN_CLASS (t), v);
      v = iterative_hash_host_wide_int (DECL_STATIC_CONSTRUCTOR (t)
					| (DECL_STATIC_DESTRUCTOR (t) << 1)
					| (DECL_UNINLINABLE (t) << 2)
					| (DECL_POSSIBLY_INLINED (t) << 3)
					| (DECL_IS_NOVOPS (t) << 4)
					| (DECL_IS_RETURNS_TWICE (t) << 5)
					| (DECL_IS_MALLOC (t) << 6)
					| (DECL_IS_OPERATOR_NEW (t) << 7)
					| (DECL_DECLARED_INLINE_P (t) << 8)
					| (DECL_STATIC_CHAIN (t) << 9)
					| (DECL_NO_INLINE_WARNING_P (t) << 10)
					| (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t) << 11)
					| (DECL_NO_LIMIT_STACK (t) << 12)
					| (DECL_DISREGARD_INLINE_LIMITS (t) << 13)
					| (DECL_PURE_P (t) << 14)
					| (DECL_LOOPING_CONST_OR_PURE_P (t) << 15), v);
      if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
	v = iterative_hash_host_wide_int (DECL_FUNCTION_CODE (t), v);
      if (DECL_STATIC_DESTRUCTOR (t))
	v = iterative_hash_host_wide_int (DECL_FINI_PRIORITY (t), v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      v = iterative_hash_host_wide_int (TYPE_MODE (t), v);
      v = iterative_hash_host_wide_int (TYPE_STRING_FLAG (t)
					| (TYPE_NO_FORCE_BLK (t) << 1)
					| (TYPE_NEEDS_CONSTRUCTING (t) << 2)
					| (TYPE_PACKED (t) << 3)
					| (TYPE_RESTRICT (t) << 4)
					| (TYPE_USER_ALIGN (t) << 5)
					| (TYPE_READONLY (t) << 6), v);
      if (RECORD_OR_UNION_TYPE_P (t))
	{
	  v = iterative_hash_host_wide_int (TYPE_TRANSPARENT_AGGR (t)
					    | (TYPE_FINAL_P (t) << 1), v);
	}
      else if (code == ARRAY_TYPE)
	v = iterative_hash_host_wide_int (TYPE_NONALIASED_COMPONENT (t), v);
      v = iterative_hash_host_wide_int (TYPE_PRECISION (t), v);
      v = iterative_hash_host_wide_int (TYPE_ALIGN (t), v);
      /* Only hash whether the alias set is forced to zero, not the
	 alias set number itself, which is not stable across runs.  */
      v = iterative_hash_host_wide_int ((TYPE_ALIAS_SET (t) == 0
					 || (!in_lto_p
					     && get_alias_set (t) == 0))
					? 0 : -1, v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
    v = iterative_hash (TRANSLATION_UNIT_LANGUAGE (t),
			strlen (TRANSLATION_UNIT_LANGUAGE (t)), v);

  if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
    v = iterative_hash (t, sizeof (struct cl_target_option), v);

  if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
    v = iterative_hash (t, sizeof (struct cl_optimization), v);

  if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
    v = iterative_hash_host_wide_int (IDENTIFIER_HASH_VALUE (t), v);

  if (CODE_CONTAINS_STRUCT (code, TS_STRING))
    v = iterative_hash (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t), v);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (POINTER_TYPE_P (t))
	{
	  /* For pointers factor in the pointed-to type recursively as
	     we cannot recurse through only pointers.
	     ??? We can generalize this by keeping track of the
	     in-SCC edges for each tree (or arbitrarily the first
	     such edge) and hashing that in in a second stage
	     (instead of the quadratic mixing of the SCC we do now).  */
	  hashval_t x;
	  unsigned ix;
	  if (streamer_tree_cache_lookup (cache, TREE_TYPE (t), &ix))
	    x = streamer_tree_cache_get_hash (cache, ix);
	  else
	    x = hash_tree (cache, TREE_TYPE (t));
	  v = iterative_hash_hashval_t (x, v);
	}
      else if (code != IDENTIFIER_NODE)
	visit (TREE_TYPE (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
      visit (VECTOR_CST_ELT (t, i));

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      visit (TREE_REALPART (t));
      visit (TREE_IMAGPART (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (t)
	  && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
	  && ANON_AGGRNAME_P (DECL_NAME (t)))
	;
      else
	visit (DECL_NAME (t));
      if (DECL_FILE_SCOPE_P (t))
	;
      else
	visit (DECL_CONTEXT (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      visit (DECL_SIZE (t));
      visit (DECL_SIZE_UNIT (t));
      visit (DECL_ATTRIBUTES (t));
      if ((code == VAR_DECL
	   || code == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (t))
	visit (DECL_VALUE_EXPR (t));
      if (code == VAR_DECL
	  && DECL_HAS_DEBUG_EXPR_P (t))
	visit (DECL_DEBUG_EXPR (t));
      /* ??? Hash DECL_INITIAL as streamed.  Needs the output-block to
	 be able to call get_symbol_initial_value.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (code == TYPE_DECL)
	visit (DECL_ORIGINAL_TYPE (t));
      visit (DECL_VINDEX (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      if (DECL_ASSEMBLER_NAME_SET_P (t))
	visit (DECL_ASSEMBLER_NAME (t));
      visit (DECL_SECTION_NAME (t));
      visit (DECL_COMDAT_GROUP (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      visit (DECL_FIELD_OFFSET (t));
      visit (DECL_BIT_FIELD_TYPE (t));
      visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
      visit (DECL_FIELD_BIT_OFFSET (t));
      visit (DECL_FCONTEXT (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      visit (DECL_FUNCTION_PERSONALITY (t));
      visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
      visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      visit (TYPE_SIZE (t));
      visit (TYPE_SIZE_UNIT (t));
      visit (TYPE_ATTRIBUTES (t));
      visit (TYPE_NAME (t));
      visit (TYPE_MAIN_VARIANT (t));
      if (TYPE_FILE_SCOPE_P (t))
	;
      else
	visit (TYPE_CONTEXT (t));
      visit (TYPE_STUB_DECL (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (code == ENUMERAL_TYPE)
	visit (TYPE_VALUES (t));
      else if (code == ARRAY_TYPE)
	visit (TYPE_DOMAIN (t));
      else if (RECORD_OR_UNION_TYPE_P (t))
	for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
	  visit (f);
      else if (code == FUNCTION_TYPE
	       || code == METHOD_TYPE)
	visit (TYPE_ARG_TYPES (t));
      if (!POINTER_TYPE_P (t))
	visit (TYPE_MINVAL (t));
      visit (TYPE_MAXVAL (t));
      if (RECORD_OR_UNION_TYPE_P (t))
	visit (TYPE_BINFO (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      visit (TREE_PURPOSE (t));
      visit (TREE_VALUE (t));
      visit (TREE_CHAIN (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
      visit (TREE_VEC_ELT (t, i));

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      v = iterative_hash_host_wide_int (TREE_OPERAND_LENGTH (t), v);
      for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
	visit (TREE_OPERAND (t, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree b;
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
	visit (b);
      visit (BINFO_OFFSET (t));
      visit (BINFO_VTABLE (t));
      visit (BINFO_VPTR_FIELD (t));
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
	visit (b);
      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;
      v = iterative_hash_host_wide_int (CONSTRUCTOR_NELTS (t), v);
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
	{
	  visit (index);
	  visit (value);
	}
    }

  return v;

#undef visit
}
1052
1053 /* Compare two SCC entries by their hash value for qsorting them. */
1054
1055 static int
1056 scc_entry_compare (const void *p1_, const void *p2_)
1057 {
1058 const scc_entry *p1 = (const scc_entry *) p1_;
1059 const scc_entry *p2 = (const scc_entry *) p2_;
1060 if (p1->hash < p2->hash)
1061 return -1;
1062 else if (p1->hash > p2->hash)
1063 return 1;
1064 return 0;
1065 }
1066
/* Return a hash value for the SCC on the SCC stack from FIRST with
   size SIZE.  The result is independent of the order in which the
   SCC members were visited, so it is stable across TUs.  */

static hashval_t
hash_scc (struct streamer_tree_cache_d *cache, unsigned first, unsigned size)
{
  /* Compute hash values for the SCC members.  */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash = hash_tree (cache, sccstack[first+i].t);

  /* A singleton SCC needs no order-independent mixing.  */
  if (size == 1)
    return sccstack[first].hash;

  /* Sort the SCC of type, hash pairs so that when we mix in
     all members of the SCC the hash value becomes independent on
     the order we visited the SCC.  Disregard hashes equal to
     the hash of the tree we mix into because we cannot guarantee
     a stable sort for those across different TUs.  */
  qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
  hashval_t *tem = XALLOCAVEC (hashval_t, size);
  for (unsigned i = 0; i < size; ++i)
    {
      hashval_t hash = sccstack[first+i].hash;
      hashval_t orig_hash = hash;
      unsigned j;
      /* Skip same hashes.  */
      for (j = i + 1;
	   j < size && sccstack[first+j].hash == orig_hash; ++j)
	;
      /* Mix in the remaining members, starting after the run of equal
	 hashes and wrapping around to the front, so every member other
	 than those colliding with us contributes exactly once.  */
      for (; j < size; ++j)
	hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
      for (j = 0; sccstack[first+j].hash != orig_hash; ++j)
	hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
      tem[i] = hash;
    }
  /* Commit the mixed per-member hashes and fold them into the overall
     SCC hash.  */
  hashval_t scc_hash = 0;
  for (unsigned i = 0; i < size; ++i)
    {
      sccstack[first+i].hash = tem[i];
      scc_hash = iterative_hash_hashval_t (tem[i], scc_hash);
    }
  return scc_hash;
}
1110
/* DFS walk EXPR and stream SCCs of tree bodies if they are not
   already in the streamer cache.  Main routine called for
   each visit of EXPR.  Implements Tarjan's SCC algorithm: FROM_STATE
   is the state of the DFS parent (NULL at the root), and strongly
   connected components are emitted as LTO_tree_scc records as soon
   as their root is popped.  */

static void
DFS_write_tree (struct output_block *ob, sccs *from_state,
		tree expr, bool ref_p, bool this_ref_p)
{
  unsigned ix;
  sccs **slot;

  /* Handle special cases.  */
  if (expr == NULL_TREE)
    return;

  /* Do not DFS walk into indexable trees.  */
  if (this_ref_p && tree_is_indexable (expr))
    return;

  /* Check if we already streamed EXPR.  */
  if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
    return;

  slot = (sccs **)pointer_map_insert (sccstate, expr);
  sccs *cstate = *slot;
  if (!cstate)
    {
      scc_entry e = { expr, 0 };
      /* Not yet visited.  DFS recurse and push it onto the stack.  */
      *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
      sccstack.safe_push (e);
      cstate->dfsnum = next_dfs_num++;
      cstate->low = cstate->dfsnum;

      /* Builtins get no edge walk at all; non-overflowed INTEGER_CSTs
	 only need their type walked; everything else gets a full body
	 walk plus the LTO-specific DECL_INITIAL edge.  */
      if (streamer_handle_as_builtin_p (expr))
	;
      else if (TREE_CODE (expr) == INTEGER_CST
	       && !TREE_OVERFLOW (expr))
	DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
      else
	{
	  DFS_write_tree_body (ob, expr, cstate, ref_p);

	  /* Walk any LTO-specific edges.  */
	  if (DECL_P (expr)
	      && TREE_CODE (expr) != FUNCTION_DECL
	      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
	    {
	      /* Handle DECL_INITIAL for symbols.  */
	      tree initial = get_symbol_initial_value (ob, expr);
	      DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
	    }
	}

      /* See if we found an SCC.  */
      if (cstate->low == cstate->dfsnum)
	{
	  unsigned first, size;
	  tree x;

	  /* Pop the SCC and compute its size.  */
	  first = sccstack.length ();
	  do
	    {
	      x = sccstack[--first].t;
	    }
	  while (x != expr);
	  size = sccstack.length () - first;

	  /* No need to compute hashes for LTRANS units, we don't perform
	     any merging there.  */
	  hashval_t scc_hash = 0;
	  unsigned scc_entry_len = 0;
	  if (!flag_wpa)
	    {
	      scc_hash = hash_scc (ob->writer_cache, first, size);

	      /* Put the entries with the least number of collisions first.  */
	      unsigned entry_start = 0;
	      scc_entry_len = size + 1;
	      for (unsigned i = 0; i < size;)
		{
		  /* Find the length of the run of equal hashes starting
		     at I and remember the shortest such run.  */
		  unsigned from = i;
		  for (i = i + 1; i < size
		       && (sccstack[first + i].hash
			   == sccstack[first + from].hash); ++i)
		    ;
		  if (i - from < scc_entry_len)
		    {
		      scc_entry_len = i - from;
		      entry_start = from;
		    }
		}
	      /* Swap the least-colliding run to the front of the SCC.  */
	      for (unsigned i = 0; i < scc_entry_len; ++i)
		{
		  scc_entry tem = sccstack[first + i];
		  sccstack[first + i] = sccstack[first + entry_start + i];
		  sccstack[first + entry_start + i] = tem;
		}
	    }

	  /* Write LTO_tree_scc.  */
	  streamer_write_record_start (ob, LTO_tree_scc);
	  streamer_write_uhwi (ob, size);
	  streamer_write_uhwi (ob, scc_hash);

	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
	     All INTEGER_CSTs need to be handled this way as we need
	     their type to materialize them.  Also builtins are handled
	     this way.
	     ??? We still wrap these in LTO_tree_scc so at the
	     input side we can properly identify the tree we want
	     to ultimatively return.  */
	  size_t old_len = ob->writer_cache->nodes.length ();
	  if (size == 1)
	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
	  else
	    {
	      /* Write the size of the SCC entry candidates.  */
	      streamer_write_uhwi (ob, scc_entry_len);

	      /* Write all headers and populate the streamer cache.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  hashval_t hash = sccstack[first+i].hash;
		  tree t = sccstack[first+i].t;
		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
							      t, hash, &ix);
		  gcc_assert (!exists_p);

		  if (!lto_is_streamable (t))
		    internal_error ("tree code %qs is not supported "
				    "in LTO streams",
				    get_tree_code_name (TREE_CODE (t)));

		  gcc_checking_assert (!streamer_handle_as_builtin_p (t));

		  /* Write the header, containing everything needed to
		     materialize EXPR on the reading side.  */
		  streamer_write_tree_header (ob, t);
		}

	      /* Write the bitpacks and tree references.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);

		  /* Mark the end of the tree.  */
		  streamer_write_zero (ob);
		}
	    }
	  /* The cache must have grown by exactly the SCC members.  */
	  gcc_assert (old_len + size == ob->writer_cache->nodes.length ());

	  /* Finally truncate the vector.  */
	  sccstack.truncate (first);

	  /* Propagate our low-link to the DFS parent before returning.  */
	  if (from_state)
	    from_state->low = MIN (from_state->low, cstate->low);
	  return;
	}

      if (from_state)
	from_state->low = MIN (from_state->low, cstate->low);
    }
  /* EXPR was already on the stack: a back- or cross-edge.  Update the
     parent's low-link from EXPR's DFS number.  */
  gcc_checking_assert (from_state);
  if (cstate->dfsnum < from_state->dfsnum)
    from_state->low = MIN (cstate->dfsnum, from_state->low);
}
1279
1280
/* Emit the physical representation of tree node EXPR to output block
   OB.  If THIS_REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.  */

void
lto_output_tree (struct output_block *ob, tree expr,
		 bool ref_p, bool this_ref_p)
{
  unsigned ix;
  bool existed_p;

  if (expr == NULL_TREE)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  /* Indexable trees are emitted by reference into the decl streams
     instead of by value.  */
  if (this_ref_p && tree_is_indexable (expr))
    {
      lto_output_tree_ref (ob, expr);
      return;
    }

  existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
  if (existed_p)
    {
      /* If a node has already been streamed out, make sure that
	 we don't write it more than once.  Otherwise, the reader
	 will instantiate two different nodes for the same object.  */
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
  else
    {
      /* This is the first time we see EXPR, write all reachable
	 trees to OB.  */
      static bool in_dfs_walk;

      /* Protect against recursion which means disconnect between
	 what tree edges we walk in the DFS walk and what edges
	 we stream out.  */
      gcc_assert (!in_dfs_walk);

      /* Start the DFS walk.  Set up the global SCC state used by
	 DFS_write_tree and tear it down again when the walk is done.  */
      in_dfs_walk = true;
      sccstate = pointer_map_create ();
      gcc_obstack_init (&sccstate_obstack);
      next_dfs_num = 1;
      DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
      sccstack.release ();
      pointer_map_destroy (sccstate);
      obstack_free (&sccstate_obstack, NULL);
      in_dfs_walk = false;

      /* Finally append a reference to the tree we were writing.
	 ??? If expr ended up as a singleton we could have
	 inlined it here and avoid outputting a reference.  */
      existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
      gcc_assert (existed_p);
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
}
1352
1353
1354 /* Output to OB a list of try/catch handlers starting with FIRST. */
1355
1356 static void
1357 output_eh_try_list (struct output_block *ob, eh_catch first)
1358 {
1359 eh_catch n;
1360
1361 for (n = first; n; n = n->next_catch)
1362 {
1363 streamer_write_record_start (ob, LTO_eh_catch);
1364 stream_write_tree (ob, n->type_list, true);
1365 stream_write_tree (ob, n->filter_list, true);
1366 stream_write_tree (ob, n->label, true);
1367 }
1368
1369 streamer_write_record_start (ob, LTO_null);
1370 }
1371
1372
1373 /* Output EH region R in function FN to OB. CURR_RN is the slot index
1374 that is being emitted in FN->EH->REGION_ARRAY. This is used to
1375 detect EH region sharing. */
1376
1377 static void
1378 output_eh_region (struct output_block *ob, eh_region r)
1379 {
1380 enum LTO_tags tag;
1381
1382 if (r == NULL)
1383 {
1384 streamer_write_record_start (ob, LTO_null);
1385 return;
1386 }
1387
1388 if (r->type == ERT_CLEANUP)
1389 tag = LTO_ert_cleanup;
1390 else if (r->type == ERT_TRY)
1391 tag = LTO_ert_try;
1392 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1393 tag = LTO_ert_allowed_exceptions;
1394 else if (r->type == ERT_MUST_NOT_THROW)
1395 tag = LTO_ert_must_not_throw;
1396 else
1397 gcc_unreachable ();
1398
1399 streamer_write_record_start (ob, tag);
1400 streamer_write_hwi (ob, r->index);
1401
1402 if (r->outer)
1403 streamer_write_hwi (ob, r->outer->index);
1404 else
1405 streamer_write_zero (ob);
1406
1407 if (r->inner)
1408 streamer_write_hwi (ob, r->inner->index);
1409 else
1410 streamer_write_zero (ob);
1411
1412 if (r->next_peer)
1413 streamer_write_hwi (ob, r->next_peer->index);
1414 else
1415 streamer_write_zero (ob);
1416
1417 if (r->type == ERT_TRY)
1418 {
1419 output_eh_try_list (ob, r->u.eh_try.first_catch);
1420 }
1421 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1422 {
1423 stream_write_tree (ob, r->u.allowed.type_list, true);
1424 stream_write_tree (ob, r->u.allowed.label, true);
1425 streamer_write_uhwi (ob, r->u.allowed.filter);
1426 }
1427 else if (r->type == ERT_MUST_NOT_THROW)
1428 {
1429 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1430 bitpack_d bp = bitpack_create (ob->main_stream);
1431 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1432 streamer_write_bitpack (&bp);
1433 }
1434
1435 if (r->landing_pads)
1436 streamer_write_hwi (ob, r->landing_pads->index);
1437 else
1438 streamer_write_zero (ob);
1439 }
1440
1441
1442 /* Output landing pad LP to OB. */
1443
1444 static void
1445 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1446 {
1447 if (lp == NULL)
1448 {
1449 streamer_write_record_start (ob, LTO_null);
1450 return;
1451 }
1452
1453 streamer_write_record_start (ob, LTO_eh_landing_pad);
1454 streamer_write_hwi (ob, lp->index);
1455 if (lp->next_lp)
1456 streamer_write_hwi (ob, lp->next_lp->index);
1457 else
1458 streamer_write_zero (ob);
1459
1460 if (lp->region)
1461 streamer_write_hwi (ob, lp->region->index);
1462 else
1463 streamer_write_zero (ob);
1464
1465 stream_write_tree (ob, lp->post_landing_pad, true);
1466 }
1467
1468
/* Output the existing eh_table to OB.  Emits nothing but a single
   LTO_null when FN has no EH region tree.  */

static void
output_eh_regions (struct output_block *ob, struct function *fn)
{
  if (fn->eh && fn->eh->region_tree)
    {
      unsigned i;
      eh_region eh;
      eh_landing_pad lp;
      tree ttype;

      streamer_write_record_start (ob, LTO_eh_table);

      /* Emit the index of the root of the EH region tree.  */
      streamer_write_hwi (ob, fn->eh->region_tree->index);

      /* Emit all the EH regions in the region array.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
	output_eh_region (ob, eh);

      /* Emit all landing pads.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
	output_eh_lp (ob, lp);

      /* Emit all the runtime type data.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
	stream_write_tree (ob, ttype, true);

      /* Emit the table of action chains.  The representation differs
	 between the ARM EABI unwinder (a vector of trees) and the
	 generic unwinder (a vector of raw bytes).  */
      if (targetm.arm_eabi_unwinder)
	{
	  tree t;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
	    stream_write_tree (ob, t, true);
	}
      else
	{
	  uchar c;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
	    streamer_write_char_stream (ob->main_stream, c);
	}
    }

  /* The LTO_null either terminates the record or indicates that there
     are no eh_records at all.  */
  streamer_write_record_start (ob, LTO_null);
}
1522
1523
1524 /* Output all of the active ssa names to the ssa_names stream. */
1525
1526 static void
1527 output_ssa_names (struct output_block *ob, struct function *fn)
1528 {
1529 unsigned int i, len;
1530
1531 len = vec_safe_length (SSANAMES (fn));
1532 streamer_write_uhwi (ob, len);
1533
1534 for (i = 1; i < len; i++)
1535 {
1536 tree ptr = (*SSANAMES (fn))[i];
1537
1538 if (ptr == NULL_TREE
1539 || SSA_NAME_IN_FREE_LIST (ptr)
1540 || virtual_operand_p (ptr))
1541 continue;
1542
1543 streamer_write_uhwi (ob, i);
1544 streamer_write_char_stream (ob->main_stream,
1545 SSA_NAME_IS_DEFAULT_DEF (ptr));
1546 if (SSA_NAME_VAR (ptr))
1547 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1548 else
1549 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1550 stream_write_tree (ob, TREE_TYPE (ptr), true);
1551 }
1552
1553 streamer_write_zero (ob);
1554 }
1555
1556
/* Output the cfg.  Everything is written to OB's dedicated CFG
   stream; the main stream pointer is temporarily redirected and
   restored on exit.  */

static void
output_cfg (struct output_block *ob, struct function *fn)
{
  struct lto_output_stream *tmp_stream = ob->main_stream;
  basic_block bb;

  /* Redirect all writes below into the CFG stream.  */
  ob->main_stream = ob->cfg_stream;

  streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
		       profile_status_for_function (fn));

  /* Output the number of the highest basic block.  */
  streamer_write_uhwi (ob, last_basic_block_for_function (fn));

  FOR_ALL_BB_FN (bb, fn)
    {
      edge_iterator ei;
      edge e;

      streamer_write_hwi (ob, bb->index);

      /* Output the successors and the edge flags.  */
      streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  streamer_write_uhwi (ob, e->dest->index);
	  streamer_write_hwi (ob, e->probability);
	  streamer_write_gcov_count (ob, e->count);
	  streamer_write_uhwi (ob, e->flags);
	}
    }

  /* -1 terminates the per-block edge records.  */
  streamer_write_hwi (ob, -1);

  /* Emit the basic-block chain in layout order, also -1 terminated.  */
  bb = ENTRY_BLOCK_PTR;
  while (bb->next_bb)
    {
      streamer_write_hwi (ob, bb->next_bb->index);
      bb = bb->next_bb;
    }

  streamer_write_hwi (ob, -1);

  /* ??? The cfgloop interface is tied to cfun.  */
  gcc_assert (cfun == fn);

  /* Output the number of loops.  */
  streamer_write_uhwi (ob, number_of_loops (fn));

  /* Output each loop, skipping the tree root which has number zero.  */
  for (unsigned i = 1; i < number_of_loops (fn); ++i)
    {
      struct loop *loop = get_loop (fn, i);

      /* Write the index of the loop header.  That's enough to rebuild
	 the loop tree on the reader side.  Stream -1 for an unused
	 loop entry.  */
      if (!loop)
	{
	  streamer_write_hwi (ob, -1);
	  continue;
	}
      else
	streamer_write_hwi (ob, loop->header->index);

      /* Write everything copy_loop_info copies.  */
      streamer_write_enum (ob->main_stream,
			   loop_estimation, EST_LAST, loop->estimate_state);
      streamer_write_hwi (ob, loop->any_upper_bound);
      if (loop->any_upper_bound)
	{
	  /* double_int bound streamed as low then high word.  */
	  streamer_write_uhwi (ob, loop->nb_iterations_upper_bound.low);
	  streamer_write_hwi (ob, loop->nb_iterations_upper_bound.high);
	}
      streamer_write_hwi (ob, loop->any_estimate);
      if (loop->any_estimate)
	{
	  streamer_write_uhwi (ob, loop->nb_iterations_estimate.low);
	  streamer_write_hwi (ob, loop->nb_iterations_estimate.high);
	}
    }

  /* Restore the main stream.  */
  ob->main_stream = tmp_stream;
}
1643
1644
/* Create the header in the file using OB.  If the section type is for
   a function, set FN to the decl for that function.  Writes the
   section header followed by the CFG (function bodies only), main and
   string streams, in that fixed order.  */

void
produce_asm (struct output_block *ob, tree fn)
{
  enum lto_section_type section_type = ob->section_type;
  struct lto_function_header header;
  char *section_name;
  struct lto_output_stream *header_stream;

  /* Function-body sections are named after FN's assembler name;
     other section types get a generic name.  */
  if (section_type == LTO_section_function_body)
    {
      const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
      section_name = lto_get_section_name (section_type, name, NULL);
    }
  else
    section_name = lto_get_section_name (section_type, NULL, NULL);

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header is stream computed here.  */
  memset (&header, 0, sizeof (struct lto_function_header));

  /* Write the header.  */
  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  header.compressed_size = 0;

  /* Only function-body sections carry a CFG stream.  */
  if (section_type == LTO_section_function_body)
    header.cfg_size = ob->cfg_stream->total_size;
  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  header_stream = XCNEW (struct lto_output_stream);
  lto_output_data_stream (header_stream, &header, sizeof header);
  lto_write_stream (header_stream);
  free (header_stream);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  The streams must follow in the same order the
     header records their sizes.  */
  if (section_type == LTO_section_function_body)
    lto_write_stream (ob->cfg_stream);
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();
}
1695
1696
/* Output the base body of struct function FN using output block OB.  */

static void
output_struct_function_base (struct output_block *ob, struct function *fn)
{
  struct bitpack_d bp;
  unsigned i;
  tree t;

  /* Output the static chain and non-local goto save area.  */
  stream_write_tree (ob, fn->static_chain_decl, true);
  stream_write_tree (ob, fn->nonlocal_goto_save_area, true);

  /* Output all the local variables in the function.  */
  streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
  FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
    stream_write_tree (ob, t, true);

  /* Output current IL state of the function.  */
  streamer_write_uhwi (ob, fn->curr_properties);

  /* Write all the attributes for FN.  The pack order must match the
     unpack order on the input side exactly.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, fn->is_thunk, 1);
  bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
  bp_pack_value (&bp, fn->returns_pcc_struct, 1);
  bp_pack_value (&bp, fn->returns_struct, 1);
  bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
  bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
  bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
  bp_pack_value (&bp, fn->after_inlining, 1);
  bp_pack_value (&bp, fn->stdarg, 1);
  bp_pack_value (&bp, fn->has_nonlocal_label, 1);
  bp_pack_value (&bp, fn->calls_alloca, 1);
  bp_pack_value (&bp, fn->calls_setjmp, 1);
  bp_pack_value (&bp, fn->va_list_fpr_size, 8);
  bp_pack_value (&bp, fn->va_list_gpr_size, 8);

  /* Output the function start and end loci.  */
  stream_output_location (ob, &bp, fn->function_start_locus);
  stream_output_location (ob, &bp, fn->function_end_locus);

  streamer_write_bitpack (&bp);
}
1741
1742
/* Output the body of function NODE->DECL.  Creates its own output
   block, streams the function into it and emits the resulting
   section via produce_asm.  */

static void
output_function (struct cgraph_node *node)
{
  tree function;
  struct function *fn;
  basic_block bb;
  struct output_block *ob;

  function = node->symbol.decl;
  fn = DECL_STRUCT_FUNCTION (function);
  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->cgraph_node = node;

  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  /* Set current_function_decl and cfun.  */
  push_cfun (fn);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  streamer_write_record_start (ob, LTO_function);

  /* Output decls for parameters and args.  */
  stream_write_tree (ob, DECL_RESULT (function), true);
  streamer_write_chain (ob, DECL_ARGUMENTS (function), true);

  /* Output DECL_INITIAL for the function, which contains the tree of
     lexical scopes.  */
  stream_write_tree (ob, DECL_INITIAL (function), true);

  /* We also stream abstract functions where we stream only stuff needed for
     debug info.  */
  if (gimple_has_body_p (function))
    {
      /* Flag for the reader that a body follows.  */
      streamer_write_uhwi (ob, 1);
      output_struct_function_base (ob, fn);

      /* Output all the SSA names used in the function.  */
      output_ssa_names (ob, fn);

      /* Output any exception handling regions.  */
      output_eh_regions (ob, fn);


      /* We will renumber the statements.  The code that does this uses
	 the same ordering that we use for serializing them so we can use
	 the same code on the other end and not have to write out the
	 statement numbers.  We do not assign UIDs to PHIs here because
	 virtual PHIs get re-computed on-the-fly which would make numbers
	 inconsistent.  */
      set_gimple_stmt_max_uid (cfun, 0);
      FOR_ALL_BB (bb)
	{
	  gimple_stmt_iterator gsi;
	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);

	      /* Virtual PHIs are not going to be streamed.  */
	      if (!virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}
      /* To avoid keeping duplicate gimple IDs in the statements, renumber
	 virtual phis now.  */
      FOR_ALL_BB (bb)
	{
	  gimple_stmt_iterator gsi;
	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      if (virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}

      /* Output the code for the function.  */
      FOR_ALL_BB_FN (bb, fn)
	output_bb (ob, bb, fn);

      /* The terminator for this function.  */
      streamer_write_record_start (ob, LTO_null);

      output_cfg (ob, fn);

      pop_cfun ();
   }
  else
    /* Abstract function: no body follows.  */
    streamer_write_uhwi (ob, 0);

  /* Create a section to hold the pickled output of this function.  */
  produce_asm (ob, function);

  destroy_output_block (ob);
}
1848
1849
/* Emit toplevel asms.  Writes all asm_nodes into an LTO_section_asm
   section; does nothing when there are no toplevel asms.  */

void
lto_output_toplevel_asms (void)
{
  struct output_block *ob;
  struct asm_node *can;
  char *section_name;
  struct lto_output_stream *header_stream;
  struct lto_asm_header header;

  if (! asm_nodes)
    return;

  ob = create_output_block (LTO_section_asm);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* Emit each asm string together with its order so the reader can
     reconstruct the original sequence.  */
  for (can = asm_nodes; can; can = can->next)
    {
      streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
      streamer_write_hwi (ob, can->order);
    }

  /* A NULL string terminates the list.  */
  streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);

  section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header stream is computed here.  */
  memset (&header, 0, sizeof (header));

  /* Write the header.  */
  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  header_stream = XCNEW (struct lto_output_stream);
  lto_output_data_stream (header_stream, &header, sizeof (header));
  lto_write_stream (header_stream);
  free (header_stream);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  destroy_output_block (ob);
}
1905
1906
/* Copy the function body of NODE without deserializing.  The original
   input section is bit-copied into the output and the in-state decl
   streams are mirrored into the current out-state so the reference
   indices stay valid.  */

static void
copy_function (struct cgraph_node *node)
{
  tree function = node->symbol.decl;
  struct lto_file_decl_data *file_data = node->symbol.lto_file_data;
  struct lto_output_stream *output_stream = XCNEW (struct lto_output_stream);
  const char *data;
  size_t len;
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
  char *section_name =
    lto_get_section_name (LTO_section_function_body, name, NULL);
  size_t i, j;
  struct lto_in_decl_state *in_state;
  struct lto_out_decl_state *out_state = lto_get_out_decl_state ();

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* We may have renamed the declaration, e.g., a static function.  */
  name = lto_get_decl_name_mapping (file_data, name);

  data = lto_get_section_data (file_data, LTO_section_function_body,
                               name, &len);
  gcc_assert (data);

  /* Do a bit copy of the function body.  */
  lto_output_data_stream (output_stream, data, len);
  lto_write_stream (output_stream);

  /* Copy decls.  */
  in_state =
    lto_get_function_in_decl_state (node->symbol.lto_file_data, function);
  gcc_assert (in_state);

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      size_t n = in_state->streams[i].size;
      tree *trees = in_state->streams[i].trees;
      struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);

      /* The out state must have the same indices and the in state.
	 So just copy the vector.  All the encoders in the in state
	 must be empty where we reach here.  */
      gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
      encoder->trees.reserve_exact (n);
      for (j = 0; j < n; j++)
	encoder->trees.safe_push (trees[j]);
    }

  /* Release the mapped section data and the scratch stream.  */
  lto_free_section_data (file_data, LTO_section_function_body, name,
			 data, len);
  free (output_stream);
  lto_end_section ();
}
1963
1964
/* Main entry point from the pass manager.  Streams out the body of
   every encoded function, then the symbol table.  */

static void
lto_output (void)
{
  struct lto_out_decl_state *decl_state;
#ifdef ENABLE_CHECKING
  /* Tracks emitted DECL_UIDs to assert each body is written once.  */
  bitmap output = lto_bitmap_alloc ();
#endif
  int i, n_nodes;
  lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* Initialize the streamer.  */
  lto_streamer_init ();

  n_nodes = lto_symtab_encoder_size (encoder);
  /* Process only the functions with bodies.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node snode = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *node = dyn_cast <cgraph_node> (snode);
      if (node
	  && lto_symtab_encoder_encode_body_p (encoder, node)
	  && !node->symbol.alias)
	{
#ifdef ENABLE_CHECKING
	  gcc_assert (!bitmap_bit_p (output, DECL_UID (node->symbol.decl)));
	  bitmap_set_bit (output, DECL_UID (node->symbol.decl));
#endif
	  /* Each function body gets its own out-decl state, recorded
	     afterwards so references can be resolved later.  */
	  decl_state = lto_new_out_decl_state ();
	  lto_push_out_decl_state (decl_state);
	  /* Functions without a gimple body at WPA time have their
	     original section bit-copied instead of re-streamed.  */
	  if (gimple_has_body_p (node->symbol.decl) || !flag_wpa)
	    output_function (node);
	  else
	    copy_function (node);
	  gcc_assert (lto_get_out_decl_state () == decl_state);
	  lto_pop_out_decl_state ();
	  lto_record_function_out_decl_state (node->symbol.decl, decl_state);
	}
    }

  /* Emit the callgraph after emitting function bodies.  This needs to
     be done now to make sure that all the statements in every function
     have been renumbered so that edges can be associated with call
     statements using the statement UIDs.  */
  output_symtab ();

#ifdef ENABLE_CHECKING
  lto_bitmap_free (output);
#endif
}
2016
namespace {

/* Pass metadata for the LTO gimple streaming-out IPA pass.  */

const pass_data pass_data_ipa_lto_gimple_out =
{
  IPA_PASS, /* type */
  "lto_gimple_out", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  false, /* has_execute */
  TV_IPA_LTO_GIMPLE_OUT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* IPA pass wrapper; lto_output serves as both the write_summary and
   the write_optimization_summary hook.  */

class pass_ipa_lto_gimple_out : public ipa_opt_pass_d
{
public:
  pass_ipa_lto_gimple_out (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_lto_gimple_out, ctxt,
		      NULL, /* generate_summary */
		      lto_output, /* write_summary */
		      NULL, /* read_summary */
		      lto_output, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  bool gate () { return gate_lto_out (); }

}; // class pass_ipa_lto_gimple_out

} // anon namespace
2056
2057 ipa_opt_pass_d *
2058 make_pass_ipa_lto_gimple_out (gcc::context *ctxt)
2059 {
2060 return new pass_ipa_lto_gimple_out (ctxt);
2061 }
2062
2063
2064 /* Write each node in encoded by ENCODER to OB, as well as those reachable
2065 from it and required for correct representation of its semantics.
2066 Each node in ENCODER must be a global declaration or a type. A node
2067 is written only once, even if it appears multiple times in the
2068 vector. Certain transitively-reachable nodes, such as those
2069 representing expressions, may be duplicated, but such nodes
2070 must not appear in ENCODER itself. */
2071
2072 static void
2073 write_global_stream (struct output_block *ob,
2074 struct lto_tree_ref_encoder *encoder)
2075 {
2076 tree t;
2077 size_t index;
2078 const size_t size = lto_tree_ref_encoder_size (encoder);
2079
2080 for (index = 0; index < size; index++)
2081 {
2082 t = lto_tree_ref_encoder_get_tree (encoder, index);
2083 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2084 stream_write_tree (ob, t, false);
2085 }
2086 }
2087
2088
2089 /* Write a sequence of indices into the globals vector corresponding
2090 to the trees in ENCODER. These are used by the reader to map the
2091 indices used to refer to global entities within function bodies to
2092 their referents. */
2093
2094 static void
2095 write_global_references (struct output_block *ob,
2096 struct lto_output_stream *ref_stream,
2097 struct lto_tree_ref_encoder *encoder)
2098 {
2099 tree t;
2100 uint32_t index;
2101 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2102
2103 /* Write size as 32-bit unsigned. */
2104 lto_output_data_stream (ref_stream, &size, sizeof (int32_t));
2105
2106 for (index = 0; index < size; index++)
2107 {
2108 uint32_t slot_num;
2109
2110 t = lto_tree_ref_encoder_get_tree (encoder, index);
2111 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2112 gcc_assert (slot_num != (unsigned)-1);
2113 lto_output_data_stream (ref_stream, &slot_num, sizeof slot_num);
2114 }
2115 }
2116
2117
2118 /* Write all the streams in an lto_out_decl_state STATE using
2119 output block OB and output stream OUT_STREAM. */
2120
2121 void
2122 lto_output_decl_state_streams (struct output_block *ob,
2123 struct lto_out_decl_state *state)
2124 {
2125 int i;
2126
2127 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2128 write_global_stream (ob, &state->streams[i]);
2129 }
2130
2131
2132 /* Write all the references in an lto_out_decl_state STATE using
2133 output block OB and output stream OUT_STREAM. */
2134
2135 void
2136 lto_output_decl_state_refs (struct output_block *ob,
2137 struct lto_output_stream *out_stream,
2138 struct lto_out_decl_state *state)
2139 {
2140 unsigned i;
2141 uint32_t ref;
2142 tree decl;
2143
2144 /* Write reference to FUNCTION_DECL. If there is not function,
2145 write reference to void_type_node. */
2146 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2147 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2148 gcc_assert (ref != (unsigned)-1);
2149 lto_output_data_stream (out_stream, &ref, sizeof (uint32_t));
2150
2151 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2152 write_global_references (ob, out_stream, &state->streams[i]);
2153 }
2154
2155
2156 /* Return the written size of STATE. */
2157
2158 static size_t
2159 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2160 {
2161 int i;
2162 size_t size;
2163
2164 size = sizeof (int32_t); /* fn_ref. */
2165 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2166 {
2167 size += sizeof (int32_t); /* vector size. */
2168 size += (lto_tree_ref_encoder_size (&state->streams[i])
2169 * sizeof (int32_t));
2170 }
2171 return size;
2172 }
2173
2174
/* Write symbol T into STREAM in CACHE.  SEEN specifies symbols we wrote
   so far.  ALIAS is true when T is emitted on behalf of an alias, which
   relaxes the definition sanity checks below.  The record written is:
   NUL-terminated name, NUL-terminated comdat group, one kind byte, one
   visibility byte, 8 bytes of size and 4 bytes of cache slot number.  */

static void
write_symbol (struct streamer_tree_cache_d *cache,
	      struct lto_output_stream *stream,
	      tree t, struct pointer_set_t *seen, bool alias)
{
  const char *name;
  enum gcc_plugin_symbol_kind kind;
  enum gcc_plugin_symbol_visibility visibility;
  unsigned slot_num;
  unsigned HOST_WIDEST_INT size;
  const char *comdat;
  unsigned char c;

  /* None of the following kinds of symbols are needed in the
     symbol table.  */
  if (!TREE_PUBLIC (t)
      || is_builtin_fn (t)
      || DECL_ABSTRACT (t)
      || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
    return;
  gcc_assert (TREE_CODE (t) != RESULT_DECL);

  gcc_assert (TREE_CODE (t) == VAR_DECL
	      || TREE_CODE (t) == FUNCTION_DECL);

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));

  /* This behaves like assemble_name_raw in varasm.c, performing the
     same name manipulations that ASM_OUTPUT_LABELREF does.  */
  name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));

  /* Emit each assembler name at most once; SEEN is keyed on the
     (interned) name pointer.  */
  if (pointer_set_contains (seen, name))
    return;
  pointer_set_insert (seen, name);

  streamer_tree_cache_lookup (cache, t, &slot_num);
  gcc_assert (slot_num != (unsigned)-1);

  /* Classify the symbol for the linker plugin.  */
  if (DECL_EXTERNAL (t))
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKUNDEF;
      else
	kind = GCCPK_UNDEF;
    }
  else
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKDEF;
      else if (DECL_COMMON (t))
	kind = GCCPK_COMMON;
      else
	kind = GCCPK_DEF;

      /* When something is defined, it should have node attached.  */
      gcc_assert (alias || TREE_CODE (t) != VAR_DECL
		  || varpool_get_node (t)->symbol.definition);
      gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
		  || (cgraph_get_node (t)
		      && cgraph_get_node (t)->symbol.definition));
    }

  /* Imitate what default_elf_asm_output_external does.
     When symbol is external, we need to output it with DEFAULT visibility
     when compiling with -fvisibility=default, while with HIDDEN visibility
     when symbol has attribute (visibility("hidden")) specified.
     targetm.binds_local_p check DECL_VISIBILITY_SPECIFIED and gets this
     right.  */

  if (DECL_EXTERNAL (t)
      && !targetm.binds_local_p (t))
    visibility = GCCPV_DEFAULT;
  else
    switch (DECL_VISIBILITY (t))
      {
      case VISIBILITY_DEFAULT:
	visibility = GCCPV_DEFAULT;
	break;
      case VISIBILITY_PROTECTED:
	visibility = GCCPV_PROTECTED;
	break;
      case VISIBILITY_HIDDEN:
	visibility = GCCPV_HIDDEN;
	break;
      case VISIBILITY_INTERNAL:
	visibility = GCCPV_INTERNAL;
	break;
      }

  /* Only common symbols carry a meaningful size, and only when it is a
     compile-time constant.  */
  if (kind == GCCPK_COMMON
      && DECL_SIZE_UNIT (t)
      && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
    size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
  else
    size = 0;

  if (DECL_ONE_ONLY (t))
    comdat = IDENTIFIER_POINTER (DECL_COMDAT_GROUP (t));
  else
    comdat = "";

  /* Emit the fixed-layout record described above.  Note the literal
     8/4 byte widths for size and slot_num; the reader relies on this
     exact layout.  */
  lto_output_data_stream (stream, name, strlen (name) + 1);
  lto_output_data_stream (stream, comdat, strlen (comdat) + 1);
  c = (unsigned char) kind;
  lto_output_data_stream (stream, &c, 1);
  c = (unsigned char) visibility;
  lto_output_data_stream (stream, &c, 1);
  lto_output_data_stream (stream, &size, 8);
  lto_output_data_stream (stream, &slot_num, 4);
}
2288
/* Return true if NODE should appear in the plugin symbol table.
   Only real symbols qualify; external/undefined ones are included
   only when something in this unit actually uses them.  */

bool
output_symbol_p (symtab_node node)
{
  struct cgraph_node *cnode;
  if (!symtab_real_symbol_p (node))
    return false;
  /* We keep external functions in symtab for sake of inlining
     and devirtualization.  We do not want to see them in symbol table as
     references unless they are really used.  */
  cnode = dyn_cast <cgraph_node> (node);
  if (cnode && (!node->symbol.definition || DECL_EXTERNAL (cnode->symbol.decl))
      && cnode->callers)
    return true;

  /* Ignore all references from external vars initializers - they are not really
     part of the compilation unit until they are used by folding.  Some symbols,
     like references to external construction vtables can not be referred to at all.
     We decide this at can_refer_decl_in_current_unit_p.  */
  if (!node->symbol.definition || DECL_EXTERNAL (node->symbol.decl))
    {
      int i;
      struct ipa_ref *ref;
      /* Undefined symbol: emit it only if some non-alias reference comes
	 from a function or from a non-external variable.  */
      for (i = 0; ipa_ref_list_referring_iterate (&node->symbol.ref_list,
						  i, ref); i++)
	{
	  if (ref->use == IPA_REF_ALIAS)
	    continue;
	  if (is_a <cgraph_node> (ref->referring))
	    return true;
	  if (!DECL_EXTERNAL (ref->referring->symbol.decl))
	    return true;
	}
      return false;
    }
  return true;
}
2327
2328
2329 /* Write an IL symbol table to OB.
2330 SET and VSET are cgraph/varpool node sets we are outputting. */
2331
2332 static void
2333 produce_symtab (struct output_block *ob)
2334 {
2335 struct streamer_tree_cache_d *cache = ob->writer_cache;
2336 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2337 struct pointer_set_t *seen;
2338 struct lto_output_stream stream;
2339 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2340 lto_symtab_encoder_iterator lsei;
2341
2342 lto_begin_section (section_name, false);
2343 free (section_name);
2344
2345 seen = pointer_set_create ();
2346 memset (&stream, 0, sizeof (stream));
2347
2348 /* Write the symbol table.
2349 First write everything defined and then all declarations.
2350 This is necessary to handle cases where we have duplicated symbols. */
2351 for (lsei = lsei_start (encoder);
2352 !lsei_end_p (lsei); lsei_next (&lsei))
2353 {
2354 symtab_node node = lsei_node (lsei);
2355
2356 if (!output_symbol_p (node) || DECL_EXTERNAL (node->symbol.decl))
2357 continue;
2358 write_symbol (cache, &stream, node->symbol.decl, seen, false);
2359 }
2360 for (lsei = lsei_start (encoder);
2361 !lsei_end_p (lsei); lsei_next (&lsei))
2362 {
2363 symtab_node node = lsei_node (lsei);
2364
2365 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->symbol.decl))
2366 continue;
2367 write_symbol (cache, &stream, node->symbol.decl, seen, false);
2368 }
2369
2370 lto_write_stream (&stream);
2371 pointer_set_destroy (seen);
2372
2373 lto_end_section ();
2374 }
2375
2376
/* This pass is run after all of the functions are serialized and all
   of the IPA passes have written their serialized forms.  This pass
   causes the vector of all of the global decls and types used from
   this file to be written in to a section that can then be read in to
   recover these on other side.  The section layout is: header,
   decl-state references, main tree stream, string stream.  */

static void
produce_asm_for_decls (void)
{
  struct lto_out_decl_state *out_state;
  struct lto_out_decl_state *fn_out_state;
  struct lto_decl_header header;
  char *section_name;
  struct output_block *ob;
  struct lto_output_stream *header_stream, *decl_state_stream;
  unsigned idx, num_fns;
  size_t decl_state_size;
  int32_t num_decl_states;

  ob = create_output_block (LTO_section_decls);
  ob->global = true;

  memset (&header, 0, sizeof (struct lto_decl_header));

  section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  gcc_assert (!alias_pairs);

  /* Write the global symbols: first the trees of the global out-decl
     state, then those of each per-function state recorded by
     lto_output.  This fills ob->main_stream and the writer cache.  */
  out_state = lto_get_out_decl_state ();
  num_fns = lto_function_decl_states.length ();
  lto_output_decl_state_streams (ob, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_streams (ob, fn_out_state);
    }

  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  /* Currently not used.  This field would allow us to preallocate
     the globals vector, so that it need not be resized as it is extended.  */
  header.num_nodes = -1;

  /* Compute the total size of all decl out states.  */
  decl_state_size = sizeof (int32_t);
  decl_state_size += lto_out_decl_state_written_size (out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      decl_state_size += lto_out_decl_state_written_size (fn_out_state);
    }
  header.decl_state_size = decl_state_size;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  /* Emit the header first so the reader knows the size of each part.  */
  header_stream = XCNEW (struct lto_output_stream);
  lto_output_data_stream (header_stream, &header, sizeof header);
  lto_write_stream (header_stream);
  free (header_stream);

  /* Write the main out-decl state, followed by out-decl states of
     functions.  */
  decl_state_stream = XCNEW (struct lto_output_stream);
  num_decl_states = num_fns + 1;
  lto_output_data_stream (decl_state_stream, &num_decl_states,
			  sizeof (num_decl_states));
  lto_output_decl_state_refs (ob, decl_state_stream, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_refs (ob, decl_state_stream, fn_out_state);
    }
  lto_write_stream (decl_state_stream);
  free (decl_state_stream);

  /* Finally the tree bytes themselves and the string table.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  /* Write the symbol table.  It is used by linker to determine dependencies
     and thus we can skip it for WPA.  */
  if (!flag_wpa)
    produce_symtab (ob);

  /* Write command line opts.  */
  lto_write_options ();

  /* Deallocate memory and clean up.  */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_delete_out_decl_state (fn_out_state);
    }
  lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
  lto_function_decl_states.release ();
  destroy_output_block (ob);
}
2487
2488
namespace {

/* Pass descriptor for the final decl-streaming pass.  No execute step;
   the work happens in produce_asm_for_decls via the write_summary hooks
   installed in the class below.  */

const pass_data pass_data_ipa_lto_finish_out =
{
  IPA_PASS, /* type */
  "lto_decls_out", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  false, /* has_execute */
  TV_IPA_LTO_DECL_OUT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_lto_finish_out : public ipa_opt_pass_d
{
public:
  pass_ipa_lto_finish_out (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_lto_finish_out, ctxt,
		      NULL, /* generate_summary */
		      produce_asm_for_decls, /* write_summary */
		      NULL, /* read_summary */
		      produce_asm_for_decls, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  bool gate () { return gate_lto_out (); }

}; // class pass_ipa_lto_finish_out

} // anon namespace
2528
2529 ipa_opt_pass_d *
2530 make_pass_ipa_lto_finish_out (gcc::context *ctxt)
2531 {
2532 return new pass_ipa_lto_finish_out (ctxt);
2533 }