gcc/lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2013 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "expr.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "hashtab.h"
33 #include "basic-block.h"
34 #include "tree-flow.h"
35 #include "tree-pass.h"
36 #include "cgraph.h"
37 #include "function.h"
38 #include "ggc.h"
39 #include "diagnostic-core.h"
40 #include "except.h"
41 #include "vec.h"
42 #include "lto-symtab.h"
43 #include "lto-streamer.h"
44 #include "data-streamer.h"
45 #include "gimple-streamer.h"
46 #include "tree-streamer.h"
47 #include "streamer-hooks.h"
48 #include "cfgloop.h"
49
50
51 /* Clear the line info stored in OB. */
52
53 static void
54 clear_line_info (struct output_block *ob)
55 {
56 ob->current_file = NULL;
57 ob->current_line = 0;
58 ob->current_col = 0;
59 }
60
61
62 /* Create the output block and return it. SECTION_TYPE is
63 LTO_section_function_body or LTO_section_static_initializer. */
64
65 struct output_block *
66 create_output_block (enum lto_section_type section_type)
67 {
68 struct output_block *ob = XCNEW (struct output_block);
69
70 ob->section_type = section_type;
71 ob->decl_state = lto_get_out_decl_state ();
72 ob->main_stream = XCNEW (struct lto_output_stream);
73 ob->string_stream = XCNEW (struct lto_output_stream);
74 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true);
75
76 if (section_type == LTO_section_function_body)
77 ob->cfg_stream = XCNEW (struct lto_output_stream);
78
79 clear_line_info (ob);
80
81 ob->string_hash_table.create (37);
82 gcc_obstack_init (&ob->obstack);
83
84 return ob;
85 }
86
87
88 /* Destroy the output block OB. */
89
90 void
91 destroy_output_block (struct output_block *ob)
92 {
93 enum lto_section_type section_type = ob->section_type;
94
95 ob->string_hash_table.dispose ();
96
97 free (ob->main_stream);
98 free (ob->string_stream);
99 if (section_type == LTO_section_function_body)
100 free (ob->cfg_stream);
101
102 streamer_tree_cache_delete (ob->writer_cache);
103 obstack_free (&ob->obstack, NULL);
104
105 free (ob);
106 }
107
108
109 /* Look up NODE in the type table and write the index for it to OB. */
110
111 static void
112 output_type_ref (struct output_block *ob, tree node)
113 {
114 streamer_write_record_start (ob, LTO_type_ref);
115 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
116 }
117
118
119 /* Return true if tree node T is written to various tables. For these
120 nodes, we sometimes want to write their physical representation
121 (via lto_output_tree), and sometimes we need to emit an index
122 reference into a table (via lto_output_tree_ref). */
123
124 static bool
125 tree_is_indexable (tree t)
126 {
127 if (TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
128 return false;
129 else if (TREE_CODE (t) == VAR_DECL && decl_function_context (t)
130 && !TREE_STATIC (t))
131 return false;
132 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
133 return false;
134 /* Variably modified types need to be streamed alongside function
135 bodies because they can refer to local entities. Together with
136 them we have to localize their members as well.
137 ??? In theory that includes non-FIELD_DECLs as well. */
138 else if (TYPE_P (t)
139 && variably_modified_type_p (t, NULL_TREE))
140 return false;
141 else if (TREE_CODE (t) == FIELD_DECL
142 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
143 return false;
144 else
145 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
146 }
147
148
149 /* Output info about new location into bitpack BP.
150 After outputting the bitpack, lto_output_location_data has
151 to be called to output the actual data. */
152
153 void
154 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
155 location_t loc)
156 {
157 expanded_location xloc;
158
159 loc = LOCATION_LOCUS (loc);
160 bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
161 if (loc == UNKNOWN_LOCATION)
162 return;
163
164 xloc = expand_location (loc);
165
166 bp_pack_value (bp, ob->current_file != xloc.file, 1);
167 bp_pack_value (bp, ob->current_line != xloc.line, 1);
168 bp_pack_value (bp, ob->current_col != xloc.column, 1);
169
170 if (ob->current_file != xloc.file)
171 bp_pack_var_len_unsigned (bp,
172 streamer_string_index (ob, xloc.file,
173 strlen (xloc.file) + 1,
174 true));
175 ob->current_file = xloc.file;
176
177 if (ob->current_line != xloc.line)
178 bp_pack_var_len_unsigned (bp, xloc.line);
179 ob->current_line = xloc.line;
180
181 if (ob->current_col != xloc.column)
182 bp_pack_var_len_unsigned (bp, xloc.column);
183 ob->current_col = xloc.column;
184 }
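
/* The encoding above is a simple delta scheme: one change bit per
   field (file, line, column) goes into BP and only the values that
   actually changed follow, with OB caching the current values.
   Assuming the reader mirrors the same three cached values, a run of
   locations on the same file and line costs only a few bits each.  */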
185
186
187 /* If EXPR is an indexable tree node, output a reference to it to
188 output block OB. Otherwise, output the physical representation of
189 EXPR to OB. */
190
191 static void
192 lto_output_tree_ref (struct output_block *ob, tree expr)
193 {
194 enum tree_code code;
195
196 if (TYPE_P (expr))
197 {
198 output_type_ref (ob, expr);
199 return;
200 }
201
202 code = TREE_CODE (expr);
203 switch (code)
204 {
205 case SSA_NAME:
206 streamer_write_record_start (ob, LTO_ssa_name_ref);
207 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
208 break;
209
210 case FIELD_DECL:
211 streamer_write_record_start (ob, LTO_field_decl_ref);
212 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
213 break;
214
215 case FUNCTION_DECL:
216 streamer_write_record_start (ob, LTO_function_decl_ref);
217 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
218 break;
219
220 case VAR_DECL:
221 case DEBUG_EXPR_DECL:
222 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
223 case PARM_DECL:
224 streamer_write_record_start (ob, LTO_global_decl_ref);
225 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
226 break;
227
228 case CONST_DECL:
229 streamer_write_record_start (ob, LTO_const_decl_ref);
230 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
231 break;
232
233 case IMPORTED_DECL:
234 gcc_assert (decl_function_context (expr) == NULL);
235 streamer_write_record_start (ob, LTO_imported_decl_ref);
236 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
237 break;
238
239 case TYPE_DECL:
240 streamer_write_record_start (ob, LTO_type_decl_ref);
241 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
242 break;
243
244 case NAMESPACE_DECL:
245 streamer_write_record_start (ob, LTO_namespace_decl_ref);
246 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
247 break;
248
249 case LABEL_DECL:
250 streamer_write_record_start (ob, LTO_label_decl_ref);
251 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
252 break;
253
254 case RESULT_DECL:
255 streamer_write_record_start (ob, LTO_result_decl_ref);
256 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
257 break;
258
259 case TRANSLATION_UNIT_DECL:
260 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
261 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
262 break;
263
264 default:
265 /* No other node is indexable, so it should have been handled by
266 lto_output_tree. */
267 gcc_unreachable ();
268 }
269 }
270
271
272 /* Return true if EXPR is a tree node that can be written to disk. */
273
274 static inline bool
275 lto_is_streamable (tree expr)
276 {
277 enum tree_code code = TREE_CODE (expr);
278
279 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
280 name version in lto_output_tree_ref (see output_ssa_names). */
281 return !is_lang_specific (expr)
282 && code != SSA_NAME
283 && code != CALL_EXPR
284 && code != LANG_TYPE
285 && code != MODIFY_EXPR
286 && code != INIT_EXPR
287 && code != TARGET_EXPR
288 && code != BIND_EXPR
289 && code != WITH_CLEANUP_EXPR
290 && code != STATEMENT_LIST
291 && code != OMP_CLAUSE
292 && (code == CASE_LABEL_EXPR
293 || code == DECL_EXPR
294 || TREE_CODE_CLASS (code) != tcc_statement);
295 }
296
297
298 /* For EXPR, look up and return what we want to stream to OB as DECL_INITIAL. */
299
300 static tree
301 get_symbol_initial_value (struct output_block *ob, tree expr)
302 {
303 gcc_checking_assert (DECL_P (expr)
304 && TREE_CODE (expr) != FUNCTION_DECL
305 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
306
307 /* Handle DECL_INITIAL for symbols. */
308 tree initial = DECL_INITIAL (expr);
309 if (TREE_CODE (expr) == VAR_DECL
310 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
311 && !DECL_IN_CONSTANT_POOL (expr)
312 && initial)
313 {
314 lto_symtab_encoder_t encoder;
315 struct varpool_node *vnode;
316
317 encoder = ob->decl_state->symtab_node_encoder;
318 vnode = varpool_get_node (expr);
319 if (!vnode
320 || !lto_symtab_encoder_encode_initializer_p (encoder,
321 vnode))
322 initial = error_mark_node;
323 }
324
325 return initial;
326 }
327
328
329 /* Write a physical representation of tree node EXPR to output block
330 OB. If REF_P is true, the leaves of EXPR are emitted as references
331 via lto_output_tree_ref. */
333
334 static void
335 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
336 {
337 /* Pack all the non-pointer fields in EXPR into a bitpack and write
338 the resulting bitpack. */
339 bitpack_d bp = bitpack_create (ob->main_stream);
340 streamer_pack_tree_bitfields (ob, &bp, expr);
341 streamer_write_bitpack (&bp);
342
343 /* Write all the pointer fields in EXPR. */
344 streamer_write_tree_body (ob, expr, ref_p);
345
346 /* Write any LTO-specific data to OB. */
347 if (DECL_P (expr)
348 && TREE_CODE (expr) != FUNCTION_DECL
349 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
350 {
351 /* Handle DECL_INITIAL for symbols. */
352 tree initial = get_symbol_initial_value (ob, expr);
353 stream_write_tree (ob, initial, ref_p);
354 }
355 }
356
357 /* Write a physical representation of tree node EXPR to output block
358 OB. If REF_P is true, the leaves of EXPR are emitted as references
359 via lto_output_tree_ref. */
361
362 static void
363 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
364 {
365 if (!lto_is_streamable (expr))
366 internal_error ("tree code %qs is not supported in LTO streams",
367 tree_code_name[TREE_CODE (expr)]);
368
369 /* Write the header, containing everything needed to materialize
370 EXPR on the reading side. */
371 streamer_write_tree_header (ob, expr);
372
373 lto_write_tree_1 (ob, expr, ref_p);
374
375 /* Mark the end of EXPR. */
376 streamer_write_zero (ob);
377 }
378
379 /* Emit the physical representation of tree node EXPR to output block
380 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
381 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
382
383 static void
384 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
385 bool ref_p, bool this_ref_p)
386 {
387 unsigned ix;
388
389 gcc_checking_assert (expr != NULL_TREE
390 && !(this_ref_p && tree_is_indexable (expr)));
391
392 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
393 expr, hash, &ix);
394 gcc_assert (!exists_p);
395 if (streamer_handle_as_builtin_p (expr))
396 {
397 /* MD and NORMAL builtins do not need to be written out
398 completely as they are always instantiated by the
399 compiler on startup. The only builtins that need to
400 be written out are BUILT_IN_FRONTEND. For all other
401 builtins, we simply write the class and code. */
402 streamer_write_builtin (ob, expr);
403 }
404 else if (TREE_CODE (expr) == INTEGER_CST
405 && !TREE_OVERFLOW (expr))
406 {
407 /* Shared INTEGER_CST nodes are special because they need their
408 original type to be materialized by the reader (to implement
409 TYPE_CACHED_VALUES). */
410 streamer_write_integer_cst (ob, expr, ref_p);
411 }
412 else
413 {
414 /* This is the first time we see EXPR, write its fields
415 to OB. */
416 lto_write_tree (ob, expr, ref_p);
417 }
418 }
419
420 struct sccs
421 {
422 unsigned int dfsnum;
423 unsigned int low;
424 };
425
426 struct scc_entry
427 {
428 tree t;
429 hashval_t hash;
430 };
431
432 static unsigned int next_dfs_num;
433 static vec<scc_entry> sccstack;
434 static struct pointer_map_t *sccstate;
435 static struct obstack sccstate_obstack;
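
/* DFSNUM and LOW implement Tarjan's SCC algorithm over the tree
   graph: DFSNUM is the DFS preorder number of a node and LOW the
   smallest DFSNUM known to be reachable from it.  A node that
   finishes with LOW == DFSNUM is the root of an SCC, which is then
   popped from SCCSTACK and streamed as a single LTO_tree_scc record
   (see DFS_write_tree below).  Trees already present in the writer
   cache belong to a previously emitted SCC and are not walked
   again.  */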
436
437 static void
438 DFS_write_tree (struct output_block *ob, sccs *from_state,
439 tree expr, bool ref_p, bool this_ref_p);
440
441 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
442 DFS recurse for all tree edges originating from it. */
443
444 static void
445 DFS_write_tree_body (struct output_block *ob,
446 tree expr, sccs *expr_state, bool ref_p)
447 {
448 #define DFS_follow_tree_edge(DEST) \
449 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
450
451 enum tree_code code;
452
453 code = TREE_CODE (expr);
454
455 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
456 {
457 if (TREE_CODE (expr) != IDENTIFIER_NODE)
458 DFS_follow_tree_edge (TREE_TYPE (expr));
459 }
460
461 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
462 {
463 for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
464 DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
465 }
466
467 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
468 {
469 DFS_follow_tree_edge (TREE_REALPART (expr));
470 DFS_follow_tree_edge (TREE_IMAGPART (expr));
471 }
472
473 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
474 {
475 /* Drop names that were created for anonymous entities. */
476 if (DECL_NAME (expr)
477 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
478 && ANON_AGGRNAME_P (DECL_NAME (expr)))
479 ;
480 else
481 DFS_follow_tree_edge (DECL_NAME (expr));
482 DFS_follow_tree_edge (DECL_CONTEXT (expr));
483 }
484
485 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
486 {
487 DFS_follow_tree_edge (DECL_SIZE (expr));
488 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
489
490 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
491 special handling in LTO, it must be handled by streamer hooks. */
492
493 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
494
495 /* Do not follow DECL_ABSTRACT_ORIGIN. We cannot handle debug information
496 for early inlining so drop it on the floor instead of ICEing in
497 dwarf2out.c. */
498
499 if ((TREE_CODE (expr) == VAR_DECL
500 || TREE_CODE (expr) == PARM_DECL)
501 && DECL_HAS_VALUE_EXPR_P (expr))
502 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
503 if (TREE_CODE (expr) == VAR_DECL)
504 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
505 }
506
507 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
508 {
509 if (TREE_CODE (expr) == TYPE_DECL)
510 DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
511 DFS_follow_tree_edge (DECL_VINDEX (expr));
512 }
513
514 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
515 {
516 /* Make sure we don't inadvertently set the assembler name. */
517 if (DECL_ASSEMBLER_NAME_SET_P (expr))
518 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
519 DFS_follow_tree_edge (DECL_SECTION_NAME (expr));
520 DFS_follow_tree_edge (DECL_COMDAT_GROUP (expr));
521 }
522
523 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
524 {
525 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
526 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
527 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
528 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
529 DFS_follow_tree_edge (DECL_FCONTEXT (expr));
530 }
531
532 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
533 {
534 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
535 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
536 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
537 }
538
539 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
540 {
541 DFS_follow_tree_edge (TYPE_SIZE (expr));
542 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
543 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
544 DFS_follow_tree_edge (TYPE_NAME (expr));
545 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
546 reconstructed during fixup. */
547 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
548 during fixup. */
549 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
550 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
551 /* TYPE_CANONICAL is re-computed during type merging, so no need
552 to follow it here. */
553 DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
554 }
555
556 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
557 {
558 if (TREE_CODE (expr) == ENUMERAL_TYPE)
559 DFS_follow_tree_edge (TYPE_VALUES (expr));
560 else if (TREE_CODE (expr) == ARRAY_TYPE)
561 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
562 else if (RECORD_OR_UNION_TYPE_P (expr))
563 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
564 DFS_follow_tree_edge (t);
565 else if (TREE_CODE (expr) == FUNCTION_TYPE
566 || TREE_CODE (expr) == METHOD_TYPE)
567 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
568
569 if (!POINTER_TYPE_P (expr))
570 DFS_follow_tree_edge (TYPE_MINVAL (expr));
571 DFS_follow_tree_edge (TYPE_MAXVAL (expr));
572 if (RECORD_OR_UNION_TYPE_P (expr))
573 DFS_follow_tree_edge (TYPE_BINFO (expr));
574 }
575
576 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
577 {
578 DFS_follow_tree_edge (TREE_PURPOSE (expr));
579 DFS_follow_tree_edge (TREE_VALUE (expr));
580 DFS_follow_tree_edge (TREE_CHAIN (expr));
581 }
582
583 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
584 {
585 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
586 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
587 }
588
589 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
590 {
591 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
592 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
593 DFS_follow_tree_edge (TREE_BLOCK (expr));
594 }
595
596 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
597 {
598 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
599 /* ??? FIXME. See also streamer_write_chain. */
600 if (!(VAR_OR_FUNCTION_DECL_P (t)
601 && DECL_EXTERNAL (t)))
602 DFS_follow_tree_edge (t);
603
604 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
605
606 /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
607 handle - those that represent inlined function scopes.
608 For the rest, drop them on the floor instead of ICEing
609 in dwarf2out.c. */
610 if (inlined_function_outer_scope_p (expr))
611 {
612 tree ultimate_origin = block_ultimate_origin (expr);
613 DFS_follow_tree_edge (ultimate_origin);
614 }
615 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
616 information for early inlined BLOCKs so drop it on the floor instead
617 of ICEing in dwarf2out.c. */
618
619 /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN are not live at LTO
620 streaming time. */
621
622 /* Do not output BLOCK_SUBBLOCKS. Instead on streaming-in this
623 list is re-constructed from BLOCK_SUPERCONTEXT. */
624 }
625
626 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
627 {
628 unsigned i;
629 tree t;
630
631 /* Note that the number of BINFO slots has already been emitted in
632 EXPR's header (see streamer_write_tree_header) because this length
633 is needed to build the empty BINFO node on the reader side. */
634 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
635 DFS_follow_tree_edge (t);
636 DFS_follow_tree_edge (BINFO_OFFSET (expr));
637 DFS_follow_tree_edge (BINFO_VTABLE (expr));
638 DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));
639
640 /* The number of BINFO_BASE_ACCESSES has already been emitted in
641 EXPR's bitfield section. */
642 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
643 DFS_follow_tree_edge (t);
644
645 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
646 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
647 }
648
649 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
650 {
651 unsigned i;
652 tree index, value;
653
654 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
655 {
656 DFS_follow_tree_edge (index);
657 DFS_follow_tree_edge (value);
658 }
659 }
660
661 #undef DFS_follow_tree_edge
662 }
663
664 /* Return a hash value for the tree T. */
665
666 static hashval_t
667 hash_tree (struct streamer_tree_cache_d *cache, tree t)
668 {
669 #define visit(SIBLING) \
670 do { \
671 unsigned ix; \
672 if (SIBLING && streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
673 v = iterative_hash_hashval_t (streamer_tree_cache_get_hash (cache, ix), v); \
674 } while (0)
675
676 /* Hash TS_BASE. */
677 enum tree_code code = TREE_CODE (t);
678 hashval_t v = iterative_hash_host_wide_int (code, 0);
679 if (!TYPE_P (t))
680 {
681 v = iterative_hash_host_wide_int (TREE_SIDE_EFFECTS (t)
682 | (TREE_CONSTANT (t) << 1)
683 | (TREE_READONLY (t) << 2)
684 | (TREE_PUBLIC (t) << 3), v);
685 }
686 v = iterative_hash_host_wide_int (TREE_ADDRESSABLE (t)
687 | (TREE_THIS_VOLATILE (t) << 1), v);
688 if (DECL_P (t))
689 v = iterative_hash_host_wide_int (DECL_UNSIGNED (t), v);
690 else if (TYPE_P (t))
691 v = iterative_hash_host_wide_int (TYPE_UNSIGNED (t), v);
692 if (TYPE_P (t))
693 v = iterative_hash_host_wide_int (TYPE_ARTIFICIAL (t), v);
694 else
695 v = iterative_hash_host_wide_int (TREE_NO_WARNING (t), v);
696 v = iterative_hash_host_wide_int (TREE_NOTHROW (t)
697 | (TREE_STATIC (t) << 1)
698 | (TREE_PROTECTED (t) << 2)
699 | (TREE_DEPRECATED (t) << 3), v);
700 if (code != TREE_BINFO)
701 v = iterative_hash_host_wide_int (TREE_PRIVATE (t), v);
702 if (TYPE_P (t))
703 v = iterative_hash_host_wide_int (TYPE_SATURATING (t)
704 | (TYPE_ADDR_SPACE (t) << 1), v);
705 else if (code == SSA_NAME)
706 v = iterative_hash_host_wide_int (SSA_NAME_IS_DEFAULT_DEF (t), v);
707
708 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
709 {
710 v = iterative_hash_host_wide_int (TREE_INT_CST_LOW (t), v);
711 v = iterative_hash_host_wide_int (TREE_INT_CST_HIGH (t), v);
712 }
713
714 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
715 {
716 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
717 v = iterative_hash_host_wide_int (r.cl, v);
718 v = iterative_hash_host_wide_int (r.decimal
719 | (r.sign << 1)
720 | (r.signalling << 2)
721 | (r.canonical << 3), v);
722 v = iterative_hash_host_wide_int (r.uexp, v);
723 for (unsigned i = 0; i < SIGSZ; ++i)
724 v = iterative_hash_host_wide_int (r.sig[i], v);
725 }
726
727 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
728 {
729 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
730 v = iterative_hash_host_wide_int (f.mode, v);
731 v = iterative_hash_host_wide_int (f.data.low, v);
732 v = iterative_hash_host_wide_int (f.data.high, v);
733 }
734
735 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
736 {
737 v = iterative_hash_host_wide_int (DECL_MODE (t), v);
738 v = iterative_hash_host_wide_int (DECL_NONLOCAL (t)
739 | (DECL_VIRTUAL_P (t) << 1)
740 | (DECL_IGNORED_P (t) << 2)
741 | (DECL_ABSTRACT (t) << 3)
742 | (DECL_ARTIFICIAL (t) << 4)
743 | (DECL_USER_ALIGN (t) << 5)
744 | (DECL_PRESERVE_P (t) << 6)
745 | (DECL_EXTERNAL (t) << 7)
746 | (DECL_GIMPLE_REG_P (t) << 8), v);
747 v = iterative_hash_host_wide_int (DECL_ALIGN (t), v);
748 if (code == LABEL_DECL)
749 {
750 v = iterative_hash_host_wide_int (EH_LANDING_PAD_NR (t), v);
751 v = iterative_hash_host_wide_int (LABEL_DECL_UID (t), v);
752 }
753 else if (code == FIELD_DECL)
754 {
755 v = iterative_hash_host_wide_int (DECL_PACKED (t)
756 | (DECL_NONADDRESSABLE_P (t) << 1),
757 v);
758 v = iterative_hash_host_wide_int (DECL_OFFSET_ALIGN (t), v);
759 }
760 else if (code == VAR_DECL)
761 {
762 v = iterative_hash_host_wide_int (DECL_HAS_DEBUG_EXPR_P (t)
763 | (DECL_NONLOCAL_FRAME (t) << 1),
764 v);
765 }
766 if (code == RESULT_DECL
767 || code == PARM_DECL
768 || code == VAR_DECL)
769 {
770 v = iterative_hash_host_wide_int (DECL_BY_REFERENCE (t), v);
771 if (code == VAR_DECL
772 || code == PARM_DECL)
773 v = iterative_hash_host_wide_int (DECL_HAS_VALUE_EXPR_P (t), v);
774 }
775 }
776
777 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
778 v = iterative_hash_host_wide_int (DECL_REGISTER (t), v);
779
780 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
781 {
782 v = iterative_hash_host_wide_int ((DECL_COMMON (t))
783 | (DECL_DLLIMPORT_P (t) << 1)
784 | (DECL_WEAK (t) << 2)
785 | (DECL_SEEN_IN_BIND_EXPR_P (t) << 3)
786 | (DECL_COMDAT (t) << 4)
787 | (DECL_VISIBILITY_SPECIFIED (t) << 6),
788 v);
789 v = iterative_hash_host_wide_int (DECL_VISIBILITY (t), v);
790 if (code == VAR_DECL)
791 {
792 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
793 v = iterative_hash_host_wide_int (DECL_HARD_REGISTER (t)
794 | (DECL_IN_CONSTANT_POOL (t) << 1),
795 v);
796 v = iterative_hash_host_wide_int (DECL_TLS_MODEL (t), v);
797 }
798 if (VAR_OR_FUNCTION_DECL_P (t))
799 v = iterative_hash_host_wide_int (DECL_INIT_PRIORITY (t), v);
800 }
801
802 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
803 {
804 v = iterative_hash_host_wide_int (DECL_BUILT_IN_CLASS (t), v);
805 v = iterative_hash_host_wide_int (DECL_STATIC_CONSTRUCTOR (t)
806 | (DECL_STATIC_DESTRUCTOR (t) << 1)
807 | (DECL_UNINLINABLE (t) << 2)
808 | (DECL_POSSIBLY_INLINED (t) << 3)
809 | (DECL_IS_NOVOPS (t) << 4)
810 | (DECL_IS_RETURNS_TWICE (t) << 5)
811 | (DECL_IS_MALLOC (t) << 6)
812 | (DECL_IS_OPERATOR_NEW (t) << 7)
813 | (DECL_DECLARED_INLINE_P (t) << 8)
814 | (DECL_STATIC_CHAIN (t) << 9)
815 | (DECL_NO_INLINE_WARNING_P (t) << 10)
816 | (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t) << 11)
817 | (DECL_NO_LIMIT_STACK (t) << 12)
818 | (DECL_DISREGARD_INLINE_LIMITS (t) << 13)
819 | (DECL_PURE_P (t) << 14)
820 | (DECL_LOOPING_CONST_OR_PURE_P (t) << 15), v);
821 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
822 v = iterative_hash_host_wide_int (DECL_FUNCTION_CODE (t), v);
823 if (DECL_STATIC_DESTRUCTOR (t))
824 v = iterative_hash_host_wide_int (DECL_FINI_PRIORITY (t), v);
825 }
826
827 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
828 {
829 v = iterative_hash_host_wide_int (TYPE_MODE (t), v);
830 v = iterative_hash_host_wide_int (TYPE_STRING_FLAG (t)
831 | (TYPE_NO_FORCE_BLK (t) << 1)
832 | (TYPE_NEEDS_CONSTRUCTING (t) << 2)
833 | (TYPE_PACKED (t) << 3)
834 | (TYPE_RESTRICT (t) << 4)
835 | (TYPE_USER_ALIGN (t) << 5)
836 | (TYPE_READONLY (t) << 6), v);
837 if (RECORD_OR_UNION_TYPE_P (t))
838 v = iterative_hash_host_wide_int (TYPE_TRANSPARENT_AGGR (t), v);
839 else if (code == ARRAY_TYPE)
840 v = iterative_hash_host_wide_int (TYPE_NONALIASED_COMPONENT (t), v);
841 v = iterative_hash_host_wide_int (TYPE_PRECISION (t), v);
842 v = iterative_hash_host_wide_int (TYPE_ALIGN (t), v);
843 v = iterative_hash_host_wide_int ((TYPE_ALIAS_SET (t) == 0
844 || (!in_lto_p
845 && get_alias_set (t) == 0))
846 ? 0 : -1, v);
847 }
848
849 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
850 v = iterative_hash (TRANSLATION_UNIT_LANGUAGE (t),
851 strlen (TRANSLATION_UNIT_LANGUAGE (t)), v);
852
853 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
854 v = iterative_hash (t, sizeof (struct cl_target_option), v);
855
856 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
857 v = iterative_hash (t, sizeof (struct cl_optimization), v);
858
859 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
860 v = iterative_hash_host_wide_int (IDENTIFIER_HASH_VALUE (t), v);
861
862 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
863 v = iterative_hash (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t), v);
864
865 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
866 {
867 if (POINTER_TYPE_P (t))
868 {
869 /* For pointers factor in the pointed-to type recursively as
870 we cannot recurse through only pointers.
871 ??? We can generalize this by keeping track of the
872 in-SCC edges for each tree (or arbitrarily the first
873 such edge) and hashing that in during a second stage
874 (instead of the quadratic mixing of the SCC we do now). */
875 hashval_t x;
876 unsigned ix;
877 if (streamer_tree_cache_lookup (cache, TREE_TYPE (t), &ix))
878 x = streamer_tree_cache_get_hash (cache, ix);
879 else
880 x = hash_tree (cache, TREE_TYPE (t));
881 v = iterative_hash_hashval_t (x, v);
882 }
883 else if (code != IDENTIFIER_NODE)
884 visit (TREE_TYPE (t));
885 }
886
887 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
888 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
889 visit (VECTOR_CST_ELT (t, i));
890
891 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
892 {
893 visit (TREE_REALPART (t));
894 visit (TREE_IMAGPART (t));
895 }
896
897 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
898 {
899 /* Drop names that were created for anonymous entities. */
900 if (DECL_NAME (t)
901 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
902 && ANON_AGGRNAME_P (DECL_NAME (t)))
903 ;
904 else
905 visit (DECL_NAME (t));
906 if (DECL_FILE_SCOPE_P (t))
907 ;
908 else
909 visit (DECL_CONTEXT (t));
910 }
911
912 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
913 {
914 visit (DECL_SIZE (t));
915 visit (DECL_SIZE_UNIT (t));
916 visit (DECL_ATTRIBUTES (t));
917 if ((code == VAR_DECL
918 || code == PARM_DECL)
919 && DECL_HAS_VALUE_EXPR_P (t))
920 visit (DECL_VALUE_EXPR (t));
921 if (code == VAR_DECL
922 && DECL_HAS_DEBUG_EXPR_P (t))
923 visit (DECL_DEBUG_EXPR (t));
924 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
925 be able to call get_symbol_initial_value. */
926 }
927
928 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
929 {
930 if (code == TYPE_DECL)
931 visit (DECL_ORIGINAL_TYPE (t));
932 visit (DECL_VINDEX (t));
933 }
934
935 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
936 {
937 if (DECL_ASSEMBLER_NAME_SET_P (t))
938 visit (DECL_ASSEMBLER_NAME (t));
939 visit (DECL_SECTION_NAME (t));
940 visit (DECL_COMDAT_GROUP (t));
941 }
942
943 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
944 {
945 visit (DECL_FIELD_OFFSET (t));
946 visit (DECL_BIT_FIELD_TYPE (t));
947 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
948 visit (DECL_FIELD_BIT_OFFSET (t));
949 visit (DECL_FCONTEXT (t));
950 }
951
952 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
953 {
954 visit (DECL_FUNCTION_PERSONALITY (t));
955 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
956 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
957 }
958
959 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
960 {
961 visit (TYPE_SIZE (t));
962 visit (TYPE_SIZE_UNIT (t));
963 visit (TYPE_ATTRIBUTES (t));
964 visit (TYPE_NAME (t));
965 visit (TYPE_MAIN_VARIANT (t));
966 if (TYPE_FILE_SCOPE_P (t))
967 ;
968 else
969 visit (TYPE_CONTEXT (t));
970 visit (TYPE_STUB_DECL (t));
971 }
972
973 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
974 {
975 if (code == ENUMERAL_TYPE)
976 visit (TYPE_VALUES (t));
977 else if (code == ARRAY_TYPE)
978 visit (TYPE_DOMAIN (t));
979 else if (RECORD_OR_UNION_TYPE_P (t))
980 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
981 visit (f);
982 else if (code == FUNCTION_TYPE
983 || code == METHOD_TYPE)
984 visit (TYPE_ARG_TYPES (t));
985 if (!POINTER_TYPE_P (t))
986 visit (TYPE_MINVAL (t));
987 visit (TYPE_MAXVAL (t));
988 if (RECORD_OR_UNION_TYPE_P (t))
989 visit (TYPE_BINFO (t));
990 }
991
992 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
993 {
994 visit (TREE_PURPOSE (t));
995 visit (TREE_VALUE (t));
996 visit (TREE_CHAIN (t));
997 }
998
999 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1000 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1001 visit (TREE_VEC_ELT (t, i));
1002
1003 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1004 {
1005 v = iterative_hash_host_wide_int (TREE_OPERAND_LENGTH (t), v);
1006 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1007 visit (TREE_OPERAND (t, i));
1008 }
1009
1010 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1011 {
1012 unsigned i;
1013 tree b;
1014 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1015 visit (b);
1016 visit (BINFO_OFFSET (t));
1017 visit (BINFO_VTABLE (t));
1018 visit (BINFO_VPTR_FIELD (t));
1019 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1020 visit (b);
1021 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1022 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1023 }
1024
1025 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1026 {
1027 unsigned i;
1028 tree index, value;
1029 v = iterative_hash_host_wide_int (CONSTRUCTOR_NELTS (t), v);
1030 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1031 {
1032 visit (index);
1033 visit (value);
1034 }
1035 }
1036
1037 return v;
1038
1039 #undef visit
1040 }
1041
1042 /* Compare two SCC entries by their hash value for qsorting them. */
1043
1044 static int
1045 scc_entry_compare (const void *p1_, const void *p2_)
1046 {
1047 const scc_entry *p1 = (const scc_entry *) p1_;
1048 const scc_entry *p2 = (const scc_entry *) p2_;
1049 if (p1->hash < p2->hash)
1050 return -1;
1051 else if (p1->hash > p2->hash)
1052 return 1;
1053 return 0;
1054 }
1055
1056 /* Return a hash value for the SCC on the SCC stack from FIRST with
1057 size SIZE. */
1058
1059 static hashval_t
1060 hash_scc (struct streamer_tree_cache_d *cache, unsigned first, unsigned size)
1061 {
1062 /* Compute hash values for the SCC members. */
1063 for (unsigned i = 0; i < size; ++i)
1064 sccstack[first+i].hash = hash_tree (cache, sccstack[first+i].t);
1065
1066 if (size == 1)
1067 return sccstack[first].hash;
1068
1069 /* Sort the SCC's (tree, hash) pairs so that when we mix in
1070 all members of the SCC the hash value becomes independent of
1071 the order in which we visited the SCC. Disregard hashes equal to
1072 the hash of the tree we mix into because we cannot guarantee
1073 a stable sort for those across different TUs. */
1074 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1075 hashval_t *tem = XALLOCAVEC (hashval_t, size);
1076 for (unsigned i = 0; i < size; ++i)
1077 {
1078 hashval_t hash = sccstack[first+i].hash;
1079 hashval_t orig_hash = hash;
1080 unsigned j;
1081 /* Skip same hashes. */
1082 for (j = i + 1;
1083 j < size && sccstack[first+j].hash == orig_hash; ++j)
1084 ;
1085 for (; j < size; ++j)
1086 hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
1087 for (j = 0; sccstack[first+j].hash != orig_hash; ++j)
1088 hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
1089 tem[i] = hash;
1090 }
1091 hashval_t scc_hash = 0;
1092 for (unsigned i = 0; i < size; ++i)
1093 {
1094 sccstack[first+i].hash = tem[i];
1095 scc_hash = iterative_hash_hashval_t (tem[i], scc_hash);
1096 }
1097 return scc_hash;
1098 }
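
/* The loop above makes the result independent of the DFS visitation
   order: after sorting, each entry's final hash TEM[i] mixes its own
   hash with the hashes of all entries that hash differently (entries
   with an equal hash are skipped because their relative order is not
   stable across translation units), and the SCC hash then mixes all
   TEM[i].  This is quadratic in the SCC size, but it is only done
   when the hashes are needed for merging (see the !flag_wpa guard in
   DFS_write_tree).  */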
1099
1100 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1101 already in the streamer cache. Main routine called for
1102 each visit of EXPR. */
1103
1104 static void
1105 DFS_write_tree (struct output_block *ob, sccs *from_state,
1106 tree expr, bool ref_p, bool this_ref_p)
1107 {
1108 unsigned ix;
1109 sccs **slot;
1110
1111 /* Handle special cases. */
1112 if (expr == NULL_TREE)
1113 return;
1114
1115 /* Do not DFS walk into indexable trees. */
1116 if (this_ref_p && tree_is_indexable (expr))
1117 return;
1118
1119 /* Check if we already streamed EXPR. */
1120 if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
1121 return;
1122
1123 slot = (sccs **)pointer_map_insert (sccstate, expr);
1124 sccs *cstate = *slot;
1125 if (!cstate)
1126 {
1127 scc_entry e = { expr, 0 };
1128 /* Not yet visited. DFS recurse and push it onto the stack. */
1129 *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
1130 sccstack.safe_push (e);
1131 cstate->dfsnum = next_dfs_num++;
1132 cstate->low = cstate->dfsnum;
1133
1134 if (streamer_handle_as_builtin_p (expr))
1135 ;
1136 else if (TREE_CODE (expr) == INTEGER_CST
1137 && !TREE_OVERFLOW (expr))
1138 DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
1139 else
1140 {
1141 DFS_write_tree_body (ob, expr, cstate, ref_p);
1142
1143 /* Walk any LTO-specific edges. */
1144 if (DECL_P (expr)
1145 && TREE_CODE (expr) != FUNCTION_DECL
1146 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
1147 {
1148 /* Handle DECL_INITIAL for symbols. */
1149 tree initial = get_symbol_initial_value (ob, expr);
1150 DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
1151 }
1152 }
1153
1154 /* See if we found an SCC. */
1155 if (cstate->low == cstate->dfsnum)
1156 {
1157 unsigned first, size;
1158 tree x;
1159
1160 /* Pop the SCC and compute its size. */
1161 first = sccstack.length ();
1162 do
1163 {
1164 x = sccstack[--first].t;
1165 }
1166 while (x != expr);
1167 size = sccstack.length () - first;
1168
1169 /* No need to compute hashes for LTRANS units, we don't perform
1170 any merging there. */
1171 hashval_t scc_hash = 0;
1172 unsigned scc_entry_len = 0;
1173 if (!flag_wpa)
1174 {
1175 scc_hash = hash_scc (ob->writer_cache, first, size);
1176
1177 /* Put the entries with the least number of collisions first. */
1178 unsigned entry_start = 0;
1179 scc_entry_len = size + 1;
1180 for (unsigned i = 0; i < size;)
1181 {
1182 unsigned from = i;
1183 for (i = i + 1; i < size
1184 && (sccstack[first + i].hash
1185 == sccstack[first + from].hash); ++i)
1186 ;
1187 if (i - from < scc_entry_len)
1188 {
1189 scc_entry_len = i - from;
1190 entry_start = from;
1191 }
1192 }
1193 for (unsigned i = 0; i < scc_entry_len; ++i)
1194 {
1195 scc_entry tem = sccstack[first + i];
1196 sccstack[first + i] = sccstack[first + entry_start + i];
1197 sccstack[first + entry_start + i] = tem;
1198 }
1199 }
1200
1201 /* Write LTO_tree_scc. */
1202 streamer_write_record_start (ob, LTO_tree_scc);
1203 streamer_write_uhwi (ob, size);
1204 streamer_write_uhwi (ob, scc_hash);
1205
1206 /* Write size-1 SCCs without wrapping them inside SCC bundles.
1207 All INTEGER_CSTs need to be handled this way as we need
1208 their type to materialize them. Also builtins are handled
1209 this way.
1210 ??? We still wrap these in LTO_tree_scc so at the
1211 input side we can properly identify the tree we want
1212 to ultimately return. */
1213 size_t old_len = ob->writer_cache->nodes.length ();
1214 if (size == 1)
1215 lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
1216 else
1217 {
1218 /* Write the size of the SCC entry candidates. */
1219 streamer_write_uhwi (ob, scc_entry_len);
1220
1221 /* Write all headers and populate the streamer cache. */
1222 for (unsigned i = 0; i < size; ++i)
1223 {
1224 hashval_t hash = sccstack[first+i].hash;
1225 tree t = sccstack[first+i].t;
1226 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
1227 t, hash, &ix);
1228 gcc_assert (!exists_p);
1229
1230 if (!lto_is_streamable (t))
1231 internal_error ("tree code %qs is not supported "
1232 "in LTO streams",
1233 tree_code_name[TREE_CODE (t)]);
1234
1235 gcc_checking_assert (!streamer_handle_as_builtin_p (t));
1236
1237 /* Write the header, containing everything needed to
1238 materialize EXPR on the reading side. */
1239 streamer_write_tree_header (ob, t);
1240 }
1241
1242 /* Write the bitpacks and tree references. */
1243 for (unsigned i = 0; i < size; ++i)
1244 {
1245 lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
1246
1247 /* Mark the end of the tree. */
1248 streamer_write_zero (ob);
1249 }
1250 }
1251 gcc_assert (old_len + size == ob->writer_cache->nodes.length ());
1252
1253 /* Finally truncate the vector. */
1254 sccstack.truncate (first);
1255
1256 if (from_state)
1257 from_state->low = MIN (from_state->low, cstate->low);
1258 return;
1259 }
1260
1261 if (from_state)
1262 from_state->low = MIN (from_state->low, cstate->low);
1263 }
1264 gcc_checking_assert (from_state);
1265 if (cstate->dfsnum < from_state->dfsnum)
1266 from_state->low = MIN (cstate->dfsnum, from_state->low);
1267 }
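
/* To summarize, each SCC is emitted as one record roughly laid out as

     LTO_tree_scc
       size
       scc_hash
       if size == 1:  the single tree (builtin reference, INTEGER_CST
                      or header followed by body and a zero terminator)
       otherwise:     scc_entry_len
                      SIZE tree headers
                      SIZE tree bodies, each terminated by a zero

   and lto_output_tree finally appends an LTO_tree_pickle_reference
   for the tree it was originally asked to write.  */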
1268
1269
1270 /* Emit the physical representation of tree node EXPR to output block
1271 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
1272 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1273
1274 void
1275 lto_output_tree (struct output_block *ob, tree expr,
1276 bool ref_p, bool this_ref_p)
1277 {
1278 unsigned ix;
1279 bool existed_p;
1280
1281 if (expr == NULL_TREE)
1282 {
1283 streamer_write_record_start (ob, LTO_null);
1284 return;
1285 }
1286
1287 if (this_ref_p && tree_is_indexable (expr))
1288 {
1289 lto_output_tree_ref (ob, expr);
1290 return;
1291 }
1292
1293 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1294 if (existed_p)
1295 {
1296 /* If a node has already been streamed out, make sure that
1297 we don't write it more than once. Otherwise, the reader
1298 will instantiate two different nodes for the same object. */
1299 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1300 streamer_write_uhwi (ob, ix);
1301 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1302 lto_tree_code_to_tag (TREE_CODE (expr)));
1303 lto_stats.num_pickle_refs_output++;
1304 }
1305 else
1306 {
1307 /* This is the first time we see EXPR, write all reachable
1308 trees to OB. */
1309 static bool in_dfs_walk;
1310
1311 /* Protect against recursion, which would mean a disconnect between
1312 the tree edges we walk in the DFS walk and the edges
1313 we stream out. */
1314 gcc_assert (!in_dfs_walk);
1315
1316 /* Start the DFS walk. */
1319 in_dfs_walk = true;
1320 sccstate = pointer_map_create ();
1321 gcc_obstack_init (&sccstate_obstack);
1322 next_dfs_num = 1;
1323 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
1324 sccstack.release ();
1325 pointer_map_destroy (sccstate);
1326 obstack_free (&sccstate_obstack, NULL);
1327 in_dfs_walk = false;
1328
1329 /* Finally append a reference to the tree we were writing.
1330 ??? If expr ended up as a singleton we could have
1331 inlined it here and avoided outputting a reference. */
1332 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1333 gcc_assert (existed_p);
1334 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1335 streamer_write_uhwi (ob, ix);
1336 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1337 lto_tree_code_to_tag (TREE_CODE (expr)));
1338 lto_stats.num_pickle_refs_output++;
1339 }
1340 }
1341
1342
1343 /* Output to OB a list of try/catch handlers starting with FIRST. */
1344
1345 static void
1346 output_eh_try_list (struct output_block *ob, eh_catch first)
1347 {
1348 eh_catch n;
1349
1350 for (n = first; n; n = n->next_catch)
1351 {
1352 streamer_write_record_start (ob, LTO_eh_catch);
1353 stream_write_tree (ob, n->type_list, true);
1354 stream_write_tree (ob, n->filter_list, true);
1355 stream_write_tree (ob, n->label, true);
1356 }
1357
1358 streamer_write_record_start (ob, LTO_null);
1359 }
1360
1361
1362 /* Output EH region R to OB. */
1365
1366 static void
1367 output_eh_region (struct output_block *ob, eh_region r)
1368 {
1369 enum LTO_tags tag;
1370
1371 if (r == NULL)
1372 {
1373 streamer_write_record_start (ob, LTO_null);
1374 return;
1375 }
1376
1377 if (r->type == ERT_CLEANUP)
1378 tag = LTO_ert_cleanup;
1379 else if (r->type == ERT_TRY)
1380 tag = LTO_ert_try;
1381 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1382 tag = LTO_ert_allowed_exceptions;
1383 else if (r->type == ERT_MUST_NOT_THROW)
1384 tag = LTO_ert_must_not_throw;
1385 else
1386 gcc_unreachable ();
1387
1388 streamer_write_record_start (ob, tag);
1389 streamer_write_hwi (ob, r->index);
1390
1391 if (r->outer)
1392 streamer_write_hwi (ob, r->outer->index);
1393 else
1394 streamer_write_zero (ob);
1395
1396 if (r->inner)
1397 streamer_write_hwi (ob, r->inner->index);
1398 else
1399 streamer_write_zero (ob);
1400
1401 if (r->next_peer)
1402 streamer_write_hwi (ob, r->next_peer->index);
1403 else
1404 streamer_write_zero (ob);
1405
1406 if (r->type == ERT_TRY)
1407 {
1408 output_eh_try_list (ob, r->u.eh_try.first_catch);
1409 }
1410 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1411 {
1412 stream_write_tree (ob, r->u.allowed.type_list, true);
1413 stream_write_tree (ob, r->u.allowed.label, true);
1414 streamer_write_uhwi (ob, r->u.allowed.filter);
1415 }
1416 else if (r->type == ERT_MUST_NOT_THROW)
1417 {
1418 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1419 bitpack_d bp = bitpack_create (ob->main_stream);
1420 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1421 streamer_write_bitpack (&bp);
1422 }
1423
1424 if (r->landing_pads)
1425 streamer_write_hwi (ob, r->landing_pads->index);
1426 else
1427 streamer_write_zero (ob);
1428 }
1429
1430
1431 /* Output landing pad LP to OB. */
1432
1433 static void
1434 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1435 {
1436 if (lp == NULL)
1437 {
1438 streamer_write_record_start (ob, LTO_null);
1439 return;
1440 }
1441
1442 streamer_write_record_start (ob, LTO_eh_landing_pad);
1443 streamer_write_hwi (ob, lp->index);
1444 if (lp->next_lp)
1445 streamer_write_hwi (ob, lp->next_lp->index);
1446 else
1447 streamer_write_zero (ob);
1448
1449 if (lp->region)
1450 streamer_write_hwi (ob, lp->region->index);
1451 else
1452 streamer_write_zero (ob);
1453
1454 stream_write_tree (ob, lp->post_landing_pad, true);
1455 }
1456
1457
1458 /* Output the existing eh_table to OB. */
1459
1460 static void
1461 output_eh_regions (struct output_block *ob, struct function *fn)
1462 {
1463 if (fn->eh && fn->eh->region_tree)
1464 {
1465 unsigned i;
1466 eh_region eh;
1467 eh_landing_pad lp;
1468 tree ttype;
1469
1470 streamer_write_record_start (ob, LTO_eh_table);
1471
1472 /* Emit the index of the root of the EH region tree. */
1473 streamer_write_hwi (ob, fn->eh->region_tree->index);
1474
1475 /* Emit all the EH regions in the region array. */
1476 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1477 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1478 output_eh_region (ob, eh);
1479
1480 /* Emit all landing pads. */
1481 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1482 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1483 output_eh_lp (ob, lp);
1484
1485 /* Emit all the runtime type data. */
1486 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1487 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1488 stream_write_tree (ob, ttype, true);
1489
1490 /* Emit the table of action chains. */
1491 if (targetm.arm_eabi_unwinder)
1492 {
1493 tree t;
1494 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1495 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1496 stream_write_tree (ob, t, true);
1497 }
1498 else
1499 {
1500 uchar c;
1501 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1502 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1503 streamer_write_char_stream (ob->main_stream, c);
1504 }
1505 }
1506
1507 /* The LTO_null either terminates the record or indicates that there
1508 are no eh_records at all. */
1509 streamer_write_record_start (ob, LTO_null);
1510 }
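
/* The EH table is thus streamed as

     LTO_eh_table
       index of the region tree root
       number of regions, then one record per region_array slot
       number of landing pads, then one record per lp_array slot
       number of runtime ttype entries, then those trees
       the exception specification data (trees for the ARM EABI
       unwinder, raw bytes otherwise)
     LTO_null

   with a lone LTO_null emitted when the function has no EH tree at
   all.  */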
1511
1512
1513 /* Output all of the active ssa names to the ssa_names stream. */
1514
1515 static void
1516 output_ssa_names (struct output_block *ob, struct function *fn)
1517 {
1518 unsigned int i, len;
1519
1520 len = vec_safe_length (SSANAMES (fn));
1521 streamer_write_uhwi (ob, len);
1522
1523 for (i = 1; i < len; i++)
1524 {
1525 tree ptr = (*SSANAMES (fn))[i];
1526
1527 if (ptr == NULL_TREE
1528 || SSA_NAME_IN_FREE_LIST (ptr)
1529 || virtual_operand_p (ptr))
1530 continue;
1531
1532 streamer_write_uhwi (ob, i);
1533 streamer_write_char_stream (ob->main_stream,
1534 SSA_NAME_IS_DEFAULT_DEF (ptr));
1535 if (SSA_NAME_VAR (ptr))
1536 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1537 else
1538 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1539 stream_write_tree (ob, TREE_TYPE (ptr), true);
1540 }
1541
1542 streamer_write_zero (ob);
1543 }
1544
1545
1546 /* Output the cfg. */
1547
1548 static void
1549 output_cfg (struct output_block *ob, struct function *fn)
1550 {
1551 struct lto_output_stream *tmp_stream = ob->main_stream;
1552 basic_block bb;
1553
1554 ob->main_stream = ob->cfg_stream;
1555
1556 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1557 profile_status_for_function (fn));
1558
1559 /* Output the number of the highest basic block. */
1560 streamer_write_uhwi (ob, last_basic_block_for_function (fn));
1561
1562 FOR_ALL_BB_FN (bb, fn)
1563 {
1564 edge_iterator ei;
1565 edge e;
1566
1567 streamer_write_hwi (ob, bb->index);
1568
1569 /* Output the successors and the edge flags. */
1570 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1571 FOR_EACH_EDGE (e, ei, bb->succs)
1572 {
1573 streamer_write_uhwi (ob, e->dest->index);
1574 streamer_write_hwi (ob, e->probability);
1575 streamer_write_gcov_count (ob, e->count);
1576 streamer_write_uhwi (ob, e->flags);
1577 }
1578 }
1579
1580 streamer_write_hwi (ob, -1);
1581
1582 bb = ENTRY_BLOCK_PTR;
1583 while (bb->next_bb)
1584 {
1585 streamer_write_hwi (ob, bb->next_bb->index);
1586 bb = bb->next_bb;
1587 }
1588
1589 streamer_write_hwi (ob, -1);
1590
1591 /* ??? The cfgloop interface is tied to cfun. */
1592 gcc_assert (cfun == fn);
1593
1594 /* Output the number of loops. */
1595 streamer_write_uhwi (ob, number_of_loops (fn));
1596
1597 /* Output each loop, skipping the tree root which has number zero. */
1598 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1599 {
1600 struct loop *loop = get_loop (fn, i);
1601
1602 /* Write the index of the loop header. That's enough to rebuild
1603 the loop tree on the reader side. Stream -1 for an unused
1604 loop entry. */
1605 if (!loop)
1606 {
1607 streamer_write_hwi (ob, -1);
1608 continue;
1609 }
1610 else
1611 streamer_write_hwi (ob, loop->header->index);
1612
1613 /* Write everything copy_loop_info copies. */
1614 streamer_write_enum (ob->main_stream,
1615 loop_estimation, EST_LAST, loop->estimate_state);
1616 streamer_write_hwi (ob, loop->any_upper_bound);
1617 if (loop->any_upper_bound)
1618 {
1619 streamer_write_uhwi (ob, loop->nb_iterations_upper_bound.low);
1620 streamer_write_hwi (ob, loop->nb_iterations_upper_bound.high);
1621 }
1622 streamer_write_hwi (ob, loop->any_estimate);
1623 if (loop->any_estimate)
1624 {
1625 streamer_write_uhwi (ob, loop->nb_iterations_estimate.low);
1626 streamer_write_hwi (ob, loop->nb_iterations_estimate.high);
1627 }
1628 }
1629
1630 ob->main_stream = tmp_stream;
1631 }
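
/* In short, the CFG stream contains the profile status, the highest
   basic block number, then for every block its index, its successor
   count and one (dest index, probability, count, flags) tuple per
   outgoing edge, a -1 sentinel, the basic block chain as a list of
   indices terminated by another -1, and finally the loop tree: the
   number of loops followed by one entry per loop (the root is
   skipped), each either -1 for an unused slot or the header block
   index plus the fields copy_loop_info copies.  */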
1632
1633
1634 /* Create the header in the file using OB. If the section type is for
1635 a function, FN is the decl for that function. */
1636
1637 void
1638 produce_asm (struct output_block *ob, tree fn)
1639 {
1640 enum lto_section_type section_type = ob->section_type;
1641 struct lto_function_header header;
1642 char *section_name;
1643 struct lto_output_stream *header_stream;
1644
1645 if (section_type == LTO_section_function_body)
1646 {
1647 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1648 section_name = lto_get_section_name (section_type, name, NULL);
1649 }
1650 else
1651 section_name = lto_get_section_name (section_type, NULL, NULL);
1652
1653 lto_begin_section (section_name, !flag_wpa);
1654 free (section_name);
1655
1656 /* The entire header stream is computed here. */
1657 memset (&header, 0, sizeof (struct lto_function_header));
1658
1659 /* Write the header. */
1660 header.lto_header.major_version = LTO_major_version;
1661 header.lto_header.minor_version = LTO_minor_version;
1662
1663 header.compressed_size = 0;
1664
1665 if (section_type == LTO_section_function_body)
1666 header.cfg_size = ob->cfg_stream->total_size;
1667 header.main_size = ob->main_stream->total_size;
1668 header.string_size = ob->string_stream->total_size;
1669
1670 header_stream = XCNEW (struct lto_output_stream);
1671 lto_output_data_stream (header_stream, &header, sizeof header);
1672 lto_write_stream (header_stream);
1673 free (header_stream);
1674
1675 /* Put all of the gimple and the string table out to the asm file as a
1676 block of text. */
1677 if (section_type == LTO_section_function_body)
1678 lto_write_stream (ob->cfg_stream);
1679 lto_write_stream (ob->main_stream);
1680 lto_write_stream (ob->string_stream);
1681
1682 lto_end_section ();
1683 }
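
/* A function body section therefore consists of the
   lto_function_header (carrying the cfg, main and string stream
   sizes) followed by the raw cfg, main and string streams;
   non-function sections simply omit the cfg part.  */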
1684
1685
1686 /* Output the base body of struct function FN using output block OB. */
1687
1688 static void
1689 output_struct_function_base (struct output_block *ob, struct function *fn)
1690 {
1691 struct bitpack_d bp;
1692 unsigned i;
1693 tree t;
1694
1695 /* Output the static chain and non-local goto save area. */
1696 stream_write_tree (ob, fn->static_chain_decl, true);
1697 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
1698
1699 /* Output all the local variables in the function. */
1700 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
1701 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
1702 stream_write_tree (ob, t, true);
1703
1704 /* Output current IL state of the function. */
1705 streamer_write_uhwi (ob, fn->curr_properties);
1706
1707 /* Write all the attributes for FN. */
1708 bp = bitpack_create (ob->main_stream);
1709 bp_pack_value (&bp, fn->is_thunk, 1);
1710 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
1711 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
1712 bp_pack_value (&bp, fn->returns_struct, 1);
1713 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
1714 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
1715 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
1716 bp_pack_value (&bp, fn->after_inlining, 1);
1717 bp_pack_value (&bp, fn->stdarg, 1);
1718 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
1719 bp_pack_value (&bp, fn->calls_alloca, 1);
1720 bp_pack_value (&bp, fn->calls_setjmp, 1);
1721 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
1722 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
1723
1724 /* Output the function start and end loci. */
1725 stream_output_location (ob, &bp, fn->function_start_locus);
1726 stream_output_location (ob, &bp, fn->function_end_locus);
1727
1728 streamer_write_bitpack (&bp);
1729 }
1730
1731
1732 /* Output the body of function NODE->DECL. */
1733
1734 static void
1735 output_function (struct cgraph_node *node)
1736 {
1737 tree function;
1738 struct function *fn;
1739 basic_block bb;
1740 struct output_block *ob;
1741
1742 function = node->symbol.decl;
1743 fn = DECL_STRUCT_FUNCTION (function);
1744 ob = create_output_block (LTO_section_function_body);
1745
1746 clear_line_info (ob);
1747 ob->cgraph_node = node;
1748
1749 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
1750
1751 /* Set current_function_decl and cfun. */
1752 push_cfun (fn);
1753
1754 /* Make string 0 be a NULL string. */
1755 streamer_write_char_stream (ob->string_stream, 0);
1756
1757 streamer_write_record_start (ob, LTO_function);
1758
1759 /* Output decls for parameters and args. */
1760 stream_write_tree (ob, DECL_RESULT (function), true);
1761 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
1762
1763 /* Output DECL_INITIAL for the function, which contains the tree of
1764 lexical scopes. */
1765 stream_write_tree (ob, DECL_INITIAL (function), true);
1766
1767 /* We also stream abstract functions, for which we only stream the
1768 parts needed for debug info. */
1769 if (gimple_has_body_p (function))
1770 {
1771 streamer_write_uhwi (ob, 1);
1772 output_struct_function_base (ob, fn);
1773
1774 /* Output all the SSA names used in the function. */
1775 output_ssa_names (ob, fn);
1776
1777 /* Output any exception handling regions. */
1778 output_eh_regions (ob, fn);
1779
1780
1781 /* We will renumber the statements. The code that does this uses
1782 the same ordering that we use for serializing them so we can use
1783 the same code on the other end and not have to write out the
1784 statement numbers. We do not assign UIDs to PHIs here because
1785 virtual PHIs get re-computed on-the-fly which would make numbers
1786 inconsistent. */
1787 set_gimple_stmt_max_uid (cfun, 0);
1788 FOR_ALL_BB (bb)
1789 {
1790 gimple_stmt_iterator gsi;
1791 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1792 {
1793 gimple stmt = gsi_stmt (gsi);
1794
1795 /* Virtual PHIs are not going to be streamed. */
1796 if (!virtual_operand_p (gimple_phi_result (stmt)))
1797 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1798 }
1799 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1800 {
1801 gimple stmt = gsi_stmt (gsi);
1802 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1803 }
1804 }
1805 /* To avoid keeping duplicate gimple IDs in the statements, renumber
1806 virtual phis now. */
1807 FOR_ALL_BB (bb)
1808 {
1809 gimple_stmt_iterator gsi;
1810 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1811 {
1812 gimple stmt = gsi_stmt (gsi);
1813 if (virtual_operand_p (gimple_phi_result (stmt)))
1814 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1815 }
1816 }
1817
1818 /* Output the code for the function. */
1819 FOR_ALL_BB_FN (bb, fn)
1820 output_bb (ob, bb, fn);
1821
1822 /* The terminator for this function. */
1823 streamer_write_record_start (ob, LTO_null);
1824
1825 output_cfg (ob, fn);
1826
1827 pop_cfun ();
1828 }
1829 else
1830 streamer_write_uhwi (ob, 0);
1831
1832 /* Create a section to hold the pickled output of this function. */
1833 produce_asm (ob, function);
1834
1835 destroy_output_block (ob);
1836 }
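
/* For reference, the function section written above has the following
   shape (a summary of the code in output_function, not a separate
   format specification):

     LTO_function record
     DECL_RESULT, DECL_ARGUMENTS chain, DECL_INITIAL
     uhwi flag - 1 if a body follows, 0 for abstract functions
     if the flag is 1:
       struct function flags and loci  (output_struct_function_base)
       SSA names                       (output_ssa_names)
       EH regions                      (output_eh_regions)
       one record per basic block      (output_bb)
       LTO_null terminator
       CFG                             (output_cfg)

   produce_asm then wraps this, together with the string table collected
   in OB, into an LTO_section_function_body section.  */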
1837
1838
1839 /* Emit toplevel asms. */
1840
1841 void
1842 lto_output_toplevel_asms (void)
1843 {
1844 struct output_block *ob;
1845 struct asm_node *can;
1846 char *section_name;
1847 struct lto_output_stream *header_stream;
1848 struct lto_asm_header header;
1849
1850 if (! asm_nodes)
1851 return;
1852
1853 ob = create_output_block (LTO_section_asm);
1854
1855 /* Make string 0 be a NULL string. */
1856 streamer_write_char_stream (ob->string_stream, 0);
1857
1858 for (can = asm_nodes; can; can = can->next)
1859 {
1860 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
1861 streamer_write_hwi (ob, can->order);
1862 }
1863
1864 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
1865
1866 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
1867 lto_begin_section (section_name, !flag_wpa);
1868 free (section_name);
1869
1870 /* The entire header stream is computed here. */
1871 memset (&header, 0, sizeof (header));
1872
1873 /* Write the header. */
1874 header.lto_header.major_version = LTO_major_version;
1875 header.lto_header.minor_version = LTO_minor_version;
1876
1877 header.main_size = ob->main_stream->total_size;
1878 header.string_size = ob->string_stream->total_size;
1879
1880 header_stream = XCNEW (struct lto_output_stream);
1881 lto_output_data_stream (header_stream, &header, sizeof (header));
1882 lto_write_stream (header_stream);
1883 free (header_stream);
1884
1885 /* Put the asm statements and the string table out to the assembler
1886 file as a block of text. */
1887 lto_write_stream (ob->main_stream);
1888 lto_write_stream (ob->string_stream);
1889
1890 lto_end_section ();
1891
1892 destroy_output_block (ob);
1893 }
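
/* The LTO_section_asm section written above therefore consists of, in
   this order (derived from the code; the field sizes come from struct
   lto_asm_header):

     lto_asm_header  - version numbers plus main_size and string_size
     main stream     - for each asm node its string constant followed by
                       its order as a signed HWI, terminated by a NULL
                       string constant
     string stream   - the string table, with slot 0 reserved for the
                       NULL string.  */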
1894
1895
1896 /* Copy the function body of NODE without deserializing. */
1897
1898 static void
1899 copy_function (struct cgraph_node *node)
1900 {
1901 tree function = node->symbol.decl;
1902 struct lto_file_decl_data *file_data = node->symbol.lto_file_data;
1903 struct lto_output_stream *output_stream = XCNEW (struct lto_output_stream);
1904 const char *data;
1905 size_t len;
1906 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
1907 char *section_name =
1908 lto_get_section_name (LTO_section_function_body, name, NULL);
1909 size_t i, j;
1910 struct lto_in_decl_state *in_state;
1911 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
1912
1913 lto_begin_section (section_name, !flag_wpa);
1914 free (section_name);
1915
1916 /* We may have renamed the declaration, e.g., a static function. */
1917 name = lto_get_decl_name_mapping (file_data, name);
1918
1919 data = lto_get_section_data (file_data, LTO_section_function_body,
1920 name, &len);
1921 gcc_assert (data);
1922
1923 /* Do a bit copy of the function body. */
1924 lto_output_data_stream (output_stream, data, len);
1925 lto_write_stream (output_stream);
1926
1927 /* Copy decls. */
1928 in_state =
1929 lto_get_function_in_decl_state (node->symbol.lto_file_data, function);
1930 gcc_assert (in_state);
1931
1932 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
1933 {
1934 size_t n = in_state->streams[i].size;
1935 tree *trees = in_state->streams[i].trees;
1936 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
1937
1938 /* The out state must have the same indices as the in state.
1939 So just copy the vector. All the encoders in the out state
1940 must be empty when we reach here. */
1941 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
1942 encoder->trees.reserve_exact (n);
1943 for (j = 0; j < n; j++)
1944 encoder->trees.safe_push (trees[j]);
1945 }
1946
1947 lto_free_section_data (file_data, LTO_section_function_body, name,
1948 data, len);
1949 free (output_stream);
1950 lto_end_section ();
1951 }
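
/* copy_function is only reached from lto_output below when flag_wpa is
   set and the body of NODE has not been materialized; the raw bytes
   read at LGEN time are still valid in that case because the decl
   streams copied above make every index embedded in those bytes resolve
   to the same tree in the new out-decl state.  */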
1952
1953
1954 /* Main entry point from the pass manager. */
1955
1956 static void
1957 lto_output (void)
1958 {
1959 struct lto_out_decl_state *decl_state;
1960 #ifdef ENABLE_CHECKING
1961 bitmap output = lto_bitmap_alloc ();
1962 #endif
1963 int i, n_nodes;
1964 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
1965
1966 /* Initialize the streamer. */
1967 lto_streamer_init ();
1968
1969 n_nodes = lto_symtab_encoder_size (encoder);
1970 /* Process only the functions with bodies. */
1971 for (i = 0; i < n_nodes; i++)
1972 {
1973 symtab_node snode = lto_symtab_encoder_deref (encoder, i);
1974 cgraph_node *node = dyn_cast <cgraph_node> (snode);
1975 if (node
1976 && lto_symtab_encoder_encode_body_p (encoder, node)
1977 && !node->symbol.alias
1978 && !node->thunk.thunk_p)
1979 {
1980 #ifdef ENABLE_CHECKING
1981 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->symbol.decl)));
1982 bitmap_set_bit (output, DECL_UID (node->symbol.decl));
1983 #endif
1984 decl_state = lto_new_out_decl_state ();
1985 lto_push_out_decl_state (decl_state);
1986 if (gimple_has_body_p (node->symbol.decl) || !flag_wpa)
1987 output_function (node);
1988 else
1989 copy_function (node);
1990 gcc_assert (lto_get_out_decl_state () == decl_state);
1991 lto_pop_out_decl_state ();
1992 lto_record_function_out_decl_state (node->symbol.decl, decl_state);
1993 }
1994 }
1995
1996 /* Emit the callgraph after emitting function bodies. This needs to
1997 be done now to make sure that all the statements in every function
1998 have been renumbered so that edges can be associated with call
1999 statements using the statement UIDs. */
2000 output_symtab ();
2001
2002 #ifdef ENABLE_CHECKING
2003 lto_bitmap_free (output);
2004 #endif
2005 }
2006
2007 namespace {
2008
2009 const pass_data pass_data_ipa_lto_gimple_out =
2010 {
2011 IPA_PASS, /* type */
2012 "lto_gimple_out", /* name */
2013 OPTGROUP_NONE, /* optinfo_flags */
2014 true, /* has_gate */
2015 false, /* has_execute */
2016 TV_IPA_LTO_GIMPLE_OUT, /* tv_id */
2017 0, /* properties_required */
2018 0, /* properties_provided */
2019 0, /* properties_destroyed */
2020 0, /* todo_flags_start */
2021 0, /* todo_flags_finish */
2022 };
2023
2024 class pass_ipa_lto_gimple_out : public ipa_opt_pass_d
2025 {
2026 public:
2027 pass_ipa_lto_gimple_out(gcc::context *ctxt)
2028 : ipa_opt_pass_d(pass_data_ipa_lto_gimple_out, ctxt,
2029 NULL, /* generate_summary */
2030 lto_output, /* write_summary */
2031 NULL, /* read_summary */
2032 lto_output, /* write_optimization_summary */
2033 NULL, /* read_optimization_summary */
2034 NULL, /* stmt_fixup */
2035 0, /* function_transform_todo_flags_start */
2036 NULL, /* function_transform */
2037 NULL) /* variable_transform */
2038 {}
2039
2040 /* opt_pass methods: */
2041 bool gate () { return gate_lto_out (); }
2042
2043 }; // class pass_ipa_lto_gimple_out
2044
2045 } // anon namespace
2046
2047 ipa_opt_pass_d *
2048 make_pass_ipa_lto_gimple_out (gcc::context *ctxt)
2049 {
2050 return new pass_ipa_lto_gimple_out (ctxt);
2051 }
2052
2053
2054 /* Write each node encoded by ENCODER to OB, as well as those reachable
2055 from it and required for correct representation of its semantics.
2056 Each node in ENCODER must be a global declaration or a type. A node
2057 is written only once, even if it appears multiple times in the
2058 vector. Certain transitively-reachable nodes, such as those
2059 representing expressions, may be duplicated, but such nodes
2060 must not appear in ENCODER itself. */
2061
2062 static void
2063 write_global_stream (struct output_block *ob,
2064 struct lto_tree_ref_encoder *encoder)
2065 {
2066 tree t;
2067 size_t index;
2068 const size_t size = lto_tree_ref_encoder_size (encoder);
2069
2070 for (index = 0; index < size; index++)
2071 {
2072 t = lto_tree_ref_encoder_get_tree (encoder, index);
2073 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2074 stream_write_tree (ob, t, false);
2075 }
2076 }
2077
2078
2079 /* Write a sequence of indices into the globals vector corresponding
2080 to the trees in ENCODER. These are used by the reader to map the
2081 indices used to refer to global entities within function bodies to
2082 their referents. */
2083
2084 static void
2085 write_global_references (struct output_block *ob,
2086 struct lto_output_stream *ref_stream,
2087 struct lto_tree_ref_encoder *encoder)
2088 {
2089 tree t;
2090 uint32_t index;
2091 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2092
2093 /* Write size as 32-bit unsigned. */
2094 lto_output_data_stream (ref_stream, &size, sizeof (int32_t));
2095
2096 for (index = 0; index < size; index++)
2097 {
2098 uint32_t slot_num;
2099
2100 t = lto_tree_ref_encoder_get_tree (encoder, index);
2101 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2102 gcc_assert (slot_num != (unsigned)-1);
2103 lto_output_data_stream (ref_stream, &slot_num, sizeof slot_num);
2104 }
2105 }
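
/* The reference block written above is just a 32-bit element count
   followed by that many 32-bit writer-cache slot numbers.  For
   illustration (hypothetical values), an encoder holding three trees
   that sit in cache slots 5, 9 and 12 is emitted as the four 32-bit
   words

     3 5 9 12

   each written in host representation by lto_output_data_stream.  */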
2106
2107
2108 /* Write all the streams in an lto_out_decl_state STATE using
2109 output block OB. */
2110
2111 void
2112 lto_output_decl_state_streams (struct output_block *ob,
2113 struct lto_out_decl_state *state)
2114 {
2115 int i;
2116
2117 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2118 write_global_stream (ob, &state->streams[i]);
2119 }
2120
2121
2122 /* Write all the references in an lto_out_decl_state STATE using
2123 output block OB and output stream OUT_STREAM. */
2124
2125 void
2126 lto_output_decl_state_refs (struct output_block *ob,
2127 struct lto_output_stream *out_stream,
2128 struct lto_out_decl_state *state)
2129 {
2130 unsigned i;
2131 uint32_t ref;
2132 tree decl;
2133
2134 /* Write a reference to FUNCTION_DECL. If there is no function,
2135 write a reference to void_type_node. */
2136 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2137 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2138 gcc_assert (ref != (unsigned)-1);
2139 lto_output_data_stream (out_stream, &ref, sizeof (uint32_t));
2140
2141 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2142 write_global_references (ob, out_stream, &state->streams[i]);
2143 }
2144
2145
2146 /* Return the written size of STATE. */
2147
2148 static size_t
2149 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2150 {
2151 int i;
2152 size_t size;
2153
2154 size = sizeof (int32_t); /* fn_ref. */
2155 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2156 {
2157 size += sizeof (int32_t); /* vector size. */
2158 size += (lto_tree_ref_encoder_size (&state->streams[i])
2159 * sizeof (int32_t));
2160 }
2161 return size;
2162 }
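
/* Put differently, a decl state occupies

     4 + LTO_N_DECL_STREAMS * 4 + 4 * (total number of trees)

   bytes: one 32-bit function reference, one 32-bit count per stream and
   one 32-bit slot number per tree.  For example (hypothetical numbers),
   a state whose streams contain 3 and 5 trees and are otherwise empty
   takes 4 + LTO_N_DECL_STREAMS * 4 + 32 bytes.  */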
2163
2164
2165 /* Write symbol T to STREAM, using CACHE to look up its cache slot.
2166 SEEN is the set of symbol names written so far. */
2167
2168 static void
2169 write_symbol (struct streamer_tree_cache_d *cache,
2170 struct lto_output_stream *stream,
2171 tree t, struct pointer_set_t *seen, bool alias)
2172 {
2173 const char *name;
2174 enum gcc_plugin_symbol_kind kind;
2175 enum gcc_plugin_symbol_visibility visibility;
2176 unsigned slot_num;
2177 unsigned HOST_WIDEST_INT size;
2178 const char *comdat;
2179 unsigned char c;
2180
2181 /* None of the following kinds of symbols are needed in the
2182 symbol table. */
2183 if (!TREE_PUBLIC (t)
2184 || is_builtin_fn (t)
2185 || DECL_ABSTRACT (t)
2186 || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
2187 return;
2188 gcc_assert (TREE_CODE (t) != RESULT_DECL);
2189
2190 gcc_assert (TREE_CODE (t) == VAR_DECL
2191 || TREE_CODE (t) == FUNCTION_DECL);
2192
2193 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2194
2195 /* This behaves like assemble_name_raw in varasm.c, performing the
2196 same name manipulations that ASM_OUTPUT_LABELREF does. */
2197 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2198
2199 if (pointer_set_contains (seen, name))
2200 return;
2201 pointer_set_insert (seen, name);
2202
2203 streamer_tree_cache_lookup (cache, t, &slot_num);
2204 gcc_assert (slot_num != (unsigned)-1);
2205
2206 if (DECL_EXTERNAL (t))
2207 {
2208 if (DECL_WEAK (t))
2209 kind = GCCPK_WEAKUNDEF;
2210 else
2211 kind = GCCPK_UNDEF;
2212 }
2213 else
2214 {
2215 if (DECL_WEAK (t))
2216 kind = GCCPK_WEAKDEF;
2217 else if (DECL_COMMON (t))
2218 kind = GCCPK_COMMON;
2219 else
2220 kind = GCCPK_DEF;
2221
2222 /* When something is defined, it should have a node attached. */
2223 gcc_assert (alias || TREE_CODE (t) != VAR_DECL
2224 || varpool_get_node (t)->symbol.definition);
2225 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2226 || (cgraph_get_node (t)
2227 && cgraph_get_node (t)->symbol.definition));
2228 }
2229
2230 /* Imitate what default_elf_asm_output_external does.
2231 When a symbol is external, we need to output it with DEFAULT
2232 visibility when compiling with -fvisibility=default, but with HIDDEN
2233 visibility when the symbol has the visibility("hidden") attribute
2234 specified. targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED
2235 and gets this right. */
2236
2237 if (DECL_EXTERNAL (t)
2238 && !targetm.binds_local_p (t))
2239 visibility = GCCPV_DEFAULT;
2240 else
2241 switch (DECL_VISIBILITY(t))
2242 {
2243 case VISIBILITY_DEFAULT:
2244 visibility = GCCPV_DEFAULT;
2245 break;
2246 case VISIBILITY_PROTECTED:
2247 visibility = GCCPV_PROTECTED;
2248 break;
2249 case VISIBILITY_HIDDEN:
2250 visibility = GCCPV_HIDDEN;
2251 break;
2252 case VISIBILITY_INTERNAL:
2253 visibility = GCCPV_INTERNAL;
2254 break;
2255 }
2256
2257 if (kind == GCCPK_COMMON
2258 && DECL_SIZE_UNIT (t)
2259 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2260 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2261 else
2262 size = 0;
2263
2264 if (DECL_ONE_ONLY (t))
2265 comdat = IDENTIFIER_POINTER (DECL_COMDAT_GROUP (t));
2266 else
2267 comdat = "";
2268
2269 lto_output_data_stream (stream, name, strlen (name) + 1);
2270 lto_output_data_stream (stream, comdat, strlen (comdat) + 1);
2271 c = (unsigned char) kind;
2272 lto_output_data_stream (stream, &c, 1);
2273 c = (unsigned char) visibility;
2274 lto_output_data_stream (stream, &c, 1);
2275 lto_output_data_stream (stream, &size, 8);
2276 lto_output_data_stream (stream, &slot_num, 4);
2277 }
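
/* Each entry written above therefore has a fixed layout, which is what
   the linker plugin side expects:

     name       - NUL-terminated assembler name, after target mangling
     comdat     - NUL-terminated comdat group name, empty if none
     kind       - 1 byte, enum gcc_plugin_symbol_kind
     visibility - 1 byte, enum gcc_plugin_symbol_visibility
     size       - 8 bytes, nonzero only for common symbols with a
                  constant DECL_SIZE_UNIT
     slot       - 4 bytes, index into the writer cache

   The multi-byte fields are written as raw host bytes by
   lto_output_data_stream.  */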
2278
2279 /* Return true if NODE should appear in the plugin symbol table. */
2280
2281 bool
2282 output_symbol_p (symtab_node node)
2283 {
2284 struct cgraph_node *cnode;
2285 if (!symtab_real_symbol_p (node))
2286 return false;
2287 /* We keep external functions in the symtab for the sake of inlining
2288 and devirtualization. We do not want to see them in the symbol table
2289 as references unless they are really used. */
2290 cnode = dyn_cast <cgraph_node> (node);
2291 if (cnode && (!node->symbol.definition || DECL_EXTERNAL (cnode->symbol.decl))
2292 && cnode->callers)
2293 return true;
2294
2295 /* Ignore all references from initializers of external variables - they
2296 are not really part of the compilation unit until they are used by
2297 folding. Some symbols, like references to external construction
2298 vtables, cannot be referred to at all. We decide this in can_refer_decl_in_current_unit_p. */
2299 if (!node->symbol.definition || DECL_EXTERNAL (node->symbol.decl))
2300 {
2301 int i;
2302 struct ipa_ref *ref;
2303 for (i = 0; ipa_ref_list_referring_iterate (&node->symbol.ref_list,
2304 i, ref); i++)
2305 {
2306 if (ref->use == IPA_REF_ALIAS)
2307 continue;
2308 if (is_a <cgraph_node> (ref->referring))
2309 return true;
2310 if (!DECL_EXTERNAL (ref->referring->symbol.decl))
2311 return true;
2312 }
2313 return false;
2314 }
2315 return true;
2316 }
2317
2318
2319 /* Write an IL symbol table to OB. The symbols written are the ones
2320 recorded in OB's decl state symtab node encoder. */
2321
2322 static void
2323 produce_symtab (struct output_block *ob)
2324 {
2325 struct streamer_tree_cache_d *cache = ob->writer_cache;
2326 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2327 struct pointer_set_t *seen;
2328 struct lto_output_stream stream;
2329 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2330 lto_symtab_encoder_iterator lsei;
2331
2332 lto_begin_section (section_name, false);
2333 free (section_name);
2334
2335 seen = pointer_set_create ();
2336 memset (&stream, 0, sizeof (stream));
2337
2338 /* Write the symbol table.
2339 First write all defined symbols and then the external declarations.
2340 This is necessary to handle cases where we have duplicated symbols. */
2341 for (lsei = lsei_start (encoder);
2342 !lsei_end_p (lsei); lsei_next (&lsei))
2343 {
2344 symtab_node node = lsei_node (lsei);
2345
2346 if (!output_symbol_p (node) || DECL_EXTERNAL (node->symbol.decl))
2347 continue;
2348 write_symbol (cache, &stream, node->symbol.decl, seen, false);
2349 }
2350 for (lsei = lsei_start (encoder);
2351 !lsei_end_p (lsei); lsei_next (&lsei))
2352 {
2353 symtab_node node = lsei_node (lsei);
2354
2355 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->symbol.decl))
2356 continue;
2357 write_symbol (cache, &stream, node->symbol.decl, seen, false);
2358 }
2359
2360 lto_write_stream (&stream);
2361 pointer_set_destroy (seen);
2362
2363 lto_end_section ();
2364 }
2365
2366
2367 /* This pass is run after all of the functions are serialized and all
2368 of the IPA passes have written their serialized forms. This pass
2369 causes the vector of all of the global decls and types used from
2370 this file to be written into a section that can then be read in to
2371 recover them on the other side. */
2372
2373 static void
2374 produce_asm_for_decls (void)
2375 {
2376 struct lto_out_decl_state *out_state;
2377 struct lto_out_decl_state *fn_out_state;
2378 struct lto_decl_header header;
2379 char *section_name;
2380 struct output_block *ob;
2381 struct lto_output_stream *header_stream, *decl_state_stream;
2382 unsigned idx, num_fns;
2383 size_t decl_state_size;
2384 int32_t num_decl_states;
2385
2386 ob = create_output_block (LTO_section_decls);
2387 ob->global = true;
2388
2389 memset (&header, 0, sizeof (struct lto_decl_header));
2390
2391 section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
2392 lto_begin_section (section_name, !flag_wpa);
2393 free (section_name);
2394
2395 /* Make string 0 be a NULL string. */
2396 streamer_write_char_stream (ob->string_stream, 0);
2397
2398 gcc_assert (!alias_pairs);
2399
2400 /* Write the global symbols. */
2401 out_state = lto_get_out_decl_state ();
2402 num_fns = lto_function_decl_states.length ();
2403 lto_output_decl_state_streams (ob, out_state);
2404 for (idx = 0; idx < num_fns; idx++)
2405 {
2406 fn_out_state =
2407 lto_function_decl_states[idx];
2408 lto_output_decl_state_streams (ob, fn_out_state);
2409 }
2410
2411 header.lto_header.major_version = LTO_major_version;
2412 header.lto_header.minor_version = LTO_minor_version;
2413
2414 /* Currently not used. This field would allow us to preallocate
2415 the globals vector, so that it need not be resized as it is extended. */
2416 header.num_nodes = -1;
2417
2418 /* Compute the total size of all decl out states. */
2419 decl_state_size = sizeof (int32_t);
2420 decl_state_size += lto_out_decl_state_written_size (out_state);
2421 for (idx = 0; idx < num_fns; idx++)
2422 {
2423 fn_out_state =
2424 lto_function_decl_states[idx];
2425 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
2426 }
2427 header.decl_state_size = decl_state_size;
2428
2429 header.main_size = ob->main_stream->total_size;
2430 header.string_size = ob->string_stream->total_size;
2431
2432 header_stream = XCNEW (struct lto_output_stream);
2433 lto_output_data_stream (header_stream, &header, sizeof header);
2434 lto_write_stream (header_stream);
2435 free (header_stream);
2436
2437 /* Write the main out-decl state, followed by out-decl states of
2438 functions. */
2439 decl_state_stream = XCNEW (struct lto_output_stream);
2440 num_decl_states = num_fns + 1;
2441 lto_output_data_stream (decl_state_stream, &num_decl_states,
2442 sizeof (num_decl_states));
2443 lto_output_decl_state_refs (ob, decl_state_stream, out_state);
2444 for (idx = 0; idx < num_fns; idx++)
2445 {
2446 fn_out_state =
2447 lto_function_decl_states[idx];
2448 lto_output_decl_state_refs (ob, decl_state_stream, fn_out_state);
2449 }
2450 lto_write_stream (decl_state_stream);
2451 free (decl_state_stream);
2452
2453 lto_write_stream (ob->main_stream);
2454 lto_write_stream (ob->string_stream);
2455
2456 lto_end_section ();
2457
2458 /* Write the symbol table. It is used by the linker to determine
2459 dependencies, and thus we can skip it for WPA. */
2460 if (!flag_wpa)
2461 produce_symtab (ob);
2462
2463 /* Write command line opts. */
2464 lto_write_options ();
2465
2466 /* Deallocate memory and clean up. */
2467 for (idx = 0; idx < num_fns; idx++)
2468 {
2469 fn_out_state =
2470 lto_function_decl_states[idx];
2471 lto_delete_out_decl_state (fn_out_state);
2472 }
2473 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
2474 lto_function_decl_states.release ();
2475 destroy_output_block (ob);
2476 }
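
/* A summary of the LTO_section_decls layout produced above, derived
   directly from the writer code:

     lto_decl_header  - versions, num_nodes (currently -1),
                        decl_state_size, main_size, string_size
     decl states      - an int32 count equal to the number of function
                        states plus one, then the references of the
                        global state followed by one block per function
     main stream      - the pickled global trees
     string stream    - the string table, slot 0 being the NULL string

   The plugin symbol table (only when not running WPA) and the command
   line options are emitted afterwards as separate sections by
   produce_symtab and lto_write_options and are not part of this
   section.  */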
2477
2478
2479 namespace {
2480
2481 const pass_data pass_data_ipa_lto_finish_out =
2482 {
2483 IPA_PASS, /* type */
2484 "lto_decls_out", /* name */
2485 OPTGROUP_NONE, /* optinfo_flags */
2486 true, /* has_gate */
2487 false, /* has_execute */
2488 TV_IPA_LTO_DECL_OUT, /* tv_id */
2489 0, /* properties_required */
2490 0, /* properties_provided */
2491 0, /* properties_destroyed */
2492 0, /* todo_flags_start */
2493 0, /* todo_flags_finish */
2494 };
2495
2496 class pass_ipa_lto_finish_out : public ipa_opt_pass_d
2497 {
2498 public:
2499 pass_ipa_lto_finish_out(gcc::context *ctxt)
2500 : ipa_opt_pass_d(pass_data_ipa_lto_finish_out, ctxt,
2501 NULL, /* generate_summary */
2502 produce_asm_for_decls, /* write_summary */
2503 NULL, /* read_summary */
2504 produce_asm_for_decls, /* write_optimization_summary */
2505 NULL, /* read_optimization_summary */
2506 NULL, /* stmt_fixup */
2507 0, /* function_transform_todo_flags_start */
2508 NULL, /* function_transform */
2509 NULL) /* variable_transform */
2510 {}
2511
2512 /* opt_pass methods: */
2513 bool gate () { return gate_lto_out (); }
2514
2515 }; // class pass_ipa_lto_finish_out
2516
2517 } // anon namespace
2518
2519 ipa_opt_pass_d *
2520 make_pass_ipa_lto_finish_out (gcc::context *ctxt)
2521 {
2522 return new pass_ipa_lto_finish_out (ctxt);
2523 }