re PR c/61405 (Not emitting "enumeration value not handled in switch" warning for...
[gcc.git] / gcc / lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2014 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "stor-layout.h"
29 #include "stringpool.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "hashtab.h"
35 #include "hash-set.h"
36 #include "basic-block.h"
37 #include "tree-ssa-alias.h"
38 #include "internal-fn.h"
39 #include "gimple-expr.h"
40 #include "is-a.h"
41 #include "gimple.h"
42 #include "gimple-iterator.h"
43 #include "gimple-ssa.h"
44 #include "tree-ssanames.h"
45 #include "tree-pass.h"
46 #include "function.h"
47 #include "diagnostic-core.h"
48 #include "inchash.h"
49 #include "except.h"
50 #include "lto-symtab.h"
51 #include "lto-streamer.h"
52 #include "data-streamer.h"
53 #include "gimple-streamer.h"
54 #include "tree-streamer.h"
55 #include "streamer-hooks.h"
56 #include "cfgloop.h"
57 #include "builtins.h"
58
59
60 static void lto_write_tree (struct output_block*, tree, bool);
61
/* Reset the current-location state stored in output block OB.
   Location streaming (lto_output_location) is delta-encoded against
   these fields, so this establishes the known initial state: no file,
   line 0, column 0.  */

static void
clear_line_info (struct output_block *ob)
{
  ob->current_file = NULL;
  ob->current_line = 0;
  ob->current_col = 0;
}
71
72
/* Create the output block and return it.  SECTION_TYPE is
   LTO_section_function_body or LTO_static_initializer.
   The returned block owns its streams, writer cache, string hash
   table and obstack; release it with destroy_output_block.  */

struct output_block *
create_output_block (enum lto_section_type section_type)
{
  struct output_block *ob = XCNEW (struct output_block);

  ob->section_type = section_type;
  ob->decl_state = lto_get_out_decl_state ();
  ob->main_stream = XCNEW (struct lto_output_stream);
  ob->string_stream = XCNEW (struct lto_output_stream);
  /* First argument disables something when in WPA mode; presumably
     tree hashing in the writer cache — confirm against
     streamer_tree_cache_create's parameter list.  */
  ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);

  /* The CFG stream is only used for function bodies.  */
  if (section_type == LTO_section_function_body)
    ob->cfg_stream = XCNEW (struct lto_output_stream);

  /* Start location streaming from a known state.  */
  clear_line_info (ob);

  ob->string_hash_table = new hash_table<string_slot_hasher> (37);
  gcc_obstack_init (&ob->obstack);

  return ob;
}
97
98
99 /* Destroy the output block OB. */
100
101 void
102 destroy_output_block (struct output_block *ob)
103 {
104 enum lto_section_type section_type = ob->section_type;
105
106 delete ob->string_hash_table;
107 ob->string_hash_table = NULL;
108
109 free (ob->main_stream);
110 free (ob->string_stream);
111 if (section_type == LTO_section_function_body)
112 free (ob->cfg_stream);
113
114 streamer_tree_cache_delete (ob->writer_cache);
115 obstack_free (&ob->obstack, NULL);
116
117 free (ob);
118 }
119
120
/* Look up NODE in the type table and write the index for it to OB.
   Emits an LTO_type_ref record tag followed by the index; the reader
   must consume these two items in the same order.  */

static void
output_type_ref (struct output_block *ob, tree node)
{
  streamer_write_record_start (ob, LTO_type_ref);
  lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
}
129
130
/* Return true if tree node T is written to various tables.  For these
   nodes, we sometimes want to write their physical representation
   (via lto_output_tree), and sometimes we need to emit an index
   reference into a table (via lto_output_tree_ref).

   The order of the tests below matters: the early cases carve
   exceptions out of the final "all types and decls are indexable"
   rule.  */

static bool
tree_is_indexable (tree t)
{
  /* Parameters and return values of functions of variably modified types
     must go to global stream, because they may be used in the type
     definition.  */
  if (TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
    return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
  /* IMPORTED_DECL is put into BLOCK and thus it never can be shared.  */
  else if (TREE_CODE (t) == IMPORTED_DECL)
    return false;
  /* Function-local decls of these kinds are streamed with the function
     body rather than indexed in the global tables.  */
  else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
	    || TREE_CODE (t) == TYPE_DECL
	    || TREE_CODE (t) == CONST_DECL
	    || TREE_CODE (t) == NAMELIST_DECL)
	   && decl_function_context (t))
    return false;
  else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
    return false;
  /* Variably modified types need to be streamed alongside function
     bodies because they can refer to local entities.  Together with
     them we have to localize their members as well.
     ??? In theory that includes non-FIELD_DECLs as well.  */
  else if (TYPE_P (t)
	   && variably_modified_type_p (t, NULL_TREE))
    return false;
  else if (TREE_CODE (t) == FIELD_DECL
	   && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
    return false;
  else
    return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
}
168
169
/* Output info about new location into bitpack BP.
   After outputting bitpack, lto_output_location_data has
   to be done to output actual data.

   The location is delta-encoded against the state cached in OB
   (current_file/line/col): a changed-bit is packed for each of the
   three components, and only the components that changed are emitted.
   The reader must unpack in exactly this order.  */

void
lto_output_location (struct output_block *ob, struct bitpack_d *bp,
		     location_t loc)
{
  expanded_location xloc;

  /* Strip any ad-hoc data; only the pure locus is streamed.  */
  loc = LOCATION_LOCUS (loc);
  bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
  if (loc == UNKNOWN_LOCATION)
    return;

  xloc = expand_location (loc);

  /* Three changed-flags, in file/line/column order.  */
  bp_pack_value (bp, ob->current_file != xloc.file, 1);
  bp_pack_value (bp, ob->current_line != xloc.line, 1);
  bp_pack_value (bp, ob->current_col != xloc.column, 1);

  if (ob->current_file != xloc.file)
    bp_pack_string (ob, bp, xloc.file, true);
  ob->current_file = xloc.file;

  if (ob->current_line != xloc.line)
    bp_pack_var_len_unsigned (bp, xloc.line);
  ob->current_line = xloc.line;

  if (ob->current_col != xloc.column)
    bp_pack_var_len_unsigned (bp, xloc.column);
  ob->current_col = xloc.column;
}
203
204
/* If EXPR is an indexable tree node, output a reference to it to
   output block OB.  Otherwise, output the physical representation of
   EXPR to OB.

   Each case writes a record tag identifying the reference kind
   followed by the index into the corresponding decl-state table.  */

static void
lto_output_tree_ref (struct output_block *ob, tree expr)
{
  enum tree_code code;

  /* All types go through the type table.  */
  if (TYPE_P (expr))
    {
      output_type_ref (ob, expr);
      return;
    }

  code = TREE_CODE (expr);
  switch (code)
    {
    case SSA_NAME:
      /* SSA names are referenced by version number, not by index
	 into a decl table.  */
      streamer_write_record_start (ob, LTO_ssa_name_ref);
      streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
      break;

    case FIELD_DECL:
      streamer_write_record_start (ob, LTO_field_decl_ref);
      lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case FUNCTION_DECL:
      streamer_write_record_start (ob, LTO_function_decl_ref);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case VAR_DECL:
    case DEBUG_EXPR_DECL:
      /* Only file-scope or static variables may be emitted as global
	 decl references; see tree_is_indexable.  */
      gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
      /* Fall through.  */
    case PARM_DECL:
      streamer_write_record_start (ob, LTO_global_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case CONST_DECL:
      streamer_write_record_start (ob, LTO_const_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case IMPORTED_DECL:
      gcc_assert (decl_function_context (expr) == NULL);
      streamer_write_record_start (ob, LTO_imported_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TYPE_DECL:
      streamer_write_record_start (ob, LTO_type_decl_ref);
      lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMELIST_DECL:
      streamer_write_record_start (ob, LTO_namelist_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMESPACE_DECL:
      streamer_write_record_start (ob, LTO_namespace_decl_ref);
      lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case LABEL_DECL:
      streamer_write_record_start (ob, LTO_label_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case RESULT_DECL:
      streamer_write_record_start (ob, LTO_result_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TRANSLATION_UNIT_DECL:
      streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    default:
      /* No other node is indexable, so it should have been handled by
	 lto_output_tree.  */
      gcc_unreachable ();
    }
}
293
294
295 /* Return true if EXPR is a tree node that can be written to disk. */
296
297 static inline bool
298 lto_is_streamable (tree expr)
299 {
300 enum tree_code code = TREE_CODE (expr);
301
302 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
303 name version in lto_output_tree_ref (see output_ssa_names). */
304 return !is_lang_specific (expr)
305 && code != SSA_NAME
306 && code != CALL_EXPR
307 && code != LANG_TYPE
308 && code != MODIFY_EXPR
309 && code != INIT_EXPR
310 && code != TARGET_EXPR
311 && code != BIND_EXPR
312 && code != WITH_CLEANUP_EXPR
313 && code != STATEMENT_LIST
314 && (code == CASE_LABEL_EXPR
315 || code == DECL_EXPR
316 || TREE_CODE_CLASS (code) != tcc_statement);
317 }
318
319
/* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL.

   Returns the real initializer, or error_mark_node as a sentinel when
   the initializer should not be streamed inline (it is either emitted
   in a separate section by the encoder or dropped).  */

static tree
get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
{
  gcc_checking_assert (DECL_P (expr)
		       && TREE_CODE (expr) != FUNCTION_DECL
		       && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);

  /* Handle DECL_INITIAL for symbols.  */
  tree initial = DECL_INITIAL (expr);
  if (TREE_CODE (expr) == VAR_DECL
      && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
      && !DECL_IN_CONSTANT_POOL (expr)
      && initial)
    {
      varpool_node *vnode;
      /* Extra section needs about 30 bytes; do not produce it for simple
	 scalar values.  */
      if (TREE_CODE (DECL_INITIAL (expr)) == CONSTRUCTOR
	  || !(vnode = varpool_node::get (expr))
	  || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
	initial = error_mark_node;
    }

  return initial;
}
347
348
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  Helper for lto_write_tree; the tree
   header must already have been written by the caller.  */

static void
lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
{
  /* Pack all the non-pointer fields in EXPR into a bitpack and write
     the resulting bitpack.  */
  bitpack_d bp = bitpack_create (ob->main_stream);
  streamer_pack_tree_bitfields (ob, &bp, expr);
  streamer_write_bitpack (&bp);

  /* Write all the pointer fields in EXPR.  */
  streamer_write_tree_body (ob, expr, ref_p);

  /* Write any LTO-specific data to OB.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    {
      /* Handle DECL_INITIAL for symbols.  This may substitute
	 error_mark_node for initializers streamed elsewhere.  */
      tree initial = get_symbol_initial_value
			 (ob->decl_state->symtab_node_encoder, expr);
      stream_write_tree (ob, initial, ref_p);
    }
}
377
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  ICEs if EXPR is a node kind that may
   never appear in an LTO stream.  */

static void
lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
{
  if (!lto_is_streamable (expr))
    internal_error ("tree code %qs is not supported in LTO streams",
		    get_tree_code_name (TREE_CODE (expr)));

  /* Write the header, containing everything needed to materialize
     EXPR on the reading side.  */
  streamer_write_tree_header (ob, expr);

  lto_write_tree_1 (ob, expr, ref_p);

  /* Mark the end of EXPR.  */
  streamer_write_zero (ob);
}
399
/* Emit the physical representation of tree node EXPR to output block
   OB.  HASH is the hash value to record for EXPR in the writer cache.
   If THIS_REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  REF_P is used for streaming siblings of
   EXPR.  */

static void
lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
		   bool ref_p, bool this_ref_p)
{
  unsigned ix;

  /* An indexable node must be emitted as a reference, never through
     this path.  */
  gcc_checking_assert (expr != NULL_TREE
		       && !(this_ref_p && tree_is_indexable (expr)));

  /* EXPR must not have been streamed before; the insert is expected
     to create a fresh cache entry.  */
  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
					      expr, hash, &ix);
  gcc_assert (!exists_p);
  if (streamer_handle_as_builtin_p (expr))
    {
      /* MD and NORMAL builtins do not need to be written out
	 completely as they are always instantiated by the
	 compiler on startup.  The only builtins that need to
	 be written out are BUILT_IN_FRONTEND.  For all other
	 builtins, we simply write the class and code.  */
      streamer_write_builtin (ob, expr);
    }
  else if (TREE_CODE (expr) == INTEGER_CST
	   && !TREE_OVERFLOW (expr))
    {
      /* Shared INTEGER_CST nodes are special because they need their
	 original type to be materialized by the reader (to implement
	 TYPE_CACHED_VALUES).  */
      streamer_write_integer_cst (ob, expr, ref_p);
    }
  else
    {
      /* This is the first time we see EXPR, write its fields
	 to OB.  */
      lto_write_tree (ob, expr, ref_p);
    }
}
440
/* Depth-first walk over the tree graph reachable from a given node,
   collecting strongly connected components (Tarjan-style: dfsnum/low
   fields and an SCC stack) so trees can be streamed SCC by SCC.  */

class DFS
{
public:
  /* Constructing the object performs the whole walk from EXPR;
     results accumulate in sccstack.  */
  DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
       bool single_p);
  ~DFS ();

  /* A tree together with the hash value computed for it.  */
  struct scc_entry
  {
    tree t;
    hashval_t hash;
  };
  /* Stack of entries; members of one SCC are adjacent.  */
  vec<scc_entry> sccstack;

private:
  /* Per-node DFS state: discovery number and the smallest dfsnum
     reachable from the node (Tarjan's lowlink).  */
  struct sccs
  {
    unsigned int dfsnum;
    unsigned int low;
  };

  static int scc_entry_compare (const void *, const void *);

  void DFS_write_tree_body (struct output_block *ob,
			    tree expr, sccs *expr_state, bool ref_p,
			    bool single_p);

  void DFS_write_tree (struct output_block *ob, sccs *from_state,
		       tree expr, bool ref_p, bool this_ref_p,
		       bool single_p);
  hashval_t
  hash_scc (struct output_block *ob, unsigned first, unsigned size);

  /* Next DFS discovery number to hand out.  */
  unsigned int next_dfs_num;
  /* Maps each visited tree to its DFS state (allocated on the
     obstack below).  */
  hash_map<tree, sccs *> sccstate;
  struct obstack sccstate_obstack;
};
478
/* Construct the walker and immediately run the DFS from EXPR.
   See DFS_write_tree for the meaning of REF_P, THIS_REF_P and
   SINGLE_P.  */

DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
	  bool single_p)
{
  sccstack.create (0);
  gcc_obstack_init (&sccstate_obstack);
  next_dfs_num = 1;
  DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p, single_p);
}
487
/* Release the SCC stack and the obstack holding all sccs records.  */

DFS::~DFS ()
{
  sccstack.release ();
  obstack_free (&sccstate_obstack, NULL);
}
493
/* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
   DFS recurse for all tree edges originating from it.

   The set of edges followed here must mirror the pointer fields
   written by streamer_write_tree_body, so that everything the reader
   materializes has been visited (and possibly streamed) first.  */

void
DFS::DFS_write_tree_body (struct output_block *ob,
			  tree expr, sccs *expr_state, bool ref_p,
			  bool single_p)
{
#define DFS_follow_tree_edge(DEST) \
  DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p, single_p)

  enum tree_code code;

  code = TREE_CODE (expr);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (TREE_CODE (expr) != IDENTIFIER_NODE)
	DFS_follow_tree_edge (TREE_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
	DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      DFS_follow_tree_edge (TREE_REALPART (expr));
      DFS_follow_tree_edge (TREE_IMAGPART (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (expr)
	  && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
	  && ANON_AGGRNAME_P (DECL_NAME (expr)))
	;
      else
	DFS_follow_tree_edge (DECL_NAME (expr));
      DFS_follow_tree_edge (DECL_CONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      DFS_follow_tree_edge (DECL_SIZE (expr));
      DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));

      /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
	 special handling in LTO, it must be handled by streamer hooks.  */

      DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));

      /* Do not follow DECL_ABSTRACT_ORIGIN.  We cannot handle debug information
	 for early inlining so drop it on the floor instead of ICEing in
	 dwarf2out.c.  */

      if ((TREE_CODE (expr) == VAR_DECL
	   || TREE_CODE (expr) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (expr))
	DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
      if (TREE_CODE (expr) == VAR_DECL)
	DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (TREE_CODE (expr) == TYPE_DECL)
	DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* Make sure we don't inadvertently set the assembler name.  */
      if (DECL_ASSEMBLER_NAME_SET_P (expr))
	DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
      DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
      DFS_follow_tree_edge (DECL_FCONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      DFS_follow_tree_edge (DECL_VINDEX (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
      /* Do not DECL_FUNCTION_SPECIFIC_TARGET.  They will be regenerated.  */
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      DFS_follow_tree_edge (TYPE_SIZE (expr));
      DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
      DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
      DFS_follow_tree_edge (TYPE_NAME (expr));
      /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
	 reconstructed during fixup.  */
      /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
	 during fixup.  */
      DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
      DFS_follow_tree_edge (TYPE_CONTEXT (expr));
      /* TYPE_CANONICAL is re-computed during type merging, so no need
	 to follow it here.  */
      DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	DFS_follow_tree_edge (TYPE_VALUES (expr));
      else if (TREE_CODE (expr) == ARRAY_TYPE)
	DFS_follow_tree_edge (TYPE_DOMAIN (expr));
      else if (RECORD_OR_UNION_TYPE_P (expr))
	for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
	  DFS_follow_tree_edge (t);
      else if (TREE_CODE (expr) == FUNCTION_TYPE
	       || TREE_CODE (expr) == METHOD_TYPE)
	DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));

      if (!POINTER_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_MINVAL (expr));
      DFS_follow_tree_edge (TYPE_MAXVAL (expr));
      if (RECORD_OR_UNION_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_BINFO (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      DFS_follow_tree_edge (TREE_PURPOSE (expr));
      DFS_follow_tree_edge (TREE_VALUE (expr));
      DFS_follow_tree_edge (TREE_CHAIN (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    {
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_OPERAND (expr, i));
      DFS_follow_tree_edge (TREE_BLOCK (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    {
      for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
	if (VAR_OR_FUNCTION_DECL_P (t)
	    && DECL_EXTERNAL (t))
	  /* We have to stream externals in the block chain as
	     non-references.  See also
	     tree-streamer-out.c:streamer_write_chain.  */
	  DFS_write_tree (ob, expr_state, t, ref_p, false, single_p);
	else
	  DFS_follow_tree_edge (t);

      DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));

      /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
	 handle - those that represent inlined function scopes.
	 For the rest, drop them on the floor instead of ICEing
	 in dwarf2out.c.  */
      if (inlined_function_outer_scope_p (expr))
	{
	  tree ultimate_origin = block_ultimate_origin (expr);
	  DFS_follow_tree_edge (ultimate_origin);
	}
      /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
	 information for early inlined BLOCKs so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
	 streaming time.  */

      /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
	 list is re-constructed from BLOCK_SUPERCONTEXT.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree t;

      /* Note that the number of BINFO slots has already been emitted in
	 EXPR's header (see streamer_write_tree_header) because this length
	 is needed to build the empty BINFO node on the reader side.  */
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
	DFS_follow_tree_edge (t);
      DFS_follow_tree_edge (BINFO_OFFSET (expr));
      DFS_follow_tree_edge (BINFO_VTABLE (expr));
      DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));

      /* The number of BINFO_BASE_ACCESSES has already been emitted in
	 EXPR's bitfield section.  */
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
	DFS_follow_tree_edge (t);

      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
	{
	  DFS_follow_tree_edge (index);
	  DFS_follow_tree_edge (value);
	}
    }

  if (code == OMP_CLAUSE)
    {
      int i;
      for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
	DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
      DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
    }

#undef DFS_follow_tree_edge
}
727
/* Return a hash value for the tree T.
   CACHE holds hash values of trees outside current SCC.  MAP, if non-NULL,
   may hold hash values of trees inside current SCC.  */
731
732 static hashval_t
733 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
734 {
735 inchash::hash hstate;
736
737 #define visit(SIBLING) \
738 do { \
739 unsigned ix; \
740 if (!SIBLING) \
741 hstate.add_int (0); \
742 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
743 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
744 else if (map) \
745 hstate.add_int (*map->get (SIBLING)); \
746 else \
747 hstate.add_int (1); \
748 } while (0)
749
750 /* Hash TS_BASE. */
751 enum tree_code code = TREE_CODE (t);
752 hstate.add_int (code);
753 if (!TYPE_P (t))
754 {
755 hstate.add_flag (TREE_SIDE_EFFECTS (t));
756 hstate.add_flag (TREE_CONSTANT (t));
757 hstate.add_flag (TREE_READONLY (t));
758 hstate.add_flag (TREE_PUBLIC (t));
759 }
760 hstate.add_flag (TREE_ADDRESSABLE (t));
761 hstate.add_flag (TREE_THIS_VOLATILE (t));
762 if (DECL_P (t))
763 hstate.add_flag (DECL_UNSIGNED (t));
764 else if (TYPE_P (t))
765 hstate.add_flag (TYPE_UNSIGNED (t));
766 if (TYPE_P (t))
767 hstate.add_flag (TYPE_ARTIFICIAL (t));
768 else
769 hstate.add_flag (TREE_NO_WARNING (t));
770 hstate.add_flag (TREE_NOTHROW (t));
771 hstate.add_flag (TREE_STATIC (t));
772 hstate.add_flag (TREE_PROTECTED (t));
773 hstate.add_flag (TREE_DEPRECATED (t));
774 if (code != TREE_BINFO)
775 hstate.add_flag (TREE_PRIVATE (t));
776 if (TYPE_P (t))
777 {
778 hstate.add_flag (TYPE_SATURATING (t));
779 hstate.add_flag (TYPE_ADDR_SPACE (t));
780 }
781 else if (code == SSA_NAME)
782 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
783 hstate.commit_flag ();
784
785 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
786 {
787 int i;
788 hstate.add_wide_int (TREE_INT_CST_NUNITS (t));
789 hstate.add_wide_int (TREE_INT_CST_EXT_NUNITS (t));
790 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
791 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
792 }
793
794 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
795 {
796 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
797 hstate.add_flag (r.cl);
798 hstate.add_flag (r.sign);
799 hstate.add_flag (r.signalling);
800 hstate.add_flag (r.canonical);
801 hstate.commit_flag ();
802 hstate.add_int (r.uexp);
803 hstate.add (r.sig, sizeof (r.sig));
804 }
805
806 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
807 {
808 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
809 hstate.add_int (f.mode);
810 hstate.add_int (f.data.low);
811 hstate.add_int (f.data.high);
812 }
813
814 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
815 {
816 hstate.add_wide_int (DECL_MODE (t));
817 hstate.add_flag (DECL_NONLOCAL (t));
818 hstate.add_flag (DECL_VIRTUAL_P (t));
819 hstate.add_flag (DECL_IGNORED_P (t));
820 hstate.add_flag (DECL_ABSTRACT (t));
821 hstate.add_flag (DECL_ARTIFICIAL (t));
822 hstate.add_flag (DECL_USER_ALIGN (t));
823 hstate.add_flag (DECL_PRESERVE_P (t));
824 hstate.add_flag (DECL_EXTERNAL (t));
825 hstate.add_flag (DECL_GIMPLE_REG_P (t));
826 hstate.commit_flag ();
827 hstate.add_int (DECL_ALIGN (t));
828 if (code == LABEL_DECL)
829 {
830 hstate.add_int (EH_LANDING_PAD_NR (t));
831 hstate.add_int (LABEL_DECL_UID (t));
832 }
833 else if (code == FIELD_DECL)
834 {
835 hstate.add_flag (DECL_PACKED (t));
836 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
837 hstate.add_int (DECL_OFFSET_ALIGN (t));
838 }
839 else if (code == VAR_DECL)
840 {
841 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
842 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
843 }
844 if (code == RESULT_DECL
845 || code == PARM_DECL
846 || code == VAR_DECL)
847 {
848 hstate.add_flag (DECL_BY_REFERENCE (t));
849 if (code == VAR_DECL
850 || code == PARM_DECL)
851 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
852 }
853 hstate.commit_flag ();
854 }
855
856 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
857 hstate.add_int (DECL_REGISTER (t));
858
859 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
860 {
861 hstate.add_flag (DECL_COMMON (t));
862 hstate.add_flag (DECL_DLLIMPORT_P (t));
863 hstate.add_flag (DECL_WEAK (t));
864 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
865 hstate.add_flag (DECL_COMDAT (t));
866 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
867 hstate.add_int (DECL_VISIBILITY (t));
868 if (code == VAR_DECL)
869 {
870 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
871 hstate.add_flag (DECL_HARD_REGISTER (t));
872 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
873 }
874 if (TREE_CODE (t) == FUNCTION_DECL)
875 {
876 hstate.add_flag (DECL_FINAL_P (t));
877 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
878 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
879 }
880 hstate.commit_flag ();
881 }
882
883 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
884 {
885 hstate.add_int (DECL_BUILT_IN_CLASS (t));
886 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
887 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
888 hstate.add_flag (DECL_UNINLINABLE (t));
889 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
890 hstate.add_flag (DECL_IS_NOVOPS (t));
891 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
892 hstate.add_flag (DECL_IS_MALLOC (t));
893 hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
894 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
895 hstate.add_flag (DECL_STATIC_CHAIN (t));
896 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
897 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
898 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
899 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
900 hstate.add_flag (DECL_PURE_P (t));
901 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
902 hstate.commit_flag ();
903 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
904 hstate.add_int (DECL_FUNCTION_CODE (t));
905 }
906
907 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
908 {
909 hstate.add_wide_int (TYPE_MODE (t));
910 hstate.add_flag (TYPE_STRING_FLAG (t));
911 hstate.add_flag (TYPE_NO_FORCE_BLK (t));
912 hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
913 hstate.add_flag (TYPE_PACKED (t));
914 hstate.add_flag (TYPE_RESTRICT (t));
915 hstate.add_flag (TYPE_USER_ALIGN (t));
916 hstate.add_flag (TYPE_READONLY (t));
917 if (RECORD_OR_UNION_TYPE_P (t))
918 {
919 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
920 hstate.add_flag (TYPE_FINAL_P (t));
921 }
922 else if (code == ARRAY_TYPE)
923 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
924 hstate.commit_flag ();
925 hstate.add_int (TYPE_PRECISION (t));
926 hstate.add_int (TYPE_ALIGN (t));
927 hstate.add_int ((TYPE_ALIAS_SET (t) == 0
928 || (!in_lto_p
929 && get_alias_set (t) == 0))
930 ? 0 : -1);
931 }
932
933 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
934 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
935 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
936
937 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
938 gcc_unreachable ();
939
940 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
941 hstate.add (t, sizeof (struct cl_optimization));
942
943 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
944 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
945
946 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
947 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
948
949 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
950 {
951 if (code != IDENTIFIER_NODE)
952 visit (TREE_TYPE (t));
953 }
954
955 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
956 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
957 visit (VECTOR_CST_ELT (t, i));
958
959 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
960 {
961 visit (TREE_REALPART (t));
962 visit (TREE_IMAGPART (t));
963 }
964
965 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
966 {
967 /* Drop names that were created for anonymous entities. */
968 if (DECL_NAME (t)
969 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
970 && ANON_AGGRNAME_P (DECL_NAME (t)))
971 ;
972 else
973 visit (DECL_NAME (t));
974 if (DECL_FILE_SCOPE_P (t))
975 ;
976 else
977 visit (DECL_CONTEXT (t));
978 }
979
980 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
981 {
982 visit (DECL_SIZE (t));
983 visit (DECL_SIZE_UNIT (t));
984 visit (DECL_ATTRIBUTES (t));
985 if ((code == VAR_DECL
986 || code == PARM_DECL)
987 && DECL_HAS_VALUE_EXPR_P (t))
988 visit (DECL_VALUE_EXPR (t));
989 if (code == VAR_DECL
990 && DECL_HAS_DEBUG_EXPR_P (t))
991 visit (DECL_DEBUG_EXPR (t));
992 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
993 be able to call get_symbol_initial_value. */
994 }
995
996 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
997 {
998 if (code == TYPE_DECL)
999 visit (DECL_ORIGINAL_TYPE (t));
1000 }
1001
1002 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1003 {
1004 if (DECL_ASSEMBLER_NAME_SET_P (t))
1005 visit (DECL_ASSEMBLER_NAME (t));
1006 }
1007
1008 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1009 {
1010 visit (DECL_FIELD_OFFSET (t));
1011 visit (DECL_BIT_FIELD_TYPE (t));
1012 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1013 visit (DECL_FIELD_BIT_OFFSET (t));
1014 visit (DECL_FCONTEXT (t));
1015 }
1016
1017 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1018 {
1019 visit (DECL_VINDEX (t));
1020 visit (DECL_FUNCTION_PERSONALITY (t));
1021 /* Do not follow DECL_FUNCTION_SPECIFIC_TARGET. */
1022 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1023 }
1024
1025 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1026 {
1027 visit (TYPE_SIZE (t));
1028 visit (TYPE_SIZE_UNIT (t));
1029 visit (TYPE_ATTRIBUTES (t));
1030 visit (TYPE_NAME (t));
1031 visit (TYPE_MAIN_VARIANT (t));
1032 if (TYPE_FILE_SCOPE_P (t))
1033 ;
1034 else
1035 visit (TYPE_CONTEXT (t));
1036 visit (TYPE_STUB_DECL (t));
1037 }
1038
1039 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1040 {
1041 if (code == ENUMERAL_TYPE)
1042 visit (TYPE_VALUES (t));
1043 else if (code == ARRAY_TYPE)
1044 visit (TYPE_DOMAIN (t));
1045 else if (RECORD_OR_UNION_TYPE_P (t))
1046 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1047 visit (f);
1048 else if (code == FUNCTION_TYPE
1049 || code == METHOD_TYPE)
1050 visit (TYPE_ARG_TYPES (t));
1051 if (!POINTER_TYPE_P (t))
1052 visit (TYPE_MINVAL (t));
1053 visit (TYPE_MAXVAL (t));
1054 if (RECORD_OR_UNION_TYPE_P (t))
1055 visit (TYPE_BINFO (t));
1056 }
1057
1058 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1059 {
1060 visit (TREE_PURPOSE (t));
1061 visit (TREE_VALUE (t));
1062 visit (TREE_CHAIN (t));
1063 }
1064
1065 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1066 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1067 visit (TREE_VEC_ELT (t, i));
1068
1069 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1070 {
1071 hstate.add_wide_int (TREE_OPERAND_LENGTH (t));
1072 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1073 visit (TREE_OPERAND (t, i));
1074 }
1075
1076 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1077 {
1078 unsigned i;
1079 tree b;
1080 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1081 visit (b);
1082 visit (BINFO_OFFSET (t));
1083 visit (BINFO_VTABLE (t));
1084 visit (BINFO_VPTR_FIELD (t));
1085 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1086 visit (b);
1087 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1088 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1089 }
1090
1091 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1092 {
1093 unsigned i;
1094 tree index, value;
1095 hstate.add_wide_int (CONSTRUCTOR_NELTS (t));
1096 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1097 {
1098 visit (index);
1099 visit (value);
1100 }
1101 }
1102
1103 if (code == OMP_CLAUSE)
1104 {
1105 int i;
1106 HOST_WIDE_INT val;
1107
1108 hstate.add_wide_int (OMP_CLAUSE_CODE (t));
1109 switch (OMP_CLAUSE_CODE (t))
1110 {
1111 case OMP_CLAUSE_DEFAULT:
1112 val = OMP_CLAUSE_DEFAULT_KIND (t);
1113 break;
1114 case OMP_CLAUSE_SCHEDULE:
1115 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1116 break;
1117 case OMP_CLAUSE_DEPEND:
1118 val = OMP_CLAUSE_DEPEND_KIND (t);
1119 break;
1120 case OMP_CLAUSE_MAP:
1121 val = OMP_CLAUSE_MAP_KIND (t);
1122 break;
1123 case OMP_CLAUSE_PROC_BIND:
1124 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1125 break;
1126 case OMP_CLAUSE_REDUCTION:
1127 val = OMP_CLAUSE_REDUCTION_CODE (t);
1128 break;
1129 default:
1130 val = 0;
1131 break;
1132 }
1133 hstate.add_wide_int (val);
1134 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1135 visit (OMP_CLAUSE_OPERAND (t, i));
1136 visit (OMP_CLAUSE_CHAIN (t));
1137 }
1138
1139 return hstate.end ();
1140
1141 #undef visit
1142 }
1143
1144 /* Compare two SCC entries by their hash value for qsorting them. */
1145
1146 int
1147 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1148 {
1149 const scc_entry *p1 = (const scc_entry *) p1_;
1150 const scc_entry *p2 = (const scc_entry *) p2_;
1151 if (p1->hash < p2->hash)
1152 return -1;
1153 else if (p1->hash > p2->hash)
1154 return 1;
1155 return 0;
1156 }
1157
/* Return a hash value for the SCC on the SCC stack from FIRST with
   size SIZE.  The value is entry-point independent so identical SCCs
   streamed from different translation units hash the same.  */

hashval_t
DFS::hash_scc (struct output_block *ob,
	       unsigned first, unsigned size)
{
  unsigned int last_classes = 0, iterations = 0;

  /* Compute hash values for the SCC members.  */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash = hash_tree (ob->writer_cache, NULL,
					sccstack[first+i].t);

  /* A singleton SCC needs no order canonicalization.  */
  if (size == 1)
    return sccstack[first].hash;

  /* We aim to get a unique hash for every tree within the SCC and compute
     the hash value of the whole SCC by combining all values together in a
     stable (entry-point independent) order.  This guarantees that the same
     SCC regions within different translation units will get the same hash
     values and therefore will be merged at WPA time.

     Often the hashes are already unique.  In that case we compute the SCC
     hash by combining individual hash values in an increasing order.

     If there are duplicates we seek at least one tree with unique hash (and
     pick one with minimal hash and this property).  Then we obtain a stable
     order by DFS walk starting from this unique tree and then use the index
     within this order to make individual hash values unique.

     If there is no tree with unique hash, we iteratively propagate the hash
     values across the internal edges of the SCC.  This usually quickly leads
     to unique hashes.  Consider, for example, an SCC containing two pointers
     that are identical except for the type they point to and assume that
     these types are also part of the SCC.  The propagation will add the
     points-to type information into their hash values.  */
  do
    {
      /* Sort the SCC so we can easily check for uniqueness.  */
      qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);

      unsigned int classes = 1;
      int firstunique = -1;

      /* Find the tree with lowest unique hash (if it exists) and compute
	 the number of equivalence classes.  */
      if (sccstack[first].hash != sccstack[first+1].hash)
	firstunique = 0;
      for (unsigned i = 1; i < size; ++i)
	if (sccstack[first+i-1].hash != sccstack[first+i].hash)
	  {
	    classes++;
	    if (firstunique == -1
		&& (i == size - 1
		    || sccstack[first+i+1].hash != sccstack[first+i].hash))
	      firstunique = i;
	  }

      /* If we found a tree with unique hash, stop the iteration.  */
      if (firstunique != -1
	  /* Also terminate if we run out of iterations or if the number of
	     equivalence classes is no longer increasing.
	     For example a cyclic list of trees that are all equivalent will
	     never have a unique entry point; we however do not build such
	     SCCs in our IL.  */
	  || classes <= last_classes || iterations > 16)
        {
          hashval_t scc_hash;

	  /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
	     starting from FIRSTUNIQUE to obtain a stable order.  */
	  if (classes != size && firstunique != -1)
	    {
	      hash_map <tree, hashval_t> map(size*2);

	      /* Store hash values into a map, so we can associate them with
		 the reordered SCC.  */
	      for (unsigned i = 0; i < size; ++i)
		map.put (sccstack[first+i].t, sccstack[first+i].hash);

	      /* Re-walk the SCC from the unique entry to get a canonical
		 member order.  */
	      DFS again (ob, sccstack[first+firstunique].t, false, false, true);
	      gcc_assert (again.sccstack.length () == size);

	      memcpy (sccstack.address () + first,
		      again.sccstack.address (),
		      sizeof (scc_entry) * size);

	      /* Update hash values of individual members by hashing in the
		 index within the stable order.  This ensures uniqueness.
		 Also compute the scc_hash by mixing in all hash values in
		 the stable order we obtained.  */
	      sccstack[first].hash = *map.get (sccstack[first].t);
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		{
		  sccstack[first+i].hash
		    = iterative_hash_hashval_t (i,
						*map.get (sccstack[first+i].t));
		  scc_hash = iterative_hash_hashval_t (scc_hash,
						       sccstack[first+i].hash);
		}
	    }
	  /* If we got unique hash values for each tree, then sort already
	     ensured entry point independent order.  Only compute the final
	     scc hash.

	     If we failed to find the unique entry point, we go by the same
	     route.  We will eventually introduce unwanted hash conflicts.  */
	  else
	    {
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		scc_hash = iterative_hash_hashval_t (scc_hash,
						     sccstack[first+i].hash);
	      /* We cannot 100% guarantee that the hash will not conflict
		 in a way so the unique hash is not found.  This however
		 should be an extremely rare situation.  ICE for now so
		 possible issues are found and evaluated.  */
	      gcc_checking_assert (classes == size);
	    }

	  /* To avoid conflicts across SCCs, iteratively hash the whole SCC
	     hash into the hash of each of the elements.  */
	  for (unsigned i = 0; i < size; ++i)
	    sccstack[first+i].hash
	      = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
	  return scc_hash;
	}

      last_classes = classes;
      iterations++;

      /* We failed to identify the entry point; propagate hash values across
	 the edges.  */
      {
	hash_map <tree, hashval_t> map(size*2);
	for (unsigned i = 0; i < size; ++i)
	  map.put (sccstack[first+i].t, sccstack[first+i].hash);

	for (unsigned i = 0; i < size; i++)
	  sccstack[first+i].hash = hash_tree (ob->writer_cache, &map,
					      sccstack[first+i].t);
      }
    }
  while (true);
}
1306
/* DFS walk EXPR and stream SCCs of tree bodies if they are not
   already in the streamer cache.  Main routine called for
   each visit of EXPR.  FROM_STATE is the Tarjan state of the caller
   (NULL at the DFS root); REF_P/THIS_REF_P control reference
   streaming of siblings/leaves; SINGLE_P re-walks a single leaf SCC
   for hash canonicalization without emitting anything.  */

void
DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
		     tree expr, bool ref_p, bool this_ref_p, bool single_p)
{
  unsigned ix;

  /* Handle special cases.  */
  if (expr == NULL_TREE)
    return;

  /* Do not DFS walk into indexable trees; they are emitted as
     references into global streams instead.  */
  if (this_ref_p && tree_is_indexable (expr))
    return;

  /* Check if we already streamed EXPR.  */
  if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
    return;

  sccs **slot = &sccstate.get_or_insert (expr);
  sccs *cstate = *slot;
  if (!cstate)
    {
      scc_entry e = { expr, 0 };
      /* Not yet visited.  DFS recurse and push it onto the stack.  */
      *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
      sccstack.safe_push (e);
      cstate->dfsnum = next_dfs_num++;
      cstate->low = cstate->dfsnum;

      /* Builtins are streamed by reference; do not walk their bodies.  */
      if (streamer_handle_as_builtin_p (expr))
	;
      else if (TREE_CODE (expr) == INTEGER_CST
	       && !TREE_OVERFLOW (expr))
	/* Non-overflowed INTEGER_CSTs only need their type walked; the
	   value itself is embedded in the tree header.  */
	DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p, single_p);
      else
	{
	  DFS_write_tree_body (ob, expr, cstate, ref_p, single_p);

	  /* Walk any LTO-specific edges.  */
	  if (DECL_P (expr)
	      && TREE_CODE (expr) != FUNCTION_DECL
	      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
	    {
	      /* Handle DECL_INITIAL for symbols.  */
	      tree initial = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
						       expr);
	      DFS_write_tree (ob, cstate, initial, ref_p, ref_p, single_p);
	    }
	}

      /* See if we found an SCC (Tarjan: EXPR is the SCC root when its
	 low-link equals its DFS number).  */
      if (cstate->low == cstate->dfsnum)
	{
	  unsigned first, size;
	  tree x;

	  /* If we are re-walking a single leaf-SCC just return and
	     let the caller access the sccstack.  */
	  if (single_p)
	    return;

	  /* Pop the SCC and compute its size.  */
	  first = sccstack.length ();
	  do
	    {
	      x = sccstack[--first].t;
	    }
	  while (x != expr);
	  size = sccstack.length () - first;

	  /* No need to compute hashes for LTRANS units, we don't perform
	     any merging there.  */
	  hashval_t scc_hash = 0;
	  unsigned scc_entry_len = 0;
	  if (!flag_wpa)
	    {
	      scc_hash = hash_scc (ob, first, size);

	      /* Put the entries with the least number of collisions first.  */
	      unsigned entry_start = 0;
	      scc_entry_len = size + 1;
	      for (unsigned i = 0; i < size;)
		{
		  unsigned from = i;
		  for (i = i + 1; i < size
		       && (sccstack[first + i].hash
			   == sccstack[first + from].hash); ++i)
		    ;
		  if (i - from < scc_entry_len)
		    {
		      scc_entry_len = i - from;
		      entry_start = from;
		    }
		}
	      /* Rotate the chosen candidate run to the front of the SCC.  */
	      for (unsigned i = 0; i < scc_entry_len; ++i)
		{
		  scc_entry tem = sccstack[first + i];
		  sccstack[first + i] = sccstack[first + entry_start + i];
		  sccstack[first + entry_start + i] = tem;
		}

	      if (scc_entry_len == 1)
		; /* We already sorted SCC deterministically in hash_scc.  */
	      else
		/* Check that we have only one SCC.
		   Naturally we may have conflicts if hash function is not
		   strong enough.  Lets see how far this gets.  */
		{
#ifdef ENABLE_CHECKING
		  gcc_unreachable ();
#endif
		}
	    }

	  /* Write LTO_tree_scc.  */
	  streamer_write_record_start (ob, LTO_tree_scc);
	  streamer_write_uhwi (ob, size);
	  streamer_write_uhwi (ob, scc_hash);

	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
	     All INTEGER_CSTs need to be handled this way as we need
	     their type to materialize them.  Also builtins are handled
	     this way.
	     ??? We still wrap these in LTO_tree_scc so at the
	     input side we can properly identify the tree we want
	     to ultimately return.  */
	  if (size == 1)
	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
	  else
	    {
	      /* Write the size of the SCC entry candidates.  */
	      streamer_write_uhwi (ob, scc_entry_len);

	      /* Write all headers and populate the streamer cache.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  hashval_t hash = sccstack[first+i].hash;
		  tree t = sccstack[first+i].t;
		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
							      t, hash, &ix);
		  gcc_assert (!exists_p);

		  if (!lto_is_streamable (t))
		    internal_error ("tree code %qs is not supported "
				    "in LTO streams",
				    get_tree_code_name (TREE_CODE (t)));

		  gcc_checking_assert (!streamer_handle_as_builtin_p (t));

		  /* Write the header, containing everything needed to
		     materialize EXPR on the reading side.  */
		  streamer_write_tree_header (ob, t);
		}

	      /* Write the bitpacks and tree references.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);

		  /* Mark the end of the tree.  */
		  streamer_write_zero (ob);
		}
	    }

	  /* Finally truncate the vector.  */
	  sccstack.truncate (first);

	  if (from_state)
	    from_state->low = MIN (from_state->low, cstate->low);
	  return;
	}

      if (from_state)
	from_state->low = MIN (from_state->low, cstate->low);
    }
  gcc_checking_assert (from_state);
  /* Back edge: update the caller's low-link per Tarjan's algorithm.  */
  if (cstate->dfsnum < from_state->dfsnum)
    from_state->low = MIN (cstate->dfsnum, from_state->low);
}
1490
1491
1492 /* Emit the physical representation of tree node EXPR to output block
1493 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
1494 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1495
1496 void
1497 lto_output_tree (struct output_block *ob, tree expr,
1498 bool ref_p, bool this_ref_p)
1499 {
1500 unsigned ix;
1501 bool existed_p;
1502
1503 if (expr == NULL_TREE)
1504 {
1505 streamer_write_record_start (ob, LTO_null);
1506 return;
1507 }
1508
1509 if (this_ref_p && tree_is_indexable (expr))
1510 {
1511 lto_output_tree_ref (ob, expr);
1512 return;
1513 }
1514
1515 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1516 if (existed_p)
1517 {
1518 /* If a node has already been streamed out, make sure that
1519 we don't write it more than once. Otherwise, the reader
1520 will instantiate two different nodes for the same object. */
1521 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1522 streamer_write_uhwi (ob, ix);
1523 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1524 lto_tree_code_to_tag (TREE_CODE (expr)));
1525 lto_stats.num_pickle_refs_output++;
1526 }
1527 else
1528 {
1529 /* This is the first time we see EXPR, write all reachable
1530 trees to OB. */
1531 static bool in_dfs_walk;
1532
1533 /* Protect against recursion which means disconnect between
1534 what tree edges we walk in the DFS walk and what edges
1535 we stream out. */
1536 gcc_assert (!in_dfs_walk);
1537
1538 /* Start the DFS walk. */
1539 /* Save ob state ... */
1540 /* let's see ... */
1541 in_dfs_walk = true;
1542 DFS (ob, expr, ref_p, this_ref_p, false);
1543 in_dfs_walk = false;
1544
1545 /* Finally append a reference to the tree we were writing.
1546 ??? If expr ended up as a singleton we could have
1547 inlined it here and avoid outputting a reference. */
1548 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1549 gcc_assert (existed_p);
1550 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1551 streamer_write_uhwi (ob, ix);
1552 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1553 lto_tree_code_to_tag (TREE_CODE (expr)));
1554 lto_stats.num_pickle_refs_output++;
1555 }
1556 }
1557
1558
1559 /* Output to OB a list of try/catch handlers starting with FIRST. */
1560
1561 static void
1562 output_eh_try_list (struct output_block *ob, eh_catch first)
1563 {
1564 eh_catch n;
1565
1566 for (n = first; n; n = n->next_catch)
1567 {
1568 streamer_write_record_start (ob, LTO_eh_catch);
1569 stream_write_tree (ob, n->type_list, true);
1570 stream_write_tree (ob, n->filter_list, true);
1571 stream_write_tree (ob, n->label, true);
1572 }
1573
1574 streamer_write_record_start (ob, LTO_null);
1575 }
1576
1577
/* Output EH region R to OB.  A NULL region is streamed as an LTO_null
   record.  Tree links to other regions are streamed as region indices,
   with zero standing for "no link".  */

static void
output_eh_region (struct output_block *ob, eh_region r)
{
  enum LTO_tags tag;

  if (r == NULL)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  /* Pick the record tag matching the region type.  */
  if (r->type == ERT_CLEANUP)
    tag = LTO_ert_cleanup;
  else if (r->type == ERT_TRY)
    tag = LTO_ert_try;
  else if (r->type == ERT_ALLOWED_EXCEPTIONS)
    tag = LTO_ert_allowed_exceptions;
  else if (r->type == ERT_MUST_NOT_THROW)
    tag = LTO_ert_must_not_throw;
  else
    gcc_unreachable ();

  streamer_write_record_start (ob, tag);
  streamer_write_hwi (ob, r->index);

  /* Stream the outer, inner and next-peer links as indices.  */
  if (r->outer)
    streamer_write_hwi (ob, r->outer->index);
  else
    streamer_write_zero (ob);

  if (r->inner)
    streamer_write_hwi (ob, r->inner->index);
  else
    streamer_write_zero (ob);

  if (r->next_peer)
    streamer_write_hwi (ob, r->next_peer->index);
  else
    streamer_write_zero (ob);

  /* Stream the payload specific to the region type.  ERT_CLEANUP has
     no extra payload.  */
  if (r->type == ERT_TRY)
    {
      output_eh_try_list (ob, r->u.eh_try.first_catch);
    }
  else if (r->type == ERT_ALLOWED_EXCEPTIONS)
    {
      stream_write_tree (ob, r->u.allowed.type_list, true);
      stream_write_tree (ob, r->u.allowed.label, true);
      streamer_write_uhwi (ob, r->u.allowed.filter);
    }
  else if (r->type == ERT_MUST_NOT_THROW)
    {
      stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
      bitpack_d bp = bitpack_create (ob->main_stream);
      stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
      streamer_write_bitpack (&bp);
    }

  /* Stream the first landing pad index, or zero if there is none.  */
  if (r->landing_pads)
    streamer_write_hwi (ob, r->landing_pads->index);
  else
    streamer_write_zero (ob);
}
1645
1646
1647 /* Output landing pad LP to OB. */
1648
1649 static void
1650 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1651 {
1652 if (lp == NULL)
1653 {
1654 streamer_write_record_start (ob, LTO_null);
1655 return;
1656 }
1657
1658 streamer_write_record_start (ob, LTO_eh_landing_pad);
1659 streamer_write_hwi (ob, lp->index);
1660 if (lp->next_lp)
1661 streamer_write_hwi (ob, lp->next_lp->index);
1662 else
1663 streamer_write_zero (ob);
1664
1665 if (lp->region)
1666 streamer_write_hwi (ob, lp->region->index);
1667 else
1668 streamer_write_zero (ob);
1669
1670 stream_write_tree (ob, lp->post_landing_pad, true);
1671 }
1672
1673
/* Output the existing eh_table of function FN to OB.  If FN has no EH
   regions at all, only a single LTO_null record is emitted.  */

static void
output_eh_regions (struct output_block *ob, struct function *fn)
{
  if (fn->eh && fn->eh->region_tree)
    {
      unsigned i;
      eh_region eh;
      eh_landing_pad lp;
      tree ttype;

      streamer_write_record_start (ob, LTO_eh_table);

      /* Emit the index of the root of the EH region tree.  */
      streamer_write_hwi (ob, fn->eh->region_tree->index);

      /* Emit all the EH regions in the region array.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
	output_eh_region (ob, eh);

      /* Emit all landing pads.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
	output_eh_lp (ob, lp);

      /* Emit all the runtime type data.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
	stream_write_tree (ob, ttype, true);

      /* Emit the table of action chains.  Its representation depends on
	 whether the target uses the ARM EABI unwinder.  */
      if (targetm.arm_eabi_unwinder)
	{
	  tree t;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
	    stream_write_tree (ob, t, true);
	}
      else
	{
	  uchar c;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
	    streamer_write_char_stream (ob->main_stream, c);
	}
    }

  /* The LTO_null either terminates the record or indicates that there
     are no eh_records at all.  */
  streamer_write_record_start (ob, LTO_null);
}
1727
1728
1729 /* Output all of the active ssa names to the ssa_names stream. */
1730
1731 static void
1732 output_ssa_names (struct output_block *ob, struct function *fn)
1733 {
1734 unsigned int i, len;
1735
1736 len = vec_safe_length (SSANAMES (fn));
1737 streamer_write_uhwi (ob, len);
1738
1739 for (i = 1; i < len; i++)
1740 {
1741 tree ptr = (*SSANAMES (fn))[i];
1742
1743 if (ptr == NULL_TREE
1744 || SSA_NAME_IN_FREE_LIST (ptr)
1745 || virtual_operand_p (ptr))
1746 continue;
1747
1748 streamer_write_uhwi (ob, i);
1749 streamer_write_char_stream (ob->main_stream,
1750 SSA_NAME_IS_DEFAULT_DEF (ptr));
1751 if (SSA_NAME_VAR (ptr))
1752 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1753 else
1754 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1755 stream_write_tree (ob, TREE_TYPE (ptr), true);
1756 }
1757
1758 streamer_write_zero (ob);
1759 }
1760
1761
1762 /* Output a wide-int. */
1763
1764 static void
1765 streamer_write_wi (struct output_block *ob,
1766 const widest_int &w)
1767 {
1768 int len = w.get_len ();
1769
1770 streamer_write_uhwi (ob, w.get_precision ());
1771 streamer_write_uhwi (ob, len);
1772 for (int i = 0; i < len; i++)
1773 streamer_write_hwi (ob, w.elt (i));
1774 }
1775
1776
1777 /* Output the cfg. */
1778
1779 static void
1780 output_cfg (struct output_block *ob, struct function *fn)
1781 {
1782 struct lto_output_stream *tmp_stream = ob->main_stream;
1783 basic_block bb;
1784
1785 ob->main_stream = ob->cfg_stream;
1786
1787 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1788 profile_status_for_fn (fn));
1789
1790 /* Output the number of the highest basic block. */
1791 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1792
1793 FOR_ALL_BB_FN (bb, fn)
1794 {
1795 edge_iterator ei;
1796 edge e;
1797
1798 streamer_write_hwi (ob, bb->index);
1799
1800 /* Output the successors and the edge flags. */
1801 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1802 FOR_EACH_EDGE (e, ei, bb->succs)
1803 {
1804 streamer_write_uhwi (ob, e->dest->index);
1805 streamer_write_hwi (ob, e->probability);
1806 streamer_write_gcov_count (ob, e->count);
1807 streamer_write_uhwi (ob, e->flags);
1808 }
1809 }
1810
1811 streamer_write_hwi (ob, -1);
1812
1813 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1814 while (bb->next_bb)
1815 {
1816 streamer_write_hwi (ob, bb->next_bb->index);
1817 bb = bb->next_bb;
1818 }
1819
1820 streamer_write_hwi (ob, -1);
1821
1822 /* ??? The cfgloop interface is tied to cfun. */
1823 gcc_assert (cfun == fn);
1824
1825 /* Output the number of loops. */
1826 streamer_write_uhwi (ob, number_of_loops (fn));
1827
1828 /* Output each loop, skipping the tree root which has number zero. */
1829 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1830 {
1831 struct loop *loop = get_loop (fn, i);
1832
1833 /* Write the index of the loop header. That's enough to rebuild
1834 the loop tree on the reader side. Stream -1 for an unused
1835 loop entry. */
1836 if (!loop)
1837 {
1838 streamer_write_hwi (ob, -1);
1839 continue;
1840 }
1841 else
1842 streamer_write_hwi (ob, loop->header->index);
1843
1844 /* Write everything copy_loop_info copies. */
1845 streamer_write_enum (ob->main_stream,
1846 loop_estimation, EST_LAST, loop->estimate_state);
1847 streamer_write_hwi (ob, loop->any_upper_bound);
1848 if (loop->any_upper_bound)
1849 streamer_write_wi (ob, loop->nb_iterations_upper_bound);
1850 streamer_write_hwi (ob, loop->any_estimate);
1851 if (loop->any_estimate)
1852 streamer_write_wi (ob, loop->nb_iterations_estimate);
1853
1854 /* Write OMP SIMD related info. */
1855 streamer_write_hwi (ob, loop->safelen);
1856 streamer_write_hwi (ob, loop->dont_vectorize);
1857 streamer_write_hwi (ob, loop->force_vectorize);
1858 stream_write_tree (ob, loop->simduid, true);
1859 }
1860
1861 ob->main_stream = tmp_stream;
1862 }
1863
1864
/* Create the header in the file using OB.  If the section type is for
   a function, set FN to the decl for that function.  */

void
produce_asm (struct output_block *ob, tree fn)
{
  enum lto_section_type section_type = ob->section_type;
  struct lto_function_header header;
  char *section_name;

  /* Function-body sections are named after the function's assembler
     name; other section types use a generic name.  */
  if (section_type == LTO_section_function_body)
    {
      const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
      section_name = lto_get_section_name (section_type, name, NULL);
    }
  else
    section_name = lto_get_section_name (section_type, NULL, NULL);

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header is stream computed here.  */
  memset (&header, 0, sizeof (struct lto_function_header));

  /* Write the header.  */
  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  /* Record the sizes of the sub-streams; the cfg stream exists only
     for function bodies.  */
  if (section_type == LTO_section_function_body)
    header.cfg_size = ob->cfg_stream->total_size;
  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;
  lto_write_data (&header, sizeof header);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  if (section_type == LTO_section_function_body)
    lto_write_stream (ob->cfg_stream);
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();
}
1908
1909
/* Output the base body of struct function FN using output block OB.
   The bitpacked flags must be unpacked by the reader in exactly the
   same order they are packed here.  */

static void
output_struct_function_base (struct output_block *ob, struct function *fn)
{
  struct bitpack_d bp;
  unsigned i;
  tree t;

  /* Output the static chain and non-local goto save area.  */
  stream_write_tree (ob, fn->static_chain_decl, true);
  stream_write_tree (ob, fn->nonlocal_goto_save_area, true);

  /* Output all the local variables in the function.  */
  streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
  FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
    stream_write_tree (ob, t, true);

  /* Output current IL state of the function.  */
  streamer_write_uhwi (ob, fn->curr_properties);

  /* Write all the attributes for FN.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, fn->is_thunk, 1);
  bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
  bp_pack_value (&bp, fn->returns_pcc_struct, 1);
  bp_pack_value (&bp, fn->returns_struct, 1);
  bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
  bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
  bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
  bp_pack_value (&bp, fn->after_inlining, 1);
  bp_pack_value (&bp, fn->stdarg, 1);
  bp_pack_value (&bp, fn->has_nonlocal_label, 1);
  bp_pack_value (&bp, fn->calls_alloca, 1);
  bp_pack_value (&bp, fn->calls_setjmp, 1);
  bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
  bp_pack_value (&bp, fn->has_simduid_loops, 1);
  bp_pack_value (&bp, fn->va_list_fpr_size, 8);
  bp_pack_value (&bp, fn->va_list_gpr_size, 8);

  /* Output the function start and end loci.  */
  stream_output_location (ob, &bp, fn->function_start_locus);
  stream_output_location (ob, &bp, fn->function_end_locus);

  streamer_write_bitpack (&bp);
}
1956
1957
/* Output the body of function NODE->DECL.  */

static void
output_function (struct cgraph_node *node)
{
  tree function;
  struct function *fn;
  basic_block bb;
  struct output_block *ob;

  function = node->decl;
  fn = DECL_STRUCT_FUNCTION (function);
  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->symbol = node;

  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  /* Set current_function_decl and cfun.  */
  push_cfun (fn);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  streamer_write_record_start (ob, LTO_function);

  /* Output decls for parameters and args.  */
  stream_write_tree (ob, DECL_RESULT (function), true);
  streamer_write_chain (ob, DECL_ARGUMENTS (function), true);

  /* Output DECL_INITIAL for the function, which contains the tree of
     lexical scopes.  */
  stream_write_tree (ob, DECL_INITIAL (function), true);

  /* We also stream abstract functions where we stream only stuff needed for
     debug info.  */
  if (gimple_has_body_p (function))
    {
      /* Flag that a body follows.  */
      streamer_write_uhwi (ob, 1);
      output_struct_function_base (ob, fn);

      /* Output all the SSA names used in the function.  */
      output_ssa_names (ob, fn);

      /* Output any exception handling regions.  */
      output_eh_regions (ob, fn);


      /* We will renumber the statements.  The code that does this uses
	 the same ordering that we use for serializing them so we can use
	 the same code on the other end and not have to write out the
	 statement numbers.  We do not assign UIDs to PHIs here because
	 virtual PHIs get re-computed on-the-fly which would make numbers
	 inconsistent.  */
      set_gimple_stmt_max_uid (cfun, 0);
      FOR_ALL_BB_FN (bb, cfun)
	{
	  gimple_stmt_iterator gsi;
	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);

	      /* Virtual PHIs are not going to be streamed.  */
	      if (!virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}
      /* To avoid keeping duplicate gimple IDs in the statements, renumber
	 virtual phis now.  */
      FOR_ALL_BB_FN (bb, cfun)
	{
	  gimple_stmt_iterator gsi;
	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      if (virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}

      /* Output the code for the function.  */
      FOR_ALL_BB_FN (bb, fn)
	output_bb (ob, bb, fn);

      /* The terminator for this function.  */
      streamer_write_record_start (ob, LTO_null);

      output_cfg (ob, fn);

      pop_cfun ();
   }
  else
    /* No body: stream a zero marker.  */
    streamer_write_uhwi (ob, 0);

  /* Create a section to hold the pickled output of this function.  */
  produce_asm (ob, function);

  destroy_output_block (ob);
}
2063
/* Output the constructor (DECL_INITIAL) of variable NODE->DECL.
   Note: the initializer is streamed into an LTO_section_function_body
   section, the same section kind used for function bodies.  */

static void
output_constructor (struct varpool_node *node)
{
  tree var = node->decl;
  struct output_block *ob;

  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->symbol = node;

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* Output DECL_INITIAL for the variable, which contains the
     initializer tree.  */
  stream_write_tree (ob, DECL_INITIAL (var), true);

  /* Create a section to hold the pickled output of this initializer.  */
  produce_asm (ob, var);

  destroy_output_block (ob);
}
2089
/* Emit toplevel asms into their own LTO section.  Each asm string is
   streamed together with its symbol-table order; the list is
   terminated by a NULL_TREE string.  Nothing is emitted when the
   symbol table contains no asm symbols.  */

void
lto_output_toplevel_asms (void)
{
  struct output_block *ob;
  struct asm_node *can;
  char *section_name;
  struct lto_simple_header_with_strings header;

  if (!symtab->first_asm_symbol ())
    return;

  ob = create_output_block (LTO_section_asm);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* Stream every asm string followed by its order.  */
  for (can = symtab->first_asm_symbol (); can; can = can->next)
    {
      streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
      streamer_write_hwi (ob, can->order);
    }

  /* Terminate the list with a NULL string.  */
  streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);

  section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header stream is computed here.  */
  memset (&header, 0, sizeof (header));

  /* Write the header.  */
  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  /* Sizes must be taken after all streaming is done.  */
  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;
  lto_write_data (&header, sizeof header);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  destroy_output_block (ob);
}
2141
/* Copy the function body or variable constructor of NODE without
   deserializing: the pickled bytes read from the input LTO file are
   written back out verbatim, and the input decl-state tree vectors are
   copied into the output state so stream indices remain valid.  */

static void
copy_function_or_variable (struct symtab_node *node)
{
  tree function = node->decl;
  struct lto_file_decl_data *file_data = node->lto_file_data;
  const char *data;
  size_t len;
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
  char *section_name =
    lto_get_section_name (LTO_section_function_body, name, NULL);
  size_t i, j;
  struct lto_in_decl_state *in_state;
  struct lto_out_decl_state *out_state = lto_get_out_decl_state ();

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* We may have renamed the declaration, e.g., a static function.  */
  name = lto_get_decl_name_mapping (file_data, name);

  data = lto_get_section_data (file_data, LTO_section_function_body,
                               name, &len);
  gcc_assert (data);

  /* Do a bit copy of the function body.  */
  lto_write_data (data, len);

  /* Copy decls.  */
  in_state =
    lto_get_function_in_decl_state (node->lto_file_data, function);
  gcc_assert (in_state);

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      size_t n = in_state->streams[i].size;
      tree *trees = in_state->streams[i].trees;
      struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);

      /* The out state must have the same indices and the in state.
	 So just copy the vector.  All the encoders in the in state
	 must be empty where we reach here.  */
      gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
      encoder->trees.reserve_exact (n);
      for (j = 0; j < n; j++)
	encoder->trees.safe_push (trees[j]);
    }

  lto_free_section_data (file_data, LTO_section_function_body, name,
			 data, len);
  lto_end_section ();
}
2196
2197 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2198
2199 static tree
2200 wrap_refs (tree *tp, int *ws, void *)
2201 {
2202 tree t = *tp;
2203 if (handled_component_p (t)
2204 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL)
2205 {
2206 tree decl = TREE_OPERAND (t, 0);
2207 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2208 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2209 build1 (ADDR_EXPR, ptrtype, decl),
2210 build_int_cst (ptrtype, 0));
2211 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2212 *ws = 0;
2213 }
2214 else if (TREE_CODE (t) == CONSTRUCTOR)
2215 ;
2216 else if (!EXPR_P (t))
2217 *ws = 0;
2218 return NULL_TREE;
2219 }
2220
/* Main entry point from the pass manager.  Streams the bodies of all
   encoded functions and the initializers of all encoded variables,
   then emits the symbol table.  */

void
lto_output (void)
{
  struct lto_out_decl_state *decl_state;
#ifdef ENABLE_CHECKING
  /* Bitmap of DECL_UIDs already written; used only to assert that no
     body or initializer is streamed twice.  */
  bitmap output = lto_bitmap_alloc ();
#endif
  int i, n_nodes;
  lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* Initialize the streamer.  */
  lto_streamer_init ();

  n_nodes = lto_symtab_encoder_size (encoder);
  /* Process only the functions with bodies.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
	{
	  if (lto_symtab_encoder_encode_body_p (encoder, node)
	      && !node->alias)
	    {
#ifdef ENABLE_CHECKING
	      gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
	      bitmap_set_bit (output, DECL_UID (node->decl));
#endif
	      /* Each body gets its own decl state, recorded afterwards
		 so its references can be emitted with the decls.  */
	      decl_state = lto_new_out_decl_state ();
	      lto_push_out_decl_state (decl_state);
	      /* Stream the body when it is in memory; otherwise copy
		 the already-pickled section from the input file.  */
	      if (gimple_has_body_p (node->decl) || !flag_wpa)
		output_function (node);
	      else
		copy_function_or_variable (node);
	      gcc_assert (lto_get_out_decl_state () == decl_state);
	      lto_pop_out_decl_state ();
	      lto_record_function_out_decl_state (node->decl, decl_state);
	    }
	}
      else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
	{
	  /* Wrap symbol references inside the ctor in a type
	     preserving MEM_REF.  */
	  tree ctor = DECL_INITIAL (node->decl);
	  if (ctor && !in_lto_p)
	    walk_tree (&ctor, wrap_refs, NULL, NULL);
	  if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
	      && lto_symtab_encoder_encode_initializer_p (encoder, node)
	      && !node->alias)
	    {
	      timevar_push (TV_IPA_LTO_CTORS_OUT);
#ifdef ENABLE_CHECKING
	      gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
	      bitmap_set_bit (output, DECL_UID (node->decl));
#endif
	      decl_state = lto_new_out_decl_state ();
	      lto_push_out_decl_state (decl_state);
	      /* Stream the initializer when available; otherwise copy
		 the pickled section from the input file.  */
	      if (DECL_INITIAL (node->decl) != error_mark_node
		  || !flag_wpa)
		output_constructor (node);
	      else
		copy_function_or_variable (node);
	      gcc_assert (lto_get_out_decl_state () == decl_state);
	      lto_pop_out_decl_state ();
	      lto_record_function_out_decl_state (node->decl, decl_state);
	      timevar_pop (TV_IPA_LTO_CTORS_OUT);
	    }
	}
    }

  /* Emit the callgraph after emitting function bodies.  This needs to
     be done now to make sure that all the statements in every function
     have been renumbered so that edges can be associated with call
     statements using the statement UIDs.  */
  output_symtab ();

#ifdef ENABLE_CHECKING
  lto_bitmap_free (output);
#endif
}
2302
2303 /* Write each node in encoded by ENCODER to OB, as well as those reachable
2304 from it and required for correct representation of its semantics.
2305 Each node in ENCODER must be a global declaration or a type. A node
2306 is written only once, even if it appears multiple times in the
2307 vector. Certain transitively-reachable nodes, such as those
2308 representing expressions, may be duplicated, but such nodes
2309 must not appear in ENCODER itself. */
2310
2311 static void
2312 write_global_stream (struct output_block *ob,
2313 struct lto_tree_ref_encoder *encoder)
2314 {
2315 tree t;
2316 size_t index;
2317 const size_t size = lto_tree_ref_encoder_size (encoder);
2318
2319 for (index = 0; index < size; index++)
2320 {
2321 t = lto_tree_ref_encoder_get_tree (encoder, index);
2322 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2323 stream_write_tree (ob, t, false);
2324 }
2325 }
2326
2327
2328 /* Write a sequence of indices into the globals vector corresponding
2329 to the trees in ENCODER. These are used by the reader to map the
2330 indices used to refer to global entities within function bodies to
2331 their referents. */
2332
2333 static void
2334 write_global_references (struct output_block *ob,
2335 struct lto_tree_ref_encoder *encoder)
2336 {
2337 tree t;
2338 uint32_t index;
2339 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2340
2341 /* Write size and slot indexes as 32-bit unsigned numbers. */
2342 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2343 data[0] = size;
2344
2345 for (index = 0; index < size; index++)
2346 {
2347 uint32_t slot_num;
2348
2349 t = lto_tree_ref_encoder_get_tree (encoder, index);
2350 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2351 gcc_assert (slot_num != (unsigned)-1);
2352 data[index + 1] = slot_num;
2353 }
2354
2355 lto_write_data (data, sizeof (int32_t) * (size + 1));
2356 free (data);
2357 }
2358
2359
2360 /* Write all the streams in an lto_out_decl_state STATE using
2361 output block OB and output stream OUT_STREAM. */
2362
2363 void
2364 lto_output_decl_state_streams (struct output_block *ob,
2365 struct lto_out_decl_state *state)
2366 {
2367 int i;
2368
2369 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2370 write_global_stream (ob, &state->streams[i]);
2371 }
2372
2373
2374 /* Write all the references in an lto_out_decl_state STATE using
2375 output block OB and output stream OUT_STREAM. */
2376
2377 void
2378 lto_output_decl_state_refs (struct output_block *ob,
2379 struct lto_out_decl_state *state)
2380 {
2381 unsigned i;
2382 uint32_t ref;
2383 tree decl;
2384
2385 /* Write reference to FUNCTION_DECL. If there is not function,
2386 write reference to void_type_node. */
2387 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2388 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2389 gcc_assert (ref != (unsigned)-1);
2390 lto_write_data (&ref, sizeof (uint32_t));
2391
2392 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2393 write_global_references (ob, &state->streams[i]);
2394 }
2395
2396
2397 /* Return the written size of STATE. */
2398
2399 static size_t
2400 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2401 {
2402 int i;
2403 size_t size;
2404
2405 size = sizeof (int32_t); /* fn_ref. */
2406 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2407 {
2408 size += sizeof (int32_t); /* vector size. */
2409 size += (lto_tree_ref_encoder_size (&state->streams[i])
2410 * sizeof (int32_t));
2411 }
2412 return size;
2413 }
2414
/* Write symbol T into the plugin symbol-table stream, looking up its
   slot number in CACHE.  SEEN holds assembler names written so far,
   so each name is emitted at most once.  ALIAS says the symbol is
   written on behalf of an alias, which relaxes the sanity checks that
   a definition has a symtab node attached.  */

static void
write_symbol (struct streamer_tree_cache_d *cache,
	      tree t, hash_set<const char *> *seen, bool alias)
{
  const char *name;
  enum gcc_plugin_symbol_kind kind;
  enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
  unsigned slot_num;
  uint64_t size;
  const char *comdat;
  unsigned char c;

  /* None of the following kinds of symbols are needed in the
     symbol table.  */
  if (!TREE_PUBLIC (t)
      || is_builtin_fn (t)
      || DECL_ABSTRACT (t)
      || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
    return;
  gcc_assert (TREE_CODE (t) != RESULT_DECL);

  gcc_assert (TREE_CODE (t) == VAR_DECL
	      || TREE_CODE (t) == FUNCTION_DECL);

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));

  /* This behaves like assemble_name_raw in varasm.c, performing the
     same name manipulations that ASM_OUTPUT_LABELREF does.  */
  name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));

  /* hash_set::add returns true when NAME was already present, i.e.
     the symbol has been written before.  */
  if (seen->add (name))
    return;

  streamer_tree_cache_lookup (cache, t, &slot_num);
  gcc_assert (slot_num != (unsigned)-1);

  /* Classify: undefined (possibly weak) for externals, otherwise
     weak, common or plain definition.  */
  if (DECL_EXTERNAL (t))
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKUNDEF;
      else
	kind = GCCPK_UNDEF;
    }
  else
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKDEF;
      else if (DECL_COMMON (t))
	kind = GCCPK_COMMON;
      else
	kind = GCCPK_DEF;

      /* When something is defined, it should have node attached.  */
      gcc_assert (alias || TREE_CODE (t) != VAR_DECL
		  || varpool_node::get (t)->definition);
      gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
		  || (cgraph_node::get (t)
		      && cgraph_node::get (t)->definition));
    }

  /* Imitate what default_elf_asm_output_external do.
     When symbol is external, we need to output it with DEFAULT visibility
     when compiling with -fvisibility=default, while with HIDDEN visibility
     when symbol has attribute (visibility("hidden")) specified.
     targetm.binds_local_p check DECL_VISIBILITY_SPECIFIED and gets this
     right.  */

  if (DECL_EXTERNAL (t)
      && !targetm.binds_local_p (t))
    visibility = GCCPV_DEFAULT;
  else
    switch (DECL_VISIBILITY (t))
      {
      case VISIBILITY_DEFAULT:
	visibility = GCCPV_DEFAULT;
	break;
      case VISIBILITY_PROTECTED:
	visibility = GCCPV_PROTECTED;
	break;
      case VISIBILITY_HIDDEN:
	visibility = GCCPV_HIDDEN;
	break;
      case VISIBILITY_INTERNAL:
	visibility = GCCPV_INTERNAL;
	break;
      }

  /* Only common symbols carry a meaningful size in this record.  */
  if (kind == GCCPK_COMMON
      && DECL_SIZE_UNIT (t)
      && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
    size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
  else
    size = 0;

  if (DECL_ONE_ONLY (t))
    comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
  else
    comdat = "";

  /* Record layout: NUL-terminated name, NUL-terminated comdat group,
     1-byte kind, 1-byte visibility, 8-byte size, 4-byte slot number.
     NOTE(review): size and slot_num are written in host byte order —
     presumably writer and reader agree; confirm against the linker
     plugin API.  */
  lto_write_data (name, strlen (name) + 1);
  lto_write_data (comdat, strlen (comdat) + 1);
  c = (unsigned char) kind;
  lto_write_data (&c, 1);
  c = (unsigned char) visibility;
  lto_write_data (&c, 1);
  lto_write_data (&size, 8);
  lto_write_data (&slot_num, 4);
}
2527
2528 /* Return true if NODE should appear in the plugin symbol table. */
2529
2530 bool
2531 output_symbol_p (symtab_node *node)
2532 {
2533 struct cgraph_node *cnode;
2534 if (!node->real_symbol_p ())
2535 return false;
2536 /* We keep external functions in symtab for sake of inlining
2537 and devirtualization. We do not want to see them in symbol table as
2538 references unless they are really used. */
2539 cnode = dyn_cast <cgraph_node *> (node);
2540 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2541 && cnode->callers)
2542 return true;
2543
2544 /* Ignore all references from external vars initializers - they are not really
2545 part of the compilation unit until they are used by folding. Some symbols,
2546 like references to external construction vtables can not be referred to at all.
2547 We decide this at can_refer_decl_in_current_unit_p. */
2548 if (!node->definition || DECL_EXTERNAL (node->decl))
2549 {
2550 int i;
2551 struct ipa_ref *ref;
2552 for (i = 0; node->iterate_referring (i, ref); i++)
2553 {
2554 if (ref->use == IPA_REF_ALIAS)
2555 continue;
2556 if (is_a <cgraph_node *> (ref->referring))
2557 return true;
2558 if (!DECL_EXTERNAL (ref->referring->decl))
2559 return true;
2560 }
2561 return false;
2562 }
2563 return true;
2564 }
2565
2566
2567 /* Write an IL symbol table to OB.
2568 SET and VSET are cgraph/varpool node sets we are outputting. */
2569
2570 static void
2571 produce_symtab (struct output_block *ob)
2572 {
2573 struct streamer_tree_cache_d *cache = ob->writer_cache;
2574 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2575 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2576 lto_symtab_encoder_iterator lsei;
2577
2578 lto_begin_section (section_name, false);
2579 free (section_name);
2580
2581 hash_set<const char *> seen;
2582
2583 /* Write the symbol table.
2584 First write everything defined and then all declarations.
2585 This is necessary to handle cases where we have duplicated symbols. */
2586 for (lsei = lsei_start (encoder);
2587 !lsei_end_p (lsei); lsei_next (&lsei))
2588 {
2589 symtab_node *node = lsei_node (lsei);
2590
2591 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2592 continue;
2593 write_symbol (cache, node->decl, &seen, false);
2594 }
2595 for (lsei = lsei_start (encoder);
2596 !lsei_end_p (lsei); lsei_next (&lsei))
2597 {
2598 symtab_node *node = lsei_node (lsei);
2599
2600 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2601 continue;
2602 write_symbol (cache, node->decl, &seen, false);
2603 }
2604
2605 lto_end_section ();
2606 }
2607
/* This pass is run after all of the functions are serialized and all
   of the IPA passes have written their serialized forms.  This pass
   causes the vector of all of the global decls and types used from
   this file to be written in to a section that can then be read in to
   recover these on other side.  */

void
produce_asm_for_decls (void)
{
  struct lto_out_decl_state *out_state;
  struct lto_out_decl_state *fn_out_state;
  struct lto_decl_header header;
  char *section_name;
  struct output_block *ob;
  unsigned idx, num_fns;
  size_t decl_state_size;
  int32_t num_decl_states;

  ob = create_output_block (LTO_section_decls);

  memset (&header, 0, sizeof (struct lto_decl_header));

  section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* No alias pairs may remain at this point.  */
  gcc_assert (!alias_pairs);

  /* Get rid of the global decl state hash tables to save some memory.  */
  out_state = lto_get_out_decl_state ();
  for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
    if (out_state->streams[i].tree_hash_table)
      {
	delete out_state->streams[i].tree_hash_table;
	out_state->streams[i].tree_hash_table = NULL;
      }

  /* Write the global symbols.  */
  lto_output_decl_state_streams (ob, out_state);
  num_fns = lto_function_decl_states.length ();
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_streams (ob, fn_out_state);
    }

  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  /* Currently not used.  This field would allow us to preallocate
     the globals vector, so that it need not be resized as it is extended.  */
  header.num_nodes = -1;

  /* Compute the total size of all decl out states.  */
  decl_state_size = sizeof (int32_t);
  decl_state_size += lto_out_decl_state_written_size (out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      decl_state_size += lto_out_decl_state_written_size (fn_out_state);
    }
  header.decl_state_size = decl_state_size;

  /* Stream sizes are final only after all streaming above is done.  */
  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  lto_write_data (&header, sizeof header);

  /* Write the main out-decl state, followed by out-decl states of
     functions.  */
  num_decl_states = num_fns + 1;
  lto_write_data (&num_decl_states, sizeof (num_decl_states));
  lto_output_decl_state_refs (ob, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state = lto_function_decl_states[idx];
      lto_output_decl_state_refs (ob, fn_out_state);
    }

  /* Finally the pickled trees and the string table.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  /* Write the symbol table.  It is used by linker to determine dependencies
     and thus we can skip it for WPA.  */
  if (!flag_wpa)
    produce_symtab (ob);

  /* Write command line opts.  */
  lto_write_options ();

  /* Deallocate memory and clean up.  */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_delete_out_decl_state (fn_out_state);
    }
  lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
  lto_function_decl_states.release ();
  destroy_output_block (ob);
}