genattrtab.c (write_header): Include hash-set.h...
[gcc.git] / gcc / lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2015 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "hash-set.h"
28 #include "machmode.h"
29 #include "vec.h"
30 #include "double-int.h"
31 #include "input.h"
32 #include "alias.h"
33 #include "symtab.h"
34 #include "wide-int.h"
35 #include "inchash.h"
36 #include "tree.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "stringpool.h"
40 #include "expr.h"
41 #include "flags.h"
42 #include "params.h"
43 #include "input.h"
44 #include "predict.h"
45 #include "hard-reg-set.h"
46 #include "function.h"
47 #include "dominance.h"
48 #include "cfg.h"
49 #include "basic-block.h"
50 #include "tree-ssa-alias.h"
51 #include "internal-fn.h"
52 #include "gimple-expr.h"
53 #include "is-a.h"
54 #include "gimple.h"
55 #include "gimple-iterator.h"
56 #include "gimple-ssa.h"
57 #include "tree-ssanames.h"
58 #include "tree-pass.h"
59 #include "diagnostic-core.h"
60 #include "except.h"
61 #include "lto-symtab.h"
62 #include "hash-map.h"
63 #include "plugin-api.h"
64 #include "ipa-ref.h"
65 #include "cgraph.h"
66 #include "lto-streamer.h"
67 #include "data-streamer.h"
68 #include "gimple-streamer.h"
69 #include "tree-streamer.h"
70 #include "streamer-hooks.h"
71 #include "cfgloop.h"
72 #include "builtins.h"
73
74
75 static void lto_write_tree (struct output_block*, tree, bool);
76
77 /* Clear the line info stored in DATA_IN. */
78
79 static void
80 clear_line_info (struct output_block *ob)
81 {
82 ob->current_file = NULL;
83 ob->current_line = 0;
84 ob->current_col = 0;
85 }
86
87
88 /* Create the output block and return it. SECTION_TYPE is
89 LTO_section_function_body or LTO_static_initializer. */
90
91 struct output_block *
92 create_output_block (enum lto_section_type section_type)
93 {
94 struct output_block *ob = XCNEW (struct output_block);
95
96 ob->section_type = section_type;
97 ob->decl_state = lto_get_out_decl_state ();
98 ob->main_stream = XCNEW (struct lto_output_stream);
99 ob->string_stream = XCNEW (struct lto_output_stream);
100 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
101
102 if (section_type == LTO_section_function_body)
103 ob->cfg_stream = XCNEW (struct lto_output_stream);
104
105 clear_line_info (ob);
106
107 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
108 gcc_obstack_init (&ob->obstack);
109
110 return ob;
111 }
112
113
114 /* Destroy the output block OB. */
115
116 void
117 destroy_output_block (struct output_block *ob)
118 {
119 enum lto_section_type section_type = ob->section_type;
120
121 delete ob->string_hash_table;
122 ob->string_hash_table = NULL;
123
124 free (ob->main_stream);
125 free (ob->string_stream);
126 if (section_type == LTO_section_function_body)
127 free (ob->cfg_stream);
128
129 streamer_tree_cache_delete (ob->writer_cache);
130 obstack_free (&ob->obstack, NULL);
131
132 free (ob);
133 }
134
135
136 /* Look up NODE in the type table and write the index for it to OB. */
137
138 static void
139 output_type_ref (struct output_block *ob, tree node)
140 {
141 streamer_write_record_start (ob, LTO_type_ref);
142 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
143 }
144
145
146 /* Return true if tree node T is written to various tables. For these
147 nodes, we sometimes want to write their phyiscal representation
148 (via lto_output_tree), and sometimes we need to emit an index
149 reference into a table (via lto_output_tree_ref). */
150
151 static bool
152 tree_is_indexable (tree t)
153 {
154 /* Parameters and return values of functions of variably modified types
155 must go to global stream, because they may be used in the type
156 definition. */
157 if (TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
158 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
159 /* IMPORTED_DECL is put into BLOCK and thus it never can be shared. */
160 else if (TREE_CODE (t) == IMPORTED_DECL)
161 return false;
162 else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
163 || TREE_CODE (t) == TYPE_DECL
164 || TREE_CODE (t) == CONST_DECL
165 || TREE_CODE (t) == NAMELIST_DECL)
166 && decl_function_context (t))
167 return false;
168 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
169 return false;
170 /* Variably modified types need to be streamed alongside function
171 bodies because they can refer to local entities. Together with
172 them we have to localize their members as well.
173 ??? In theory that includes non-FIELD_DECLs as well. */
174 else if (TYPE_P (t)
175 && variably_modified_type_p (t, NULL_TREE))
176 return false;
177 else if (TREE_CODE (t) == FIELD_DECL
178 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
179 return false;
180 else
181 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
182 }
183
184
185 /* Output info about new location into bitpack BP.
186 After outputting bitpack, lto_output_location_data has
187 to be done to output actual data. */
188
189 void
190 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
191 location_t loc)
192 {
193 expanded_location xloc;
194
195 loc = LOCATION_LOCUS (loc);
196 bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
197 if (loc == UNKNOWN_LOCATION)
198 return;
199
200 xloc = expand_location (loc);
201
202 bp_pack_value (bp, ob->current_file != xloc.file, 1);
203 bp_pack_value (bp, ob->current_line != xloc.line, 1);
204 bp_pack_value (bp, ob->current_col != xloc.column, 1);
205
206 if (ob->current_file != xloc.file)
207 bp_pack_string (ob, bp, xloc.file, true);
208 ob->current_file = xloc.file;
209
210 if (ob->current_line != xloc.line)
211 bp_pack_var_len_unsigned (bp, xloc.line);
212 ob->current_line = xloc.line;
213
214 if (ob->current_col != xloc.column)
215 bp_pack_var_len_unsigned (bp, xloc.column);
216 ob->current_col = xloc.column;
217 }
218
219
220 /* If EXPR is an indexable tree node, output a reference to it to
221 output block OB. Otherwise, output the physical representation of
222 EXPR to OB. */
223
224 static void
225 lto_output_tree_ref (struct output_block *ob, tree expr)
226 {
227 enum tree_code code;
228
229 if (TYPE_P (expr))
230 {
231 output_type_ref (ob, expr);
232 return;
233 }
234
235 code = TREE_CODE (expr);
236 switch (code)
237 {
238 case SSA_NAME:
239 streamer_write_record_start (ob, LTO_ssa_name_ref);
240 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
241 break;
242
243 case FIELD_DECL:
244 streamer_write_record_start (ob, LTO_field_decl_ref);
245 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
246 break;
247
248 case FUNCTION_DECL:
249 streamer_write_record_start (ob, LTO_function_decl_ref);
250 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
251 break;
252
253 case VAR_DECL:
254 case DEBUG_EXPR_DECL:
255 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
256 case PARM_DECL:
257 streamer_write_record_start (ob, LTO_global_decl_ref);
258 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
259 break;
260
261 case CONST_DECL:
262 streamer_write_record_start (ob, LTO_const_decl_ref);
263 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
264 break;
265
266 case IMPORTED_DECL:
267 gcc_assert (decl_function_context (expr) == NULL);
268 streamer_write_record_start (ob, LTO_imported_decl_ref);
269 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
270 break;
271
272 case TYPE_DECL:
273 streamer_write_record_start (ob, LTO_type_decl_ref);
274 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
275 break;
276
277 case NAMELIST_DECL:
278 streamer_write_record_start (ob, LTO_namelist_decl_ref);
279 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
280 break;
281
282 case NAMESPACE_DECL:
283 streamer_write_record_start (ob, LTO_namespace_decl_ref);
284 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
285 break;
286
287 case LABEL_DECL:
288 streamer_write_record_start (ob, LTO_label_decl_ref);
289 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
290 break;
291
292 case RESULT_DECL:
293 streamer_write_record_start (ob, LTO_result_decl_ref);
294 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
295 break;
296
297 case TRANSLATION_UNIT_DECL:
298 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
299 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
300 break;
301
302 default:
303 /* No other node is indexable, so it should have been handled by
304 lto_output_tree. */
305 gcc_unreachable ();
306 }
307 }
308
309
310 /* Return true if EXPR is a tree node that can be written to disk. */
311
312 static inline bool
313 lto_is_streamable (tree expr)
314 {
315 enum tree_code code = TREE_CODE (expr);
316
317 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
318 name version in lto_output_tree_ref (see output_ssa_names). */
319 return !is_lang_specific (expr)
320 && code != SSA_NAME
321 && code != CALL_EXPR
322 && code != LANG_TYPE
323 && code != MODIFY_EXPR
324 && code != INIT_EXPR
325 && code != TARGET_EXPR
326 && code != BIND_EXPR
327 && code != WITH_CLEANUP_EXPR
328 && code != STATEMENT_LIST
329 && (code == CASE_LABEL_EXPR
330 || code == DECL_EXPR
331 || TREE_CODE_CLASS (code) != tcc_statement);
332 }
333
334
335 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
336
337 static tree
338 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
339 {
340 gcc_checking_assert (DECL_P (expr)
341 && TREE_CODE (expr) != FUNCTION_DECL
342 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
343
344 /* Handle DECL_INITIAL for symbols. */
345 tree initial = DECL_INITIAL (expr);
346 if (TREE_CODE (expr) == VAR_DECL
347 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
348 && !DECL_IN_CONSTANT_POOL (expr)
349 && initial)
350 {
351 varpool_node *vnode;
352 /* Extra section needs about 30 bytes; do not produce it for simple
353 scalar values. */
354 if (TREE_CODE (DECL_INITIAL (expr)) == CONSTRUCTOR
355 || !(vnode = varpool_node::get (expr))
356 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
357 initial = error_mark_node;
358 }
359
360 return initial;
361 }
362
363
364 /* Write a physical representation of tree node EXPR to output block
365 OB. If REF_P is true, the leaves of EXPR are emitted as references
366 via lto_output_tree_ref. IX is the index into the streamer cache
367 where EXPR is stored. */
368
369 static void
370 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
371 {
372 /* Pack all the non-pointer fields in EXPR into a bitpack and write
373 the resulting bitpack. */
374 bitpack_d bp = bitpack_create (ob->main_stream);
375 streamer_pack_tree_bitfields (ob, &bp, expr);
376 streamer_write_bitpack (&bp);
377
378 /* Write all the pointer fields in EXPR. */
379 streamer_write_tree_body (ob, expr, ref_p);
380
381 /* Write any LTO-specific data to OB. */
382 if (DECL_P (expr)
383 && TREE_CODE (expr) != FUNCTION_DECL
384 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
385 {
386 /* Handle DECL_INITIAL for symbols. */
387 tree initial = get_symbol_initial_value
388 (ob->decl_state->symtab_node_encoder, expr);
389 stream_write_tree (ob, initial, ref_p);
390 }
391 }
392
393 /* Write a physical representation of tree node EXPR to output block
394 OB. If REF_P is true, the leaves of EXPR are emitted as references
395 via lto_output_tree_ref. IX is the index into the streamer cache
396 where EXPR is stored. */
397
398 static void
399 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
400 {
401 if (!lto_is_streamable (expr))
402 internal_error ("tree code %qs is not supported in LTO streams",
403 get_tree_code_name (TREE_CODE (expr)));
404
405 /* Write the header, containing everything needed to materialize
406 EXPR on the reading side. */
407 streamer_write_tree_header (ob, expr);
408
409 lto_write_tree_1 (ob, expr, ref_p);
410
411 /* Mark the end of EXPR. */
412 streamer_write_zero (ob);
413 }
414
415 /* Emit the physical representation of tree node EXPR to output block
416 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
417 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
418
419 static void
420 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
421 bool ref_p, bool this_ref_p)
422 {
423 unsigned ix;
424
425 gcc_checking_assert (expr != NULL_TREE
426 && !(this_ref_p && tree_is_indexable (expr)));
427
428 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
429 expr, hash, &ix);
430 gcc_assert (!exists_p);
431 if (streamer_handle_as_builtin_p (expr))
432 {
433 /* MD and NORMAL builtins do not need to be written out
434 completely as they are always instantiated by the
435 compiler on startup. The only builtins that need to
436 be written out are BUILT_IN_FRONTEND. For all other
437 builtins, we simply write the class and code. */
438 streamer_write_builtin (ob, expr);
439 }
440 else if (TREE_CODE (expr) == INTEGER_CST
441 && !TREE_OVERFLOW (expr))
442 {
443 /* Shared INTEGER_CST nodes are special because they need their
444 original type to be materialized by the reader (to implement
445 TYPE_CACHED_VALUES). */
446 streamer_write_integer_cst (ob, expr, ref_p);
447 }
448 else
449 {
450 /* This is the first time we see EXPR, write its fields
451 to OB. */
452 lto_write_tree (ob, expr, ref_p);
453 }
454 }
455
456 class DFS
457 {
458 public:
459 DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
460 bool single_p);
461 ~DFS ();
462
463 struct scc_entry
464 {
465 tree t;
466 hashval_t hash;
467 };
468 vec<scc_entry> sccstack;
469
470 private:
471 struct sccs
472 {
473 unsigned int dfsnum;
474 unsigned int low;
475 };
476
477 static int scc_entry_compare (const void *, const void *);
478
479 void DFS_write_tree_body (struct output_block *ob,
480 tree expr, sccs *expr_state, bool ref_p,
481 bool single_p);
482
483 void DFS_write_tree (struct output_block *ob, sccs *from_state,
484 tree expr, bool ref_p, bool this_ref_p,
485 bool single_p);
486 hashval_t
487 hash_scc (struct output_block *ob, unsigned first, unsigned size);
488
489 unsigned int next_dfs_num;
490 hash_map<tree, sccs *> sccstate;
491 struct obstack sccstate_obstack;
492 };
493
494 DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
495 bool single_p)
496 {
497 sccstack.create (0);
498 gcc_obstack_init (&sccstate_obstack);
499 next_dfs_num = 1;
500 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p, single_p);
501 }
502
503 DFS::~DFS ()
504 {
505 sccstack.release ();
506 obstack_free (&sccstate_obstack, NULL);
507 }
508
/* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
   DFS recurse for all tree edges originating from it.  The edges
   followed here are selected per contained tree structure (TS_*);
   their order matters, as it has to agree with what the reading side
   reconstructs.  */

void
DFS::DFS_write_tree_body (struct output_block *ob,
			  tree expr, sccs *expr_state, bool ref_p,
			  bool single_p)
{
/* Recurse into DEST with the same reference policy for it as for the
   other siblings of EXPR.  */
#define DFS_follow_tree_edge(DEST) \
  DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p, single_p)

  enum tree_code code;

  code = TREE_CODE (expr);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (TREE_CODE (expr) != IDENTIFIER_NODE)
	DFS_follow_tree_edge (TREE_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      /* Follow every element of a vector constant.  */
      for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
	DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      DFS_follow_tree_edge (TREE_REALPART (expr));
      DFS_follow_tree_edge (TREE_IMAGPART (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (expr)
	  && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
	  && ANON_AGGRNAME_P (DECL_NAME (expr)))
	;
      else
	DFS_follow_tree_edge (DECL_NAME (expr));
      DFS_follow_tree_edge (DECL_CONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      DFS_follow_tree_edge (DECL_SIZE (expr));
      DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));

      /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
	 special handling in LTO, it must be handled by streamer hooks.  */

      DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));

      /* Do not follow DECL_ABSTRACT_ORIGIN.  We cannot handle debug
	 information for early inlining so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      if ((TREE_CODE (expr) == VAR_DECL
	   || TREE_CODE (expr) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (expr))
	DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
      if (TREE_CODE (expr) == VAR_DECL)
	DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (TREE_CODE (expr) == TYPE_DECL)
	DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* Make sure we don't inadvertently set the assembler name.  */
      if (DECL_ASSEMBLER_NAME_SET_P (expr))
	DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
      DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
      DFS_follow_tree_edge (DECL_FCONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      DFS_follow_tree_edge (DECL_VINDEX (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      DFS_follow_tree_edge (TYPE_SIZE (expr));
      DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
      DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
      DFS_follow_tree_edge (TYPE_NAME (expr));
      /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
	 reconstructed during fixup.  */
      /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
	 during fixup.  */
      DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
      DFS_follow_tree_edge (TYPE_CONTEXT (expr));
      /* TYPE_CANONICAL is re-computed during type merging, so no need
	 to follow it here.  */
      DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	DFS_follow_tree_edge (TYPE_VALUES (expr));
      else if (TREE_CODE (expr) == ARRAY_TYPE)
	DFS_follow_tree_edge (TYPE_DOMAIN (expr));
      else if (RECORD_OR_UNION_TYPE_P (expr))
	for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
	  DFS_follow_tree_edge (t);
      else if (TREE_CODE (expr) == FUNCTION_TYPE
	       || TREE_CODE (expr) == METHOD_TYPE)
	DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));

      if (!POINTER_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_MINVAL (expr));
      DFS_follow_tree_edge (TYPE_MAXVAL (expr));
      if (RECORD_OR_UNION_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_BINFO (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      DFS_follow_tree_edge (TREE_PURPOSE (expr));
      DFS_follow_tree_edge (TREE_VALUE (expr));
      DFS_follow_tree_edge (TREE_CHAIN (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    {
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_OPERAND (expr, i));
      DFS_follow_tree_edge (TREE_BLOCK (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    {
      for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
	if (VAR_OR_FUNCTION_DECL_P (t)
	    && DECL_EXTERNAL (t))
	  /* We have to stream externals in the block chain as
	     non-references.  See also
	     tree-streamer-out.c:streamer_write_chain.  */
	  DFS_write_tree (ob, expr_state, t, ref_p, false, single_p);
	else
	  DFS_follow_tree_edge (t);

      DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));

      /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
	 handle - those that represent inlined function scopes.
	 For the rest, drop them on the floor instead of ICEing
	 in dwarf2out.c.  */
      if (inlined_function_outer_scope_p (expr))
	{
	  tree ultimate_origin = block_ultimate_origin (expr);
	  DFS_follow_tree_edge (ultimate_origin);
	}
      /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
	 information for early inlined BLOCKs so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
	 streaming time.  */

      /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
	 list is re-constructed from BLOCK_SUPERCONTEXT.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree t;

      /* Note that the number of BINFO slots has already been emitted in
	 EXPR's header (see streamer_write_tree_header) because this length
	 is needed to build the empty BINFO node on the reader side.  */
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
	DFS_follow_tree_edge (t);
      DFS_follow_tree_edge (BINFO_OFFSET (expr));
      DFS_follow_tree_edge (BINFO_VTABLE (expr));
      DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));

      /* The number of BINFO_BASE_ACCESSES has already been emitted in
	 EXPR's bitfield section.  */
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
	DFS_follow_tree_edge (t);

      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;

      /* Follow both the index and the value of each constructor
	 element.  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
	{
	  DFS_follow_tree_edge (index);
	  DFS_follow_tree_edge (value);
	}
    }

  if (code == OMP_CLAUSE)
    {
      int i;
      for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
	DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
      DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
    }

#undef DFS_follow_tree_edge
}
742
743 /* Return a hash value for the tree T.
744 CACHE holds hash values of trees outside current SCC. MAP, if non-NULL,
745 may hold hash values if trees inside current SCC. */
746
747 static hashval_t
748 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
749 {
750 inchash::hash hstate;
751
752 #define visit(SIBLING) \
753 do { \
754 unsigned ix; \
755 if (!SIBLING) \
756 hstate.add_int (0); \
757 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
758 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
759 else if (map) \
760 hstate.add_int (*map->get (SIBLING)); \
761 else \
762 hstate.add_int (1); \
763 } while (0)
764
765 /* Hash TS_BASE. */
766 enum tree_code code = TREE_CODE (t);
767 hstate.add_int (code);
768 if (!TYPE_P (t))
769 {
770 hstate.add_flag (TREE_SIDE_EFFECTS (t));
771 hstate.add_flag (TREE_CONSTANT (t));
772 hstate.add_flag (TREE_READONLY (t));
773 hstate.add_flag (TREE_PUBLIC (t));
774 }
775 hstate.add_flag (TREE_ADDRESSABLE (t));
776 hstate.add_flag (TREE_THIS_VOLATILE (t));
777 if (DECL_P (t))
778 hstate.add_flag (DECL_UNSIGNED (t));
779 else if (TYPE_P (t))
780 hstate.add_flag (TYPE_UNSIGNED (t));
781 if (TYPE_P (t))
782 hstate.add_flag (TYPE_ARTIFICIAL (t));
783 else
784 hstate.add_flag (TREE_NO_WARNING (t));
785 hstate.add_flag (TREE_NOTHROW (t));
786 hstate.add_flag (TREE_STATIC (t));
787 hstate.add_flag (TREE_PROTECTED (t));
788 hstate.add_flag (TREE_DEPRECATED (t));
789 if (code != TREE_BINFO)
790 hstate.add_flag (TREE_PRIVATE (t));
791 if (TYPE_P (t))
792 {
793 hstate.add_flag (TYPE_SATURATING (t));
794 hstate.add_flag (TYPE_ADDR_SPACE (t));
795 }
796 else if (code == SSA_NAME)
797 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
798 hstate.commit_flag ();
799
800 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
801 {
802 int i;
803 hstate.add_wide_int (TREE_INT_CST_NUNITS (t));
804 hstate.add_wide_int (TREE_INT_CST_EXT_NUNITS (t));
805 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
806 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
807 }
808
809 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
810 {
811 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
812 hstate.add_flag (r.cl);
813 hstate.add_flag (r.sign);
814 hstate.add_flag (r.signalling);
815 hstate.add_flag (r.canonical);
816 hstate.commit_flag ();
817 hstate.add_int (r.uexp);
818 hstate.add (r.sig, sizeof (r.sig));
819 }
820
821 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
822 {
823 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
824 hstate.add_int (f.mode);
825 hstate.add_int (f.data.low);
826 hstate.add_int (f.data.high);
827 }
828
829 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
830 {
831 hstate.add_wide_int (DECL_MODE (t));
832 hstate.add_flag (DECL_NONLOCAL (t));
833 hstate.add_flag (DECL_VIRTUAL_P (t));
834 hstate.add_flag (DECL_IGNORED_P (t));
835 hstate.add_flag (DECL_ABSTRACT_P (t));
836 hstate.add_flag (DECL_ARTIFICIAL (t));
837 hstate.add_flag (DECL_USER_ALIGN (t));
838 hstate.add_flag (DECL_PRESERVE_P (t));
839 hstate.add_flag (DECL_EXTERNAL (t));
840 hstate.add_flag (DECL_GIMPLE_REG_P (t));
841 hstate.commit_flag ();
842 hstate.add_int (DECL_ALIGN (t));
843 if (code == LABEL_DECL)
844 {
845 hstate.add_int (EH_LANDING_PAD_NR (t));
846 hstate.add_int (LABEL_DECL_UID (t));
847 }
848 else if (code == FIELD_DECL)
849 {
850 hstate.add_flag (DECL_PACKED (t));
851 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
852 hstate.add_int (DECL_OFFSET_ALIGN (t));
853 }
854 else if (code == VAR_DECL)
855 {
856 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
857 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
858 }
859 if (code == RESULT_DECL
860 || code == PARM_DECL
861 || code == VAR_DECL)
862 {
863 hstate.add_flag (DECL_BY_REFERENCE (t));
864 if (code == VAR_DECL
865 || code == PARM_DECL)
866 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
867 }
868 hstate.commit_flag ();
869 }
870
871 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
872 hstate.add_int (DECL_REGISTER (t));
873
874 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
875 {
876 hstate.add_flag (DECL_COMMON (t));
877 hstate.add_flag (DECL_DLLIMPORT_P (t));
878 hstate.add_flag (DECL_WEAK (t));
879 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
880 hstate.add_flag (DECL_COMDAT (t));
881 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
882 hstate.add_int (DECL_VISIBILITY (t));
883 if (code == VAR_DECL)
884 {
885 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
886 hstate.add_flag (DECL_HARD_REGISTER (t));
887 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
888 }
889 if (TREE_CODE (t) == FUNCTION_DECL)
890 {
891 hstate.add_flag (DECL_FINAL_P (t));
892 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
893 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
894 }
895 hstate.commit_flag ();
896 }
897
898 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
899 {
900 hstate.add_int (DECL_BUILT_IN_CLASS (t));
901 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
902 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
903 hstate.add_flag (DECL_UNINLINABLE (t));
904 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
905 hstate.add_flag (DECL_IS_NOVOPS (t));
906 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
907 hstate.add_flag (DECL_IS_MALLOC (t));
908 hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
909 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
910 hstate.add_flag (DECL_STATIC_CHAIN (t));
911 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
912 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
913 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
914 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
915 hstate.add_flag (DECL_PURE_P (t));
916 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
917 hstate.commit_flag ();
918 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
919 hstate.add_int (DECL_FUNCTION_CODE (t));
920 }
921
922 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
923 {
924 hstate.add_wide_int (TYPE_MODE (t));
925 hstate.add_flag (TYPE_STRING_FLAG (t));
926 hstate.add_flag (TYPE_NO_FORCE_BLK (t));
927 hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
928 hstate.add_flag (TYPE_PACKED (t));
929 hstate.add_flag (TYPE_RESTRICT (t));
930 hstate.add_flag (TYPE_USER_ALIGN (t));
931 hstate.add_flag (TYPE_READONLY (t));
932 if (RECORD_OR_UNION_TYPE_P (t))
933 {
934 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
935 hstate.add_flag (TYPE_FINAL_P (t));
936 }
937 else if (code == ARRAY_TYPE)
938 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
939 hstate.commit_flag ();
940 hstate.add_int (TYPE_PRECISION (t));
941 hstate.add_int (TYPE_ALIGN (t));
942 hstate.add_int ((TYPE_ALIAS_SET (t) == 0
943 || (!in_lto_p
944 && get_alias_set (t) == 0))
945 ? 0 : -1);
946 }
947
948 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
949 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
950 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
951
952 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
953 hstate.add_wide_int (cl_target_option_hash (TREE_TARGET_OPTION (t)));
954
955 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
956 hstate.add_wide_int (cl_optimization_hash (TREE_OPTIMIZATION (t)));
957
958 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
959 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
960
961 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
962 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
963
964 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
965 {
966 if (code != IDENTIFIER_NODE)
967 visit (TREE_TYPE (t));
968 }
969
970 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
971 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
972 visit (VECTOR_CST_ELT (t, i));
973
974 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
975 {
976 visit (TREE_REALPART (t));
977 visit (TREE_IMAGPART (t));
978 }
979
980 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
981 {
982 /* Drop names that were created for anonymous entities. */
983 if (DECL_NAME (t)
984 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
985 && ANON_AGGRNAME_P (DECL_NAME (t)))
986 ;
987 else
988 visit (DECL_NAME (t));
989 if (DECL_FILE_SCOPE_P (t))
990 ;
991 else
992 visit (DECL_CONTEXT (t));
993 }
994
995 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
996 {
997 visit (DECL_SIZE (t));
998 visit (DECL_SIZE_UNIT (t));
999 visit (DECL_ATTRIBUTES (t));
1000 if ((code == VAR_DECL
1001 || code == PARM_DECL)
1002 && DECL_HAS_VALUE_EXPR_P (t))
1003 visit (DECL_VALUE_EXPR (t));
1004 if (code == VAR_DECL
1005 && DECL_HAS_DEBUG_EXPR_P (t))
1006 visit (DECL_DEBUG_EXPR (t));
1007 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
1008 be able to call get_symbol_initial_value. */
1009 }
1010
1011 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1012 {
1013 if (code == TYPE_DECL)
1014 visit (DECL_ORIGINAL_TYPE (t));
1015 }
1016
1017 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1018 {
1019 if (DECL_ASSEMBLER_NAME_SET_P (t))
1020 visit (DECL_ASSEMBLER_NAME (t));
1021 }
1022
1023 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1024 {
1025 visit (DECL_FIELD_OFFSET (t));
1026 visit (DECL_BIT_FIELD_TYPE (t));
1027 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1028 visit (DECL_FIELD_BIT_OFFSET (t));
1029 visit (DECL_FCONTEXT (t));
1030 }
1031
1032 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1033 {
1034 visit (DECL_VINDEX (t));
1035 visit (DECL_FUNCTION_PERSONALITY (t));
1036 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1037 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1038 }
1039
1040 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1041 {
1042 visit (TYPE_SIZE (t));
1043 visit (TYPE_SIZE_UNIT (t));
1044 visit (TYPE_ATTRIBUTES (t));
1045 visit (TYPE_NAME (t));
1046 visit (TYPE_MAIN_VARIANT (t));
1047 if (TYPE_FILE_SCOPE_P (t))
1048 ;
1049 else
1050 visit (TYPE_CONTEXT (t));
1051 visit (TYPE_STUB_DECL (t));
1052 }
1053
1054 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1055 {
1056 if (code == ENUMERAL_TYPE)
1057 visit (TYPE_VALUES (t));
1058 else if (code == ARRAY_TYPE)
1059 visit (TYPE_DOMAIN (t));
1060 else if (RECORD_OR_UNION_TYPE_P (t))
1061 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1062 visit (f);
1063 else if (code == FUNCTION_TYPE
1064 || code == METHOD_TYPE)
1065 visit (TYPE_ARG_TYPES (t));
1066 if (!POINTER_TYPE_P (t))
1067 visit (TYPE_MINVAL (t));
1068 visit (TYPE_MAXVAL (t));
1069 if (RECORD_OR_UNION_TYPE_P (t))
1070 visit (TYPE_BINFO (t));
1071 }
1072
1073 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1074 {
1075 visit (TREE_PURPOSE (t));
1076 visit (TREE_VALUE (t));
1077 visit (TREE_CHAIN (t));
1078 }
1079
1080 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1081 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1082 visit (TREE_VEC_ELT (t, i));
1083
1084 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1085 {
1086 hstate.add_wide_int (TREE_OPERAND_LENGTH (t));
1087 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1088 visit (TREE_OPERAND (t, i));
1089 }
1090
1091 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1092 {
1093 unsigned i;
1094 tree b;
1095 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1096 visit (b);
1097 visit (BINFO_OFFSET (t));
1098 visit (BINFO_VTABLE (t));
1099 visit (BINFO_VPTR_FIELD (t));
1100 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1101 visit (b);
1102 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1103 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1104 }
1105
1106 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1107 {
1108 unsigned i;
1109 tree index, value;
1110 hstate.add_wide_int (CONSTRUCTOR_NELTS (t));
1111 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1112 {
1113 visit (index);
1114 visit (value);
1115 }
1116 }
1117
1118 if (code == OMP_CLAUSE)
1119 {
1120 int i;
1121 HOST_WIDE_INT val;
1122
1123 hstate.add_wide_int (OMP_CLAUSE_CODE (t));
1124 switch (OMP_CLAUSE_CODE (t))
1125 {
1126 case OMP_CLAUSE_DEFAULT:
1127 val = OMP_CLAUSE_DEFAULT_KIND (t);
1128 break;
1129 case OMP_CLAUSE_SCHEDULE:
1130 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1131 break;
1132 case OMP_CLAUSE_DEPEND:
1133 val = OMP_CLAUSE_DEPEND_KIND (t);
1134 break;
1135 case OMP_CLAUSE_MAP:
1136 val = OMP_CLAUSE_MAP_KIND (t);
1137 break;
1138 case OMP_CLAUSE_PROC_BIND:
1139 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1140 break;
1141 case OMP_CLAUSE_REDUCTION:
1142 val = OMP_CLAUSE_REDUCTION_CODE (t);
1143 break;
1144 default:
1145 val = 0;
1146 break;
1147 }
1148 hstate.add_wide_int (val);
1149 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1150 visit (OMP_CLAUSE_OPERAND (t, i));
1151 visit (OMP_CLAUSE_CHAIN (t));
1152 }
1153
1154 return hstate.end ();
1155
1156 #undef visit
1157 }
1158
1159 /* Compare two SCC entries by their hash value for qsorting them. */
1160
1161 int
1162 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1163 {
1164 const scc_entry *p1 = (const scc_entry *) p1_;
1165 const scc_entry *p2 = (const scc_entry *) p2_;
1166 if (p1->hash < p2->hash)
1167 return -1;
1168 else if (p1->hash > p2->hash)
1169 return 1;
1170 return 0;
1171 }
1172
/* Return a hash value for the SCC on the SCC stack from FIRST with
   size SIZE.  The hash is intended to be stable across translation
   units so identical SCCs can be merged at WPA time.  */

hashval_t
DFS::hash_scc (struct output_block *ob,
	       unsigned first, unsigned size)
{
  unsigned int last_classes = 0, iterations = 0;

  /* Compute hash values for the SCC members.  */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash = hash_tree (ob->writer_cache, NULL,
					sccstack[first+i].t);

  /* A singleton SCC needs no stabilization work.  */
  if (size == 1)
    return sccstack[first].hash;

  /* We aim to get unique hash for every tree within SCC and compute hash
     value of the whole SCC by combining all values together in a stable
     (entry point independent) order.  This guarantees that the same SCC
     regions within different translation units will get the same hash
     values and therefore will be merged at WPA time.

     Often the hashes are already unique.  In that case we compute the SCC
     hash by combining individual hash values in an increasing order.

     If there are duplicates we seek at least one tree with unique hash (and
     pick one with minimal hash and this property).  Then we obtain a stable
     order by DFS walk starting from this unique tree and then use the index
     within this order to make individual hash values unique.

     If there is no tree with unique hash, we iteratively propagate the hash
     values across the internal edges of the SCC.  This usually quickly leads
     to unique hashes.  Consider, for example, an SCC containing two pointers
     that are identical except for the type they point to and assume that
     these types are also part of the SCC.  The propagation will add the
     points-to type information into their hash values.  */
  do
    {
      /* Sort the SCC so we can easily check for uniqueness.  */
      qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);

      unsigned int classes = 1;
      int firstunique = -1;

      /* Find the tree with lowest unique hash (if it exists) and compute
	 the number of equivalence classes.  */
      if (sccstack[first].hash != sccstack[first+1].hash)
	firstunique = 0;
      for (unsigned i = 1; i < size; ++i)
	if (sccstack[first+i-1].hash != sccstack[first+i].hash)
	  {
	    classes++;
	    if (firstunique == -1
		&& (i == size - 1
		    || sccstack[first+i+1].hash != sccstack[first+i].hash))
	      firstunique = i;
	  }

      /* If we found a tree with unique hash, stop the iteration.  */
      if (firstunique != -1
	  /* Also terminate if we run out of iterations or if the number of
	     equivalence classes is no longer increasing.
	     For example a cyclic list of trees that are all equivalent will
	     never have unique entry point; we however do not build such SCCs
	     in our IL.  */
	  || classes <= last_classes || iterations > 16)
	{
	  hashval_t scc_hash;

	  /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
	     starting from FIRSTUNIQUE to obtain a stable order.  */
	  if (classes != size && firstunique != -1)
	    {
	      hash_map <tree, hashval_t> map(size*2);

	      /* Store hash values into a map, so we can associate them with
		 the reordered SCC.  */
	      for (unsigned i = 0; i < size; ++i)
		map.put (sccstack[first+i].t, sccstack[first+i].hash);

	      /* Re-walk the SCC from the unique entry; the resulting stack
		 order is independent of the original entry point.  */
	      DFS again (ob, sccstack[first+firstunique].t, false, false, true);
	      gcc_assert (again.sccstack.length () == size);

	      memcpy (sccstack.address () + first,
		      again.sccstack.address (),
		      sizeof (scc_entry) * size);

	      /* Update hash values of individual members by hashing in the
		 index within the stable order.  This ensures uniqueness.
		 Also compute the scc_hash by mixing in all hash values in the
		 stable order we obtained.  */
	      sccstack[first].hash = *map.get (sccstack[first].t);
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		{
		  sccstack[first+i].hash
		    = iterative_hash_hashval_t (i,
						*map.get (sccstack[first+i].t));
		  scc_hash = iterative_hash_hashval_t (scc_hash,
						       sccstack[first+i].hash);
		}
	    }
	  /* If we got unique hash values for each tree, then sort already
	     ensured entry point independent order.  Only compute the final
	     scc hash.

	     If we failed to find the unique entry point, we go by the same
	     route.  We will eventually introduce unwanted hash conflicts.  */
	  else
	    {
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		scc_hash = iterative_hash_hashval_t (scc_hash,
						     sccstack[first+i].hash);
	      /* We cannot 100% guarantee that the hash will not conflict
		 in a way so the unique hash is not found.  This however
		 should be an extremely rare situation.  ICE for now so
		 possible issues are found and evaluated.  */
	      gcc_checking_assert (classes == size);
	    }

	  /* To avoid conflicts across SCCs, iteratively hash the whole SCC
	     hash into the hash of each of the elements.  */
	  for (unsigned i = 0; i < size; ++i)
	    sccstack[first+i].hash
	      = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
	  return scc_hash;
	}

      last_classes = classes;
      iterations++;

      /* We failed to identify the entry point; propagate hash values across
	 the edges.  */
      {
	hash_map <tree, hashval_t> map(size*2);
	for (unsigned i = 0; i < size; ++i)
	  map.put (sccstack[first+i].t, sccstack[first+i].hash);

	for (unsigned i = 0; i < size; i++)
	  sccstack[first+i].hash = hash_tree (ob->writer_cache, &map,
					      sccstack[first+i].t);
      }
    }
  while (true);
}
1321
/* DFS walk EXPR and stream SCCs of tree bodies if they are not
   already in the streamer cache.  Main routine called for
   each visit of EXPR.  This is a Tarjan-style SCC walk: CSTATE->dfsnum
   and CSTATE->low track discovery order and the lowest reachable
   discovery number, and an SCC is complete when low == dfsnum.  */

void
DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
		     tree expr, bool ref_p, bool this_ref_p, bool single_p)
{
  unsigned ix;

  /* Handle special cases.  */
  if (expr == NULL_TREE)
    return;

  /* Do not DFS walk into indexable trees.  */
  if (this_ref_p && tree_is_indexable (expr))
    return;

  /* Check if we already streamed EXPR.  */
  if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
    return;

  sccs **slot = &sccstate.get_or_insert (expr);
  sccs *cstate = *slot;
  if (!cstate)
    {
      scc_entry e = { expr, 0 };
      /* Not yet visited.  DFS recurse and push it onto the stack.  */
      *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
      sccstack.safe_push (e);
      cstate->dfsnum = next_dfs_num++;
      cstate->low = cstate->dfsnum;

      /* Builtins are streamed by reference, so no body walk is needed.  */
      if (streamer_handle_as_builtin_p (expr))
	;
      else if (TREE_CODE (expr) == INTEGER_CST
	       && !TREE_OVERFLOW (expr))
	/* INTEGER_CSTs only need their type walked; they are streamed
	   as singletons below.  */
	DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p, single_p);
      else
	{
	  DFS_write_tree_body (ob, expr, cstate, ref_p, single_p);

	  /* Walk any LTO-specific edges.  */
	  if (DECL_P (expr)
	      && TREE_CODE (expr) != FUNCTION_DECL
	      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
	    {
	      /* Handle DECL_INITIAL for symbols.  */
	      tree initial = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
						       expr);
	      DFS_write_tree (ob, cstate, initial, ref_p, ref_p, single_p);
	    }
	}

      /* See if we found an SCC.  */
      if (cstate->low == cstate->dfsnum)
	{
	  unsigned first, size;
	  tree x;

	  /* If we are re-walking a single leaf-SCC just return and
	     let the caller access the sccstack.  */
	  if (single_p)
	    return;

	  /* Pop the SCC and compute its size.  */
	  first = sccstack.length ();
	  do
	    {
	      x = sccstack[--first].t;
	    }
	  while (x != expr);
	  size = sccstack.length () - first;

	  /* No need to compute hashes for LTRANS units, we don't perform
	     any merging there.  */
	  hashval_t scc_hash = 0;
	  unsigned scc_entry_len = 0;
	  if (!flag_wpa)
	    {
	      scc_hash = hash_scc (ob, first, size);

	      /* Put the entries with the least number of collisions first.  */
	      unsigned entry_start = 0;
	      scc_entry_len = size + 1;
	      for (unsigned i = 0; i < size;)
		{
		  unsigned from = i;
		  for (i = i + 1; i < size
		       && (sccstack[first + i].hash
			   == sccstack[first + from].hash); ++i)
		    ;
		  if (i - from < scc_entry_len)
		    {
		      scc_entry_len = i - from;
		      entry_start = from;
		    }
		}
	      /* Rotate the least-colliding run to the front of the SCC.  */
	      for (unsigned i = 0; i < scc_entry_len; ++i)
		{
		  scc_entry tem = sccstack[first + i];
		  sccstack[first + i] = sccstack[first + entry_start + i];
		  sccstack[first + entry_start + i] = tem;
		}

	      if (scc_entry_len == 1)
		; /* We already sorted SCC deterministically in hash_scc.  */
	      else
		/* Check that we have only one SCC.
		   Naturally we may have conflicts if hash function is not
		   strong enough.  Lets see how far this gets.  */
		{
#ifdef ENABLE_CHECKING
		  gcc_unreachable ();
#endif
		}
	    }

	  /* Write LTO_tree_scc.  */
	  streamer_write_record_start (ob, LTO_tree_scc);
	  streamer_write_uhwi (ob, size);
	  streamer_write_uhwi (ob, scc_hash);

	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
	     All INTEGER_CSTs need to be handled this way as we need
	     their type to materialize them.  Also builtins are handled
	     this way.
	     ??? We still wrap these in LTO_tree_scc so at the
	     input side we can properly identify the tree we want
	     to ultimately return.  */
	  if (size == 1)
	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
	  else
	    {
	      /* Write the size of the SCC entry candidates.  */
	      streamer_write_uhwi (ob, scc_entry_len);

	      /* Write all headers and populate the streamer cache.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  hashval_t hash = sccstack[first+i].hash;
		  tree t = sccstack[first+i].t;
		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
							      t, hash, &ix);
		  gcc_assert (!exists_p);

		  if (!lto_is_streamable (t))
		    internal_error ("tree code %qs is not supported "
				    "in LTO streams",
				    get_tree_code_name (TREE_CODE (t)));

		  gcc_checking_assert (!streamer_handle_as_builtin_p (t));

		  /* Write the header, containing everything needed to
		     materialize EXPR on the reading side.  */
		  streamer_write_tree_header (ob, t);
		}

	      /* Write the bitpacks and tree references.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);

		  /* Mark the end of the tree.  */
		  streamer_write_zero (ob);
		}
	    }

	  /* Finally truncate the vector.  */
	  sccstack.truncate (first);

	  if (from_state)
	    from_state->low = MIN (from_state->low, cstate->low);
	  return;
	}

      if (from_state)
	from_state->low = MIN (from_state->low, cstate->low);
    }
  /* EXPR is on the stack of an SCC in progress; propagate its discovery
     number into the caller's low link.  */
  gcc_checking_assert (from_state);
  if (cstate->dfsnum < from_state->dfsnum)
    from_state->low = MIN (cstate->dfsnum, from_state->low);
}
1505
1506
/* Emit the physical representation of tree node EXPR to output block
   OB.  If THIS_REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.  */

void
lto_output_tree (struct output_block *ob, tree expr,
		 bool ref_p, bool this_ref_p)
{
  unsigned ix;
  bool existed_p;

  /* NULL is streamed as an explicit LTO_null record.  */
  if (expr == NULL_TREE)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  /* Indexable trees are emitted as references, not bodies.  */
  if (this_ref_p && tree_is_indexable (expr))
    {
      lto_output_tree_ref (ob, expr);
      return;
    }

  existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
  if (existed_p)
    {
      /* If a node has already been streamed out, make sure that
	 we don't write it more than once.  Otherwise, the reader
	 will instantiate two different nodes for the same object.  */
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
  else
    {
      /* This is the first time we see EXPR, write all reachable
	 trees to OB.  */
      static bool in_dfs_walk;

      /* Protect against recursion which means disconnect between
	 what tree edges we walk in the DFS walk and what edges
	 we stream out.  */
      gcc_assert (!in_dfs_walk);

      /* Start the DFS walk; it streams every SCC reachable from EXPR
	 and populates the writer cache as a side effect.  */
      in_dfs_walk = true;
      DFS (ob, expr, ref_p, this_ref_p, false);
      in_dfs_walk = false;

      /* Finally append a reference to the tree we were writing.
	 ??? If expr ended up as a singleton we could have
	 inlined it here and avoid outputting a reference.  */
      existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
      gcc_assert (existed_p);
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
}
1572
1573
1574 /* Output to OB a list of try/catch handlers starting with FIRST. */
1575
1576 static void
1577 output_eh_try_list (struct output_block *ob, eh_catch first)
1578 {
1579 eh_catch n;
1580
1581 for (n = first; n; n = n->next_catch)
1582 {
1583 streamer_write_record_start (ob, LTO_eh_catch);
1584 stream_write_tree (ob, n->type_list, true);
1585 stream_write_tree (ob, n->filter_list, true);
1586 stream_write_tree (ob, n->label, true);
1587 }
1588
1589 streamer_write_record_start (ob, LTO_null);
1590 }
1591
1592
1593 /* Output EH region R in function FN to OB. CURR_RN is the slot index
1594 that is being emitted in FN->EH->REGION_ARRAY. This is used to
1595 detect EH region sharing. */
1596
1597 static void
1598 output_eh_region (struct output_block *ob, eh_region r)
1599 {
1600 enum LTO_tags tag;
1601
1602 if (r == NULL)
1603 {
1604 streamer_write_record_start (ob, LTO_null);
1605 return;
1606 }
1607
1608 if (r->type == ERT_CLEANUP)
1609 tag = LTO_ert_cleanup;
1610 else if (r->type == ERT_TRY)
1611 tag = LTO_ert_try;
1612 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1613 tag = LTO_ert_allowed_exceptions;
1614 else if (r->type == ERT_MUST_NOT_THROW)
1615 tag = LTO_ert_must_not_throw;
1616 else
1617 gcc_unreachable ();
1618
1619 streamer_write_record_start (ob, tag);
1620 streamer_write_hwi (ob, r->index);
1621
1622 if (r->outer)
1623 streamer_write_hwi (ob, r->outer->index);
1624 else
1625 streamer_write_zero (ob);
1626
1627 if (r->inner)
1628 streamer_write_hwi (ob, r->inner->index);
1629 else
1630 streamer_write_zero (ob);
1631
1632 if (r->next_peer)
1633 streamer_write_hwi (ob, r->next_peer->index);
1634 else
1635 streamer_write_zero (ob);
1636
1637 if (r->type == ERT_TRY)
1638 {
1639 output_eh_try_list (ob, r->u.eh_try.first_catch);
1640 }
1641 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1642 {
1643 stream_write_tree (ob, r->u.allowed.type_list, true);
1644 stream_write_tree (ob, r->u.allowed.label, true);
1645 streamer_write_uhwi (ob, r->u.allowed.filter);
1646 }
1647 else if (r->type == ERT_MUST_NOT_THROW)
1648 {
1649 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1650 bitpack_d bp = bitpack_create (ob->main_stream);
1651 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1652 streamer_write_bitpack (&bp);
1653 }
1654
1655 if (r->landing_pads)
1656 streamer_write_hwi (ob, r->landing_pads->index);
1657 else
1658 streamer_write_zero (ob);
1659 }
1660
1661
1662 /* Output landing pad LP to OB. */
1663
1664 static void
1665 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1666 {
1667 if (lp == NULL)
1668 {
1669 streamer_write_record_start (ob, LTO_null);
1670 return;
1671 }
1672
1673 streamer_write_record_start (ob, LTO_eh_landing_pad);
1674 streamer_write_hwi (ob, lp->index);
1675 if (lp->next_lp)
1676 streamer_write_hwi (ob, lp->next_lp->index);
1677 else
1678 streamer_write_zero (ob);
1679
1680 if (lp->region)
1681 streamer_write_hwi (ob, lp->region->index);
1682 else
1683 streamer_write_zero (ob);
1684
1685 stream_write_tree (ob, lp->post_landing_pad, true);
1686 }
1687
1688
1689 /* Output the existing eh_table to OB. */
1690
1691 static void
1692 output_eh_regions (struct output_block *ob, struct function *fn)
1693 {
1694 if (fn->eh && fn->eh->region_tree)
1695 {
1696 unsigned i;
1697 eh_region eh;
1698 eh_landing_pad lp;
1699 tree ttype;
1700
1701 streamer_write_record_start (ob, LTO_eh_table);
1702
1703 /* Emit the index of the root of the EH region tree. */
1704 streamer_write_hwi (ob, fn->eh->region_tree->index);
1705
1706 /* Emit all the EH regions in the region array. */
1707 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1708 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1709 output_eh_region (ob, eh);
1710
1711 /* Emit all landing pads. */
1712 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1713 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1714 output_eh_lp (ob, lp);
1715
1716 /* Emit all the runtime type data. */
1717 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1718 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1719 stream_write_tree (ob, ttype, true);
1720
1721 /* Emit the table of action chains. */
1722 if (targetm.arm_eabi_unwinder)
1723 {
1724 tree t;
1725 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1726 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1727 stream_write_tree (ob, t, true);
1728 }
1729 else
1730 {
1731 uchar c;
1732 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1733 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1734 streamer_write_char_stream (ob->main_stream, c);
1735 }
1736 }
1737
1738 /* The LTO_null either terminates the record or indicates that there
1739 are no eh_records at all. */
1740 streamer_write_record_start (ob, LTO_null);
1741 }
1742
1743
1744 /* Output all of the active ssa names to the ssa_names stream. */
1745
1746 static void
1747 output_ssa_names (struct output_block *ob, struct function *fn)
1748 {
1749 unsigned int i, len;
1750
1751 len = vec_safe_length (SSANAMES (fn));
1752 streamer_write_uhwi (ob, len);
1753
1754 for (i = 1; i < len; i++)
1755 {
1756 tree ptr = (*SSANAMES (fn))[i];
1757
1758 if (ptr == NULL_TREE
1759 || SSA_NAME_IN_FREE_LIST (ptr)
1760 || virtual_operand_p (ptr))
1761 continue;
1762
1763 streamer_write_uhwi (ob, i);
1764 streamer_write_char_stream (ob->main_stream,
1765 SSA_NAME_IS_DEFAULT_DEF (ptr));
1766 if (SSA_NAME_VAR (ptr))
1767 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1768 else
1769 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1770 stream_write_tree (ob, TREE_TYPE (ptr), true);
1771 }
1772
1773 streamer_write_zero (ob);
1774 }
1775
1776
1777 /* Output a wide-int. */
1778
1779 static void
1780 streamer_write_wi (struct output_block *ob,
1781 const widest_int &w)
1782 {
1783 int len = w.get_len ();
1784
1785 streamer_write_uhwi (ob, w.get_precision ());
1786 streamer_write_uhwi (ob, len);
1787 for (int i = 0; i < len; i++)
1788 streamer_write_hwi (ob, w.elt (i));
1789 }
1790
1791
1792 /* Output the cfg. */
1793
1794 static void
1795 output_cfg (struct output_block *ob, struct function *fn)
1796 {
1797 struct lto_output_stream *tmp_stream = ob->main_stream;
1798 basic_block bb;
1799
1800 ob->main_stream = ob->cfg_stream;
1801
1802 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1803 profile_status_for_fn (fn));
1804
1805 /* Output the number of the highest basic block. */
1806 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1807
1808 FOR_ALL_BB_FN (bb, fn)
1809 {
1810 edge_iterator ei;
1811 edge e;
1812
1813 streamer_write_hwi (ob, bb->index);
1814
1815 /* Output the successors and the edge flags. */
1816 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1817 FOR_EACH_EDGE (e, ei, bb->succs)
1818 {
1819 streamer_write_uhwi (ob, e->dest->index);
1820 streamer_write_hwi (ob, e->probability);
1821 streamer_write_gcov_count (ob, e->count);
1822 streamer_write_uhwi (ob, e->flags);
1823 }
1824 }
1825
1826 streamer_write_hwi (ob, -1);
1827
1828 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1829 while (bb->next_bb)
1830 {
1831 streamer_write_hwi (ob, bb->next_bb->index);
1832 bb = bb->next_bb;
1833 }
1834
1835 streamer_write_hwi (ob, -1);
1836
1837 /* ??? The cfgloop interface is tied to cfun. */
1838 gcc_assert (cfun == fn);
1839
1840 /* Output the number of loops. */
1841 streamer_write_uhwi (ob, number_of_loops (fn));
1842
1843 /* Output each loop, skipping the tree root which has number zero. */
1844 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1845 {
1846 struct loop *loop = get_loop (fn, i);
1847
1848 /* Write the index of the loop header. That's enough to rebuild
1849 the loop tree on the reader side. Stream -1 for an unused
1850 loop entry. */
1851 if (!loop)
1852 {
1853 streamer_write_hwi (ob, -1);
1854 continue;
1855 }
1856 else
1857 streamer_write_hwi (ob, loop->header->index);
1858
1859 /* Write everything copy_loop_info copies. */
1860 streamer_write_enum (ob->main_stream,
1861 loop_estimation, EST_LAST, loop->estimate_state);
1862 streamer_write_hwi (ob, loop->any_upper_bound);
1863 if (loop->any_upper_bound)
1864 streamer_write_wi (ob, loop->nb_iterations_upper_bound);
1865 streamer_write_hwi (ob, loop->any_estimate);
1866 if (loop->any_estimate)
1867 streamer_write_wi (ob, loop->nb_iterations_estimate);
1868
1869 /* Write OMP SIMD related info. */
1870 streamer_write_hwi (ob, loop->safelen);
1871 streamer_write_hwi (ob, loop->dont_vectorize);
1872 streamer_write_hwi (ob, loop->force_vectorize);
1873 stream_write_tree (ob, loop->simduid, true);
1874 }
1875
1876 ob->main_stream = tmp_stream;
1877 }
1878
1879
1880 /* Create the header in the file using OB. If the section type is for
1881 a function, set FN to the decl for that function. */
1882
1883 void
1884 produce_asm (struct output_block *ob, tree fn)
1885 {
1886 enum lto_section_type section_type = ob->section_type;
1887 struct lto_function_header header;
1888 char *section_name;
1889
1890 if (section_type == LTO_section_function_body)
1891 {
1892 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1893 section_name = lto_get_section_name (section_type, name, NULL);
1894 }
1895 else
1896 section_name = lto_get_section_name (section_type, NULL, NULL);
1897
1898 lto_begin_section (section_name, !flag_wpa);
1899 free (section_name);
1900
1901 /* The entire header is stream computed here. */
1902 memset (&header, 0, sizeof (struct lto_function_header));
1903
1904 /* Write the header. */
1905 header.major_version = LTO_major_version;
1906 header.minor_version = LTO_minor_version;
1907
1908 if (section_type == LTO_section_function_body)
1909 header.cfg_size = ob->cfg_stream->total_size;
1910 header.main_size = ob->main_stream->total_size;
1911 header.string_size = ob->string_stream->total_size;
1912 lto_write_data (&header, sizeof header);
1913
1914 /* Put all of the gimple and the string table out the asm file as a
1915 block of text. */
1916 if (section_type == LTO_section_function_body)
1917 lto_write_stream (ob->cfg_stream);
1918 lto_write_stream (ob->main_stream);
1919 lto_write_stream (ob->string_stream);
1920
1921 lto_end_section ();
1922 }
1923
1924
/* Output the base body of struct function FN using output block OB.
   NOTE(review): the order of the bp_pack_value calls below defines the
   on-disk bit layout and presumably must stay in sync with the reader's
   unpacking order — verify against the corresponding input routine
   before reordering anything here.  */

static void
output_struct_function_base (struct output_block *ob, struct function *fn)
{
  struct bitpack_d bp;
  unsigned i;
  tree t;

  /* Output the static chain and non-local goto save area.  */
  stream_write_tree (ob, fn->static_chain_decl, true);
  stream_write_tree (ob, fn->nonlocal_goto_save_area, true);

  /* Output all the local variables in the function.  */
  streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
  FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
    stream_write_tree (ob, t, true);

  /* Output current IL state of the function.  */
  streamer_write_uhwi (ob, fn->curr_properties);

  /* Write all the attributes for FN as a single bitpack.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, fn->is_thunk, 1);
  bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
  bp_pack_value (&bp, fn->returns_pcc_struct, 1);
  bp_pack_value (&bp, fn->returns_struct, 1);
  bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
  bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
  bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
  bp_pack_value (&bp, fn->after_inlining, 1);
  bp_pack_value (&bp, fn->stdarg, 1);
  bp_pack_value (&bp, fn->has_nonlocal_label, 1);
  bp_pack_value (&bp, fn->calls_alloca, 1);
  bp_pack_value (&bp, fn->calls_setjmp, 1);
  bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
  bp_pack_value (&bp, fn->has_simduid_loops, 1);
  bp_pack_value (&bp, fn->va_list_fpr_size, 8);
  bp_pack_value (&bp, fn->va_list_gpr_size, 8);
  bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);

  /* Output the function start and end loci.  */
  stream_output_location (ob, &bp, fn->function_start_locus);
  stream_output_location (ob, &bp, fn->function_end_locus);

  streamer_write_bitpack (&bp);
}
1972
1973
/* Output the body of function NODE->DECL to its own
   LTO_section_function_body section: the result/argument decls,
   DECL_INITIAL, and — when the function has a GIMPLE body — the
   struct function base, SSA names, EH regions, all basic blocks and
   the CFG.  Statement UIDs are renumbered first so the reader can
   reproduce them without streaming them.  */

static void
output_function (struct cgraph_node *node)
{
  tree function;
  struct function *fn;
  basic_block bb;
  struct output_block *ob;

  function = node->decl;
  fn = DECL_STRUCT_FUNCTION (function);
  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->symbol = node;

  /* We must not be inside any other function here.  */
  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  /* Set current_function_decl and cfun.  */
  push_cfun (fn);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  streamer_write_record_start (ob, LTO_function);

  /* Output decls for parameters and args.  */
  stream_write_tree (ob, DECL_RESULT (function), true);
  streamer_write_chain (ob, DECL_ARGUMENTS (function), true);

  /* Output DECL_INITIAL for the function, which contains the tree of
     lexical scopes.  */
  stream_write_tree (ob, DECL_INITIAL (function), true);

  /* We also stream abstract functions where we stream only stuff needed for
     debug info.  A leading 1/0 tells the reader whether a body follows.  */
  if (gimple_has_body_p (function))
    {
      streamer_write_uhwi (ob, 1);
      output_struct_function_base (ob, fn);

      /* Output all the SSA names used in the function.  */
      output_ssa_names (ob, fn);

      /* Output any exception handling regions.  */
      output_eh_regions (ob, fn);


      /* We will renumber the statements.  The code that does this uses
	 the same ordering that we use for serializing them so we can use
	 the same code on the other end and not have to write out the
	 statement numbers.  We do not assign UIDs to PHIs here because
	 virtual PHIs get re-computed on-the-fly which would make numbers
	 inconsistent.  */
      set_gimple_stmt_max_uid (cfun, 0);
      FOR_ALL_BB_FN (bb, cfun)
	{
	  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gphi *stmt = gsi.phi ();

	      /* Virtual PHIs are not going to be streamed.  */
	      if (!virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}
      /* To avoid keeping duplicate gimple IDs in the statements, renumber
	 virtual phis now.  */
      FOR_ALL_BB_FN (bb, cfun)
	{
	  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gphi *stmt = gsi.phi ();
	      if (virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}

      /* Output the code for the function.  */
      FOR_ALL_BB_FN (bb, fn)
	output_bb (ob, bb, fn);

      /* The terminator for this function.  */
      streamer_write_record_start (ob, LTO_null);

      output_cfg (ob, fn);

      pop_cfun ();
   }
  else
    streamer_write_uhwi (ob, 0);

  /* Create a section to hold the pickled output of this function.  */
  produce_asm (ob, function);

  destroy_output_block (ob);
}
2080
2081 /* Output the body of function NODE->DECL. */
2082
2083 static void
2084 output_constructor (struct varpool_node *node)
2085 {
2086 tree var = node->decl;
2087 struct output_block *ob;
2088
2089 ob = create_output_block (LTO_section_function_body);
2090
2091 clear_line_info (ob);
2092 ob->symbol = node;
2093
2094 /* Make string 0 be a NULL string. */
2095 streamer_write_char_stream (ob->string_stream, 0);
2096
2097 /* Output DECL_INITIAL for the function, which contains the tree of
2098 lexical scopes. */
2099 stream_write_tree (ob, DECL_INITIAL (var), true);
2100
2101 /* Create a section to hold the pickled output of this function. */
2102 produce_asm (ob, var);
2103
2104 destroy_output_block (ob);
2105 }
2106
2107
2108 /* Emit toplevel asms. */
2109
2110 void
2111 lto_output_toplevel_asms (void)
2112 {
2113 struct output_block *ob;
2114 struct asm_node *can;
2115 char *section_name;
2116 struct lto_simple_header_with_strings header;
2117
2118 if (!symtab->first_asm_symbol ())
2119 return;
2120
2121 ob = create_output_block (LTO_section_asm);
2122
2123 /* Make string 0 be a NULL string. */
2124 streamer_write_char_stream (ob->string_stream, 0);
2125
2126 for (can = symtab->first_asm_symbol (); can; can = can->next)
2127 {
2128 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2129 streamer_write_hwi (ob, can->order);
2130 }
2131
2132 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2133
2134 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
2135 lto_begin_section (section_name, !flag_wpa);
2136 free (section_name);
2137
2138 /* The entire header stream is computed here. */
2139 memset (&header, 0, sizeof (header));
2140
2141 /* Write the header. */
2142 header.major_version = LTO_major_version;
2143 header.minor_version = LTO_minor_version;
2144
2145 header.main_size = ob->main_stream->total_size;
2146 header.string_size = ob->string_stream->total_size;
2147 lto_write_data (&header, sizeof header);
2148
2149 /* Put all of the gimple and the string table out the asm file as a
2150 block of text. */
2151 lto_write_stream (ob->main_stream);
2152 lto_write_stream (ob->string_stream);
2153
2154 lto_end_section ();
2155
2156 destroy_output_block (ob);
2157 }
2158
2159
2160 /* Copy the function body or variable constructor of NODE without deserializing. */
2161
2162 static void
2163 copy_function_or_variable (struct symtab_node *node)
2164 {
2165 tree function = node->decl;
2166 struct lto_file_decl_data *file_data = node->lto_file_data;
2167 const char *data;
2168 size_t len;
2169 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2170 char *section_name =
2171 lto_get_section_name (LTO_section_function_body, name, NULL);
2172 size_t i, j;
2173 struct lto_in_decl_state *in_state;
2174 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2175
2176 lto_begin_section (section_name, !flag_wpa);
2177 free (section_name);
2178
2179 /* We may have renamed the declaration, e.g., a static function. */
2180 name = lto_get_decl_name_mapping (file_data, name);
2181
2182 data = lto_get_section_data (file_data, LTO_section_function_body,
2183 name, &len);
2184 gcc_assert (data);
2185
2186 /* Do a bit copy of the function body. */
2187 lto_write_data (data, len);
2188
2189 /* Copy decls. */
2190 in_state =
2191 lto_get_function_in_decl_state (node->lto_file_data, function);
2192 gcc_assert (in_state);
2193
2194 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2195 {
2196 size_t n = vec_safe_length (in_state->streams[i]);
2197 vec<tree, va_gc> *trees = in_state->streams[i];
2198 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2199
2200 /* The out state must have the same indices and the in state.
2201 So just copy the vector. All the encoders in the in state
2202 must be empty where we reach here. */
2203 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2204 encoder->trees.reserve_exact (n);
2205 for (j = 0; j < n; j++)
2206 encoder->trees.safe_push ((*trees)[j]);
2207 }
2208
2209 lto_free_section_data (file_data, LTO_section_function_body, name,
2210 data, len);
2211 lto_end_section ();
2212 }
2213
2214 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2215
2216 static tree
2217 wrap_refs (tree *tp, int *ws, void *)
2218 {
2219 tree t = *tp;
2220 if (handled_component_p (t)
2221 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL)
2222 {
2223 tree decl = TREE_OPERAND (t, 0);
2224 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2225 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2226 build1 (ADDR_EXPR, ptrtype, decl),
2227 build_int_cst (ptrtype, 0));
2228 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2229 *ws = 0;
2230 }
2231 else if (TREE_CODE (t) == CONSTRUCTOR)
2232 ;
2233 else if (!EXPR_P (t))
2234 *ws = 0;
2235 return NULL_TREE;
2236 }
2237
/* Main entry point from the pass manager.  Stream out the bodies of all
   functions and the initializers of all variables recorded in the symtab
   encoder of the current out-decl-state, then the symbol table itself.  */

void
lto_output (void)
{
  struct lto_out_decl_state *decl_state;
#ifdef ENABLE_CHECKING
  /* Tracks DECL_UIDs already emitted so we can assert no symbol is
     streamed twice.  */
  bitmap output = lto_bitmap_alloc ();
#endif
  int i, n_nodes;
  lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* Initialize the streamer.  */
  lto_streamer_init ();

  n_nodes = lto_symtab_encoder_size (encoder);
  /* Process only the functions with bodies.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
	{
	  if (lto_symtab_encoder_encode_body_p (encoder, node)
	      && !node->alias)
	    {
#ifdef ENABLE_CHECKING
	      gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
	      bitmap_set_bit (output, DECL_UID (node->decl));
#endif
	      /* Each body gets its own decl state so its decl references
		 are numbered independently of other bodies.  */
	      decl_state = lto_new_out_decl_state ();
	      lto_push_out_decl_state (decl_state);
	      if (gimple_has_body_p (node->decl) || !flag_wpa
		  /* Thunks have no body but they may be synthetized
		     at WPA time.  */
		  || DECL_ARGUMENTS (node->decl))
		output_function (node);
	      else
		/* No GIMPLE to re-stream: bit-copy the section that was
		   read in from the input LTO object.  */
		copy_function_or_variable (node);
	      gcc_assert (lto_get_out_decl_state () == decl_state);
	      lto_pop_out_decl_state ();
	      lto_record_function_out_decl_state (node->decl, decl_state);
	    }
	}
      else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
	{
	  /* Wrap symbol references inside the ctor in a type
	     preserving MEM_REF.  */
	  tree ctor = DECL_INITIAL (node->decl);
	  if (ctor && !in_lto_p)
	    walk_tree (&ctor, wrap_refs, NULL, NULL);
	  if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
	      && lto_symtab_encoder_encode_initializer_p (encoder, node)
	      && !node->alias)
	    {
	      timevar_push (TV_IPA_LTO_CTORS_OUT);
#ifdef ENABLE_CHECKING
	      gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
	      bitmap_set_bit (output, DECL_UID (node->decl));
#endif
	      decl_state = lto_new_out_decl_state ();
	      lto_push_out_decl_state (decl_state);
	      if (DECL_INITIAL (node->decl) != error_mark_node
		  || !flag_wpa)
		output_constructor (node);
	      else
		/* Initializer was not read in; copy it bitwise.  */
		copy_function_or_variable (node);
	      gcc_assert (lto_get_out_decl_state () == decl_state);
	      lto_pop_out_decl_state ();
	      lto_record_function_out_decl_state (node->decl, decl_state);
	      timevar_pop (TV_IPA_LTO_CTORS_OUT);
	    }
	}
    }

  /* Emit the callgraph after emitting function bodies.  This needs to
     be done now to make sure that all the statements in every function
     have been renumbered so that edges can be associated with call
     statements using the statement UIDs.  */
  output_symtab ();

  output_offload_tables ();

#ifdef ENABLE_CHECKING
  lto_bitmap_free (output);
#endif
}
2324
2325 /* Write each node in encoded by ENCODER to OB, as well as those reachable
2326 from it and required for correct representation of its semantics.
2327 Each node in ENCODER must be a global declaration or a type. A node
2328 is written only once, even if it appears multiple times in the
2329 vector. Certain transitively-reachable nodes, such as those
2330 representing expressions, may be duplicated, but such nodes
2331 must not appear in ENCODER itself. */
2332
2333 static void
2334 write_global_stream (struct output_block *ob,
2335 struct lto_tree_ref_encoder *encoder)
2336 {
2337 tree t;
2338 size_t index;
2339 const size_t size = lto_tree_ref_encoder_size (encoder);
2340
2341 for (index = 0; index < size; index++)
2342 {
2343 t = lto_tree_ref_encoder_get_tree (encoder, index);
2344 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2345 stream_write_tree (ob, t, false);
2346 }
2347 }
2348
2349
2350 /* Write a sequence of indices into the globals vector corresponding
2351 to the trees in ENCODER. These are used by the reader to map the
2352 indices used to refer to global entities within function bodies to
2353 their referents. */
2354
2355 static void
2356 write_global_references (struct output_block *ob,
2357 struct lto_tree_ref_encoder *encoder)
2358 {
2359 tree t;
2360 uint32_t index;
2361 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2362
2363 /* Write size and slot indexes as 32-bit unsigned numbers. */
2364 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2365 data[0] = size;
2366
2367 for (index = 0; index < size; index++)
2368 {
2369 uint32_t slot_num;
2370
2371 t = lto_tree_ref_encoder_get_tree (encoder, index);
2372 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2373 gcc_assert (slot_num != (unsigned)-1);
2374 data[index + 1] = slot_num;
2375 }
2376
2377 lto_write_data (data, sizeof (int32_t) * (size + 1));
2378 free (data);
2379 }
2380
2381
2382 /* Write all the streams in an lto_out_decl_state STATE using
2383 output block OB and output stream OUT_STREAM. */
2384
2385 void
2386 lto_output_decl_state_streams (struct output_block *ob,
2387 struct lto_out_decl_state *state)
2388 {
2389 int i;
2390
2391 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2392 write_global_stream (ob, &state->streams[i]);
2393 }
2394
2395
2396 /* Write all the references in an lto_out_decl_state STATE using
2397 output block OB and output stream OUT_STREAM. */
2398
2399 void
2400 lto_output_decl_state_refs (struct output_block *ob,
2401 struct lto_out_decl_state *state)
2402 {
2403 unsigned i;
2404 uint32_t ref;
2405 tree decl;
2406
2407 /* Write reference to FUNCTION_DECL. If there is not function,
2408 write reference to void_type_node. */
2409 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2410 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2411 gcc_assert (ref != (unsigned)-1);
2412 lto_write_data (&ref, sizeof (uint32_t));
2413
2414 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2415 write_global_references (ob, &state->streams[i]);
2416 }
2417
2418
2419 /* Return the written size of STATE. */
2420
2421 static size_t
2422 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2423 {
2424 int i;
2425 size_t size;
2426
2427 size = sizeof (int32_t); /* fn_ref. */
2428 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2429 {
2430 size += sizeof (int32_t); /* vector size. */
2431 size += (lto_tree_ref_encoder_size (&state->streams[i])
2432 * sizeof (int32_t));
2433 }
2434 return size;
2435 }
2436
2437
/* Write symbol T into STREAM in CACHE.  SEEN specifies symbols we wrote
   so far.  ALIAS relaxes the defined-symbol assertions — it is passed
   true when T is written on behalf of an alias (NOTE(review): inferred
   from its use in the asserts below; confirm against callers).  */

static void
write_symbol (struct streamer_tree_cache_d *cache,
	      tree t, hash_set<const char *> *seen, bool alias)
{
  const char *name;
  enum gcc_plugin_symbol_kind kind;
  enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
  unsigned slot_num;
  uint64_t size;
  const char *comdat;
  unsigned char c;

  /* None of the following kinds of symbols are needed in the
     symbol table.  */
  if (!TREE_PUBLIC (t)
      || is_builtin_fn (t)
      || DECL_ABSTRACT_P (t)
      || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
    return;
  gcc_assert (TREE_CODE (t) != RESULT_DECL);

  gcc_assert (TREE_CODE (t) == VAR_DECL
	      || TREE_CODE (t) == FUNCTION_DECL);

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));

  /* This behaves like assemble_name_raw in varasm.c, performing the
     same name manipulations that ASM_OUTPUT_LABELREF does. */
  name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));

  /* hash_set::add returns true when the entry already existed, so this
     skips names emitted earlier in this table.  */
  if (seen->add (name))
    return;

  streamer_tree_cache_lookup (cache, t, &slot_num);
  gcc_assert (slot_num != (unsigned)-1);

  /* Classify the symbol for the plugin: undefined (possibly weak) when
     external, otherwise weak/common/plain definition.  */
  if (DECL_EXTERNAL (t))
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKUNDEF;
      else
	kind = GCCPK_UNDEF;
    }
  else
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKDEF;
      else if (DECL_COMMON (t))
	kind = GCCPK_COMMON;
      else
	kind = GCCPK_DEF;

      /* When something is defined, it should have node attached.  */
      gcc_assert (alias || TREE_CODE (t) != VAR_DECL
		  || varpool_node::get (t)->definition);
      gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
		  || (cgraph_node::get (t)
		      && cgraph_node::get (t)->definition));
    }

  /* Imitate what default_elf_asm_output_external do.
     When symbol is external, we need to output it with DEFAULT visibility
     when compiling with -fvisibility=default, while with HIDDEN visibility
     when symbol has attribute (visibility("hidden")) specified.
     targetm.binds_local_p check DECL_VISIBILITY_SPECIFIED and gets this
     right. */

  if (DECL_EXTERNAL (t)
      && !targetm.binds_local_p (t))
    visibility = GCCPV_DEFAULT;
  else
    switch (DECL_VISIBILITY (t))
      {
      case VISIBILITY_DEFAULT:
	visibility = GCCPV_DEFAULT;
	break;
      case VISIBILITY_PROTECTED:
	visibility = GCCPV_PROTECTED;
	break;
      case VISIBILITY_HIDDEN:
	visibility = GCCPV_HIDDEN;
	break;
      case VISIBILITY_INTERNAL:
	visibility = GCCPV_INTERNAL;
	break;
      }

  /* Only common symbols carry a size; everything else writes zero.  */
  if (kind == GCCPK_COMMON
      && DECL_SIZE_UNIT (t)
      && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
    size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
  else
    size = 0;

  if (DECL_ONE_ONLY (t))
    comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
  else
    comdat = "";

  /* Emit the record: NUL-terminated name and comdat group, one byte of
     kind, one byte of visibility, 8 bytes of size, 4 bytes of slot
     number.  This layout is the symbol-table wire format; do not
     reorder.  */
  lto_write_data (name, strlen (name) + 1);
  lto_write_data (comdat, strlen (comdat) + 1);
  c = (unsigned char) kind;
  lto_write_data (&c, 1);
  c = (unsigned char) visibility;
  lto_write_data (&c, 1);
  lto_write_data (&size, 8);
  lto_write_data (&slot_num, 4);
}
2549
2550 /* Return true if NODE should appear in the plugin symbol table. */
2551
2552 bool
2553 output_symbol_p (symtab_node *node)
2554 {
2555 struct cgraph_node *cnode;
2556 if (!node->real_symbol_p ())
2557 return false;
2558 /* We keep external functions in symtab for sake of inlining
2559 and devirtualization. We do not want to see them in symbol table as
2560 references unless they are really used. */
2561 cnode = dyn_cast <cgraph_node *> (node);
2562 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2563 && cnode->callers)
2564 return true;
2565
2566 /* Ignore all references from external vars initializers - they are not really
2567 part of the compilation unit until they are used by folding. Some symbols,
2568 like references to external construction vtables can not be referred to at all.
2569 We decide this at can_refer_decl_in_current_unit_p. */
2570 if (!node->definition || DECL_EXTERNAL (node->decl))
2571 {
2572 int i;
2573 struct ipa_ref *ref;
2574 for (i = 0; node->iterate_referring (i, ref); i++)
2575 {
2576 if (ref->use == IPA_REF_ALIAS)
2577 continue;
2578 if (is_a <cgraph_node *> (ref->referring))
2579 return true;
2580 if (!DECL_EXTERNAL (ref->referring->decl))
2581 return true;
2582 }
2583 return false;
2584 }
2585 return true;
2586 }
2587
2588
2589 /* Write an IL symbol table to OB.
2590 SET and VSET are cgraph/varpool node sets we are outputting. */
2591
2592 static void
2593 produce_symtab (struct output_block *ob)
2594 {
2595 struct streamer_tree_cache_d *cache = ob->writer_cache;
2596 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2597 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2598 lto_symtab_encoder_iterator lsei;
2599
2600 lto_begin_section (section_name, false);
2601 free (section_name);
2602
2603 hash_set<const char *> seen;
2604
2605 /* Write the symbol table.
2606 First write everything defined and then all declarations.
2607 This is necessary to handle cases where we have duplicated symbols. */
2608 for (lsei = lsei_start (encoder);
2609 !lsei_end_p (lsei); lsei_next (&lsei))
2610 {
2611 symtab_node *node = lsei_node (lsei);
2612
2613 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2614 continue;
2615 write_symbol (cache, node->decl, &seen, false);
2616 }
2617 for (lsei = lsei_start (encoder);
2618 !lsei_end_p (lsei); lsei_next (&lsei))
2619 {
2620 symtab_node *node = lsei_node (lsei);
2621
2622 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2623 continue;
2624 write_symbol (cache, node->decl, &seen, false);
2625 }
2626
2627 lto_end_section ();
2628 }
2629
2630
/* This pass is run after all of the functions are serialized and all
   of the IPA passes have written their serialized forms.  This pass
   causes the vector of all of the global decls and types used from
   this file to be written in to a section that can then be read in to
   recover these on other side.  */

void
produce_asm_for_decls (void)
{
  struct lto_out_decl_state *out_state;
  struct lto_out_decl_state *fn_out_state;
  struct lto_decl_header header;
  char *section_name;
  struct output_block *ob;
  unsigned idx, num_fns;
  size_t decl_state_size;
  int32_t num_decl_states;

  ob = create_output_block (LTO_section_decls);

  memset (&header, 0, sizeof (struct lto_decl_header));

  section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  gcc_assert (!alias_pairs);

  /* Get rid of the global decl state hash tables to save some memory.  */
  out_state = lto_get_out_decl_state ();
  for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
    if (out_state->streams[i].tree_hash_table)
      {
	delete out_state->streams[i].tree_hash_table;
	out_state->streams[i].tree_hash_table = NULL;
      }

  /* Write the global symbols: first the global out-decl state, then the
     per-function states recorded during lto_output.  */
  lto_output_decl_state_streams (ob, out_state);
  num_fns = lto_function_decl_states.length ();
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_streams (ob, fn_out_state);
    }

  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  /* Currently not used.  This field would allow us to preallocate
     the globals vector, so that it need not be resized as it is extended.  */
  header.num_nodes = -1;

  /* Compute the total size of all decl out states.  */
  decl_state_size = sizeof (int32_t);
  decl_state_size += lto_out_decl_state_written_size (out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      decl_state_size += lto_out_decl_state_written_size (fn_out_state);
    }
  header.decl_state_size = decl_state_size;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  /* The header must precede the decl states and streams it describes.  */
  lto_write_data (&header, sizeof header);

  /* Write the main out-decl state, followed by out-decl states of
     functions.  */
  num_decl_states = num_fns + 1;
  lto_write_data (&num_decl_states, sizeof (num_decl_states));
  lto_output_decl_state_refs (ob, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state = lto_function_decl_states[idx];
      lto_output_decl_state_refs (ob, fn_out_state);
    }

  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  /* Write the symbol table.  It is used by linker to determine dependencies
     and thus we can skip it for WPA.  */
  if (!flag_wpa)
    produce_symtab (ob);

  /* Write command line opts.  */
  lto_write_options ();

  /* Deallocate memory and clean up.  */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_delete_out_decl_state (fn_out_state);
    }
  lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
  lto_function_decl_states.release ();
  destroy_output_block (ob);
}