add a hash_set based on hash_table
[gcc.git] / gcc / lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2014 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "stor-layout.h"
29 #include "stringpool.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "hashtab.h"
35 #include "hash-set.h"
36 #include "basic-block.h"
37 #include "tree-ssa-alias.h"
38 #include "internal-fn.h"
39 #include "gimple-expr.h"
40 #include "is-a.h"
41 #include "gimple.h"
42 #include "gimple-iterator.h"
43 #include "gimple-ssa.h"
44 #include "tree-ssanames.h"
45 #include "tree-pass.h"
46 #include "function.h"
47 #include "diagnostic-core.h"
48 #include "inchash.h"
49 #include "except.h"
50 #include "lto-symtab.h"
51 #include "lto-streamer.h"
52 #include "data-streamer.h"
53 #include "gimple-streamer.h"
54 #include "tree-streamer.h"
55 #include "streamer-hooks.h"
56 #include "cfgloop.h"
57 #include "builtins.h"
58
59
60 static void lto_write_tree (struct output_block*, tree, bool);
61
/* Reset the current location state (file, line, column) tracked in OB
   so that location delta-encoding starts from a known baseline.  */

static void
clear_line_info (struct output_block *ob)
{
  ob->current_file = NULL;
  ob->current_line = 0;
  ob->current_col = 0;
}
71
72
73 /* Create the output block and return it. SECTION_TYPE is
74 LTO_section_function_body or LTO_static_initializer. */
75
76 struct output_block *
77 create_output_block (enum lto_section_type section_type)
78 {
79 struct output_block *ob = XCNEW (struct output_block);
80
81 ob->section_type = section_type;
82 ob->decl_state = lto_get_out_decl_state ();
83 ob->main_stream = XCNEW (struct lto_output_stream);
84 ob->string_stream = XCNEW (struct lto_output_stream);
85 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
86
87 if (section_type == LTO_section_function_body)
88 ob->cfg_stream = XCNEW (struct lto_output_stream);
89
90 clear_line_info (ob);
91
92 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
93 gcc_obstack_init (&ob->obstack);
94
95 return ob;
96 }
97
98
99 /* Destroy the output block OB. */
100
101 void
102 destroy_output_block (struct output_block *ob)
103 {
104 enum lto_section_type section_type = ob->section_type;
105
106 delete ob->string_hash_table;
107 ob->string_hash_table = NULL;
108
109 free (ob->main_stream);
110 free (ob->string_stream);
111 if (section_type == LTO_section_function_body)
112 free (ob->cfg_stream);
113
114 streamer_tree_cache_delete (ob->writer_cache);
115 obstack_free (&ob->obstack, NULL);
116
117 free (ob);
118 }
119
120
/* Look up NODE in the type table and write the index for it to OB.  */

static void
output_type_ref (struct output_block *ob, tree node)
{
  /* Emit the LTO_type_ref record tag followed by NODE's position in
     the type index table of OB's decl state.  */
  streamer_write_record_start (ob, LTO_type_ref);
  lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
}
129
130
/* Return true if tree node T is written to various tables.  For these
   nodes, we sometimes want to write their physical representation
   (via lto_output_tree), and sometimes we need to emit an index
   reference into a table (via lto_output_tree_ref).  */

static bool
tree_is_indexable (tree t)
{
  /* Parameters and return values of functions of variably modified types
     must go to global stream, because they may be used in the type
     definition.  */
  if (TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
    return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
  /* IMPORTED_DECL is put into BLOCK and thus it never can be shared.  */
  else if (TREE_CODE (t) == IMPORTED_DECL)
    return false;
  /* Function-local variables, type/const/namelist decls with a function
     context are streamed with the body that owns them, not indexed.  */
  else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
	    || TREE_CODE (t) == TYPE_DECL
	    || TREE_CODE (t) == CONST_DECL
	    || TREE_CODE (t) == NAMELIST_DECL)
	   && decl_function_context (t))
    return false;
  else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
    return false;
  /* Variably modified types need to be streamed alongside function
     bodies because they can refer to local entities.  Together with
     them we have to localize their members as well.
     ??? In theory that includes non-FIELD_DECLs as well.  */
  else if (TYPE_P (t)
	   && variably_modified_type_p (t, NULL_TREE))
    return false;
  else if (TREE_CODE (t) == FIELD_DECL
	   && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
    return false;
  else
    return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
}
168
169
170 /* Output info about new location into bitpack BP.
171 After outputting bitpack, lto_output_location_data has
172 to be done to output actual data. */
173
174 void
175 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
176 location_t loc)
177 {
178 expanded_location xloc;
179
180 loc = LOCATION_LOCUS (loc);
181 bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
182 if (loc == UNKNOWN_LOCATION)
183 return;
184
185 xloc = expand_location (loc);
186
187 bp_pack_value (bp, ob->current_file != xloc.file, 1);
188 bp_pack_value (bp, ob->current_line != xloc.line, 1);
189 bp_pack_value (bp, ob->current_col != xloc.column, 1);
190
191 if (ob->current_file != xloc.file)
192 bp_pack_var_len_unsigned (bp,
193 streamer_string_index (ob, xloc.file,
194 strlen (xloc.file) + 1,
195 true));
196 ob->current_file = xloc.file;
197
198 if (ob->current_line != xloc.line)
199 bp_pack_var_len_unsigned (bp, xloc.line);
200 ob->current_line = xloc.line;
201
202 if (ob->current_col != xloc.column)
203 bp_pack_var_len_unsigned (bp, xloc.column);
204 ob->current_col = xloc.column;
205 }
206
207
/* If EXPR is an indexable tree node, output a reference to it to
   output block OB.  Otherwise, output the physical representation of
   EXPR to OB.  */

static void
lto_output_tree_ref (struct output_block *ob, tree expr)
{
  enum tree_code code;

  /* Types are always referenced through the type index table.  */
  if (TYPE_P (expr))
    {
      output_type_ref (ob, expr);
      return;
    }

  /* For everything else, emit a record tag identifying the kind of
     reference followed by EXPR's index in the matching decl table.  */
  code = TREE_CODE (expr);
  switch (code)
    {
    case SSA_NAME:
      streamer_write_record_start (ob, LTO_ssa_name_ref);
      streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
      break;

    case FIELD_DECL:
      streamer_write_record_start (ob, LTO_field_decl_ref);
      lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case FUNCTION_DECL:
      streamer_write_record_start (ob, LTO_function_decl_ref);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case VAR_DECL:
    case DEBUG_EXPR_DECL:
      /* Only file-scope or static variables may be emitted by reference.  */
      gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
      /* Fall through.  */
    case PARM_DECL:
      streamer_write_record_start (ob, LTO_global_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case CONST_DECL:
      streamer_write_record_start (ob, LTO_const_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case IMPORTED_DECL:
      gcc_assert (decl_function_context (expr) == NULL);
      streamer_write_record_start (ob, LTO_imported_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TYPE_DECL:
      streamer_write_record_start (ob, LTO_type_decl_ref);
      lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMELIST_DECL:
      streamer_write_record_start (ob, LTO_namelist_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMESPACE_DECL:
      streamer_write_record_start (ob, LTO_namespace_decl_ref);
      lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case LABEL_DECL:
      streamer_write_record_start (ob, LTO_label_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case RESULT_DECL:
      streamer_write_record_start (ob, LTO_result_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TRANSLATION_UNIT_DECL:
      streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    default:
      /* No other node is indexable, so it should have been handled by
	 lto_output_tree.  */
      gcc_unreachable ();
    }
}
296
297
298 /* Return true if EXPR is a tree node that can be written to disk. */
299
300 static inline bool
301 lto_is_streamable (tree expr)
302 {
303 enum tree_code code = TREE_CODE (expr);
304
305 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
306 name version in lto_output_tree_ref (see output_ssa_names). */
307 return !is_lang_specific (expr)
308 && code != SSA_NAME
309 && code != CALL_EXPR
310 && code != LANG_TYPE
311 && code != MODIFY_EXPR
312 && code != INIT_EXPR
313 && code != TARGET_EXPR
314 && code != BIND_EXPR
315 && code != WITH_CLEANUP_EXPR
316 && code != STATEMENT_LIST
317 && (code == CASE_LABEL_EXPR
318 || code == DECL_EXPR
319 || TREE_CODE_CLASS (code) != tcc_statement);
320 }
321
322
323 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
324
325 static tree
326 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
327 {
328 gcc_checking_assert (DECL_P (expr)
329 && TREE_CODE (expr) != FUNCTION_DECL
330 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
331
332 /* Handle DECL_INITIAL for symbols. */
333 tree initial = DECL_INITIAL (expr);
334 if (TREE_CODE (expr) == VAR_DECL
335 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
336 && !DECL_IN_CONSTANT_POOL (expr)
337 && initial)
338 {
339 varpool_node *vnode;
340 /* Extra section needs about 30 bytes; do not produce it for simple
341 scalar values. */
342 if (TREE_CODE (DECL_INITIAL (expr)) == CONSTRUCTOR
343 || !(vnode = varpool_node::get (expr))
344 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
345 initial = error_mark_node;
346 }
347
348 return initial;
349 }
350
351
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  */

static void
lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
{
  /* Pack all the non-pointer fields in EXPR into a bitpack and write
     the resulting bitpack.  */
  bitpack_d bp = bitpack_create (ob->main_stream);
  streamer_pack_tree_bitfields (ob, &bp, expr);
  streamer_write_bitpack (&bp);

  /* Write all the pointer fields in EXPR.  */
  streamer_write_tree_body (ob, expr, ref_p);

  /* Write any LTO-specific data to OB.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    {
      /* Handle DECL_INITIAL for symbols; this may substitute
	 error_mark_node when the initializer goes to its own section.  */
      tree initial = get_symbol_initial_value
			 (ob->decl_state->symtab_node_encoder, expr);
      stream_write_tree (ob, initial, ref_p);
    }
}
380
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  */

static void
lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
{
  if (!lto_is_streamable (expr))
    internal_error ("tree code %qs is not supported in LTO streams",
		    get_tree_code_name (TREE_CODE (expr)));

  /* Write the header, containing everything needed to materialize
     EXPR on the reading side.  */
  streamer_write_tree_header (ob, expr);

  /* Then the bitfields, pointer fields and LTO-specific data.  */
  lto_write_tree_1 (ob, expr, ref_p);

  /* Mark the end of EXPR.  */
  streamer_write_zero (ob);
}
402
/* Emit the physical representation of tree node EXPR to output block
   OB with hash value HASH.  If THIS_REF_P is true, the leaves of EXPR
   are emitted as references via lto_output_tree_ref.  REF_P is used for
   streaming siblings of EXPR.  EXPR must not have been streamed before.  */

static void
lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
		   bool ref_p, bool this_ref_p)
{
  unsigned ix;

  gcc_checking_assert (expr != NULL_TREE
		       && !(this_ref_p && tree_is_indexable (expr)));

  /* Register EXPR in the writer cache; IX is only needed for the
     insertion itself.  EXPR must be new to the cache.  */
  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
					      expr, hash, &ix);
  gcc_assert (!exists_p);
  if (streamer_handle_as_builtin_p (expr))
    {
      /* MD and NORMAL builtins do not need to be written out
	 completely as they are always instantiated by the
	 compiler on startup.  The only builtins that need to
	 be written out are BUILT_IN_FRONTEND.  For all other
	 builtins, we simply write the class and code.  */
      streamer_write_builtin (ob, expr);
    }
  else if (TREE_CODE (expr) == INTEGER_CST
	   && !TREE_OVERFLOW (expr))
    {
      /* Shared INTEGER_CST nodes are special because they need their
	 original type to be materialized by the reader (to implement
	 TYPE_CACHED_VALUES).  */
      streamer_write_integer_cst (ob, expr, ref_p);
    }
  else
    {
      /* This is the first time we see EXPR, write its fields
	 to OB.  */
      lto_write_tree (ob, expr, ref_p);
    }
}
443
/* DFS walker that discovers strongly-connected components of trees
   reachable from a root expression.  Constructing the object performs
   the whole walk; the discovered entries accumulate on SCCSTACK.  */

class DFS
{
public:
  DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
       bool single_p);
  ~DFS ();

  /* A tree paired with the hash value computed for it.  */
  struct scc_entry
  {
    tree t;
    hashval_t hash;
  };
  /* Stack of trees visited during the walk, grouped by SCC.  */
  vec<scc_entry> sccstack;

private:
  /* Per-node Tarjan-style state: DFS discovery number and low-link.  */
  struct sccs
  {
    unsigned int dfsnum;
    unsigned int low;
  };

  /* qsort-style comparison of two scc_entry values.  */
  static int scc_entry_compare (const void *, const void *);

  void DFS_write_tree_body (struct output_block *ob,
			    tree expr, sccs *expr_state, bool ref_p,
			    bool single_p);

  void DFS_write_tree (struct output_block *ob, sccs *from_state,
		       tree expr, bool ref_p, bool this_ref_p,
		       bool single_p);
  hashval_t
  hash_scc (struct output_block *ob, unsigned first, unsigned size);

  /* Next DFS discovery number to hand out.  */
  unsigned int next_dfs_num;
  /* Map from tree to its sccs state; the obstack backs that state.  */
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
};
481
/* Initialize the walker state and immediately perform the DFS walk
   rooted at EXPR, streaming into OB.  REF_P/THIS_REF_P control whether
   (leaves of) EXPR are emitted as references; SINGLE_P presumably
   restricts the walk to a single tree — confirm in DFS_write_tree.  */

DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
	  bool single_p)
{
  sccstack.create (0);
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  next_dfs_num = 1;
  DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p, single_p);
}
491
/* Release the SCC stack, the per-node state map and its obstack.  */

DFS::~DFS ()
{
  sccstack.release ();
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);
}
498
/* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
   DFS recurse for all tree edges originating from it.  The edge order
   below is the streaming order and must match the reader.  */

void
DFS::DFS_write_tree_body (struct output_block *ob,
			  tree expr, sccs *expr_state, bool ref_p,
			  bool single_p)
{
/* Recurse into one outgoing edge of EXPR.  */
#define DFS_follow_tree_edge(DEST) \
  DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p, single_p)

  enum tree_code code;

  code = TREE_CODE (expr);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (TREE_CODE (expr) != IDENTIFIER_NODE)
	DFS_follow_tree_edge (TREE_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
	DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      DFS_follow_tree_edge (TREE_REALPART (expr));
      DFS_follow_tree_edge (TREE_IMAGPART (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (expr)
	  && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
	  && ANON_AGGRNAME_P (DECL_NAME (expr)))
	;
      else
	DFS_follow_tree_edge (DECL_NAME (expr));
      DFS_follow_tree_edge (DECL_CONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      DFS_follow_tree_edge (DECL_SIZE (expr));
      DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));

      /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
	 special handling in LTO, it must be handled by streamer hooks.  */

      DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));

      /* Do not follow DECL_ABSTRACT_ORIGIN.  We cannot handle debug
	 information for early inlining so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      if ((TREE_CODE (expr) == VAR_DECL
	   || TREE_CODE (expr) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (expr))
	DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
      if (TREE_CODE (expr) == VAR_DECL)
	DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (TREE_CODE (expr) == TYPE_DECL)
	DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* Make sure we don't inadvertently set the assembler name.  */
      if (DECL_ASSEMBLER_NAME_SET_P (expr))
	DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
      DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
      DFS_follow_tree_edge (DECL_FCONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      DFS_follow_tree_edge (DECL_VINDEX (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
      /* Do not DECL_FUNCTION_SPECIFIC_TARGET.  They will be regenerated.  */
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      DFS_follow_tree_edge (TYPE_SIZE (expr));
      DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
      DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
      DFS_follow_tree_edge (TYPE_NAME (expr));
      /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
	 reconstructed during fixup.  */
      /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
	 during fixup.  */
      DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
      DFS_follow_tree_edge (TYPE_CONTEXT (expr));
      /* TYPE_CANONICAL is re-computed during type merging, so no need
	 to follow it here.  */
      DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	DFS_follow_tree_edge (TYPE_VALUES (expr));
      else if (TREE_CODE (expr) == ARRAY_TYPE)
	DFS_follow_tree_edge (TYPE_DOMAIN (expr));
      else if (RECORD_OR_UNION_TYPE_P (expr))
	for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
	  DFS_follow_tree_edge (t);
      else if (TREE_CODE (expr) == FUNCTION_TYPE
	       || TREE_CODE (expr) == METHOD_TYPE)
	DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));

      if (!POINTER_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_MINVAL (expr));
      DFS_follow_tree_edge (TYPE_MAXVAL (expr));
      if (RECORD_OR_UNION_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_BINFO (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      DFS_follow_tree_edge (TREE_PURPOSE (expr));
      DFS_follow_tree_edge (TREE_VALUE (expr));
      DFS_follow_tree_edge (TREE_CHAIN (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    {
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_OPERAND (expr, i));
      DFS_follow_tree_edge (TREE_BLOCK (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    {
      for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
	/* ??? FIXME.  See also streamer_write_chain.  */
	if (!(VAR_OR_FUNCTION_DECL_P (t)
	      && DECL_EXTERNAL (t)))
	  DFS_follow_tree_edge (t);

      DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));

      /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
	 handle - those that represent inlined function scopes.
	 For the rest, drop them on the floor instead of ICEing
	 in dwarf2out.c.  */
      if (inlined_function_outer_scope_p (expr))
	{
	  tree ultimate_origin = block_ultimate_origin (expr);
	  DFS_follow_tree_edge (ultimate_origin);
	}
      /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
	 information for early inlined BLOCKs so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
	 streaming time.  */

      /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
	 list is re-constructed from BLOCK_SUPERCONTEXT.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree t;

      /* Note that the number of BINFO slots has already been emitted in
	 EXPR's header (see streamer_write_tree_header) because this length
	 is needed to build the empty BINFO node on the reader side.  */
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
	DFS_follow_tree_edge (t);
      DFS_follow_tree_edge (BINFO_OFFSET (expr));
      DFS_follow_tree_edge (BINFO_VTABLE (expr));
      DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));

      /* The number of BINFO_BASE_ACCESSES has already been emitted in
	 EXPR's bitfield section.  */
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
	DFS_follow_tree_edge (t);

      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
	{
	  DFS_follow_tree_edge (index);
	  DFS_follow_tree_edge (value);
	}
    }

  if (code == OMP_CLAUSE)
    {
      int i;
      for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
	DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
      DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
    }

#undef DFS_follow_tree_edge
}
728
/* Return a hash value for the tree T.
   CACHE holds hash values of trees outside current SCC.  MAP, if non-NULL,
   may hold hash values of trees inside current SCC.  */
732
733 static hashval_t
734 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
735 {
736 inchash::hash hstate;
737
738 #define visit(SIBLING) \
739 do { \
740 unsigned ix; \
741 if (!SIBLING) \
742 hstate.add_int (0); \
743 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
744 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
745 else if (map) \
746 hstate.add_int (*map->get (SIBLING)); \
747 else \
748 hstate.add_int (1); \
749 } while (0)
750
751 /* Hash TS_BASE. */
752 enum tree_code code = TREE_CODE (t);
753 hstate.add_int (code);
754 if (!TYPE_P (t))
755 {
756 hstate.add_flag (TREE_SIDE_EFFECTS (t));
757 hstate.add_flag (TREE_CONSTANT (t));
758 hstate.add_flag (TREE_READONLY (t));
759 hstate.add_flag (TREE_PUBLIC (t));
760 }
761 hstate.add_flag (TREE_ADDRESSABLE (t));
762 hstate.add_flag (TREE_THIS_VOLATILE (t));
763 if (DECL_P (t))
764 hstate.add_flag (DECL_UNSIGNED (t));
765 else if (TYPE_P (t))
766 hstate.add_flag (TYPE_UNSIGNED (t));
767 if (TYPE_P (t))
768 hstate.add_flag (TYPE_ARTIFICIAL (t));
769 else
770 hstate.add_flag (TREE_NO_WARNING (t));
771 hstate.add_flag (TREE_NOTHROW (t));
772 hstate.add_flag (TREE_STATIC (t));
773 hstate.add_flag (TREE_PROTECTED (t));
774 hstate.add_flag (TREE_DEPRECATED (t));
775 if (code != TREE_BINFO)
776 hstate.add_flag (TREE_PRIVATE (t));
777 if (TYPE_P (t))
778 {
779 hstate.add_flag (TYPE_SATURATING (t));
780 hstate.add_flag (TYPE_ADDR_SPACE (t));
781 }
782 else if (code == SSA_NAME)
783 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
784 hstate.commit_flag ();
785
786 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
787 {
788 int i;
789 hstate.add_wide_int (TREE_INT_CST_NUNITS (t));
790 hstate.add_wide_int (TREE_INT_CST_EXT_NUNITS (t));
791 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
792 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
793 }
794
795 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
796 {
797 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
798 hstate.add_flag (r.cl);
799 hstate.add_flag (r.sign);
800 hstate.add_flag (r.signalling);
801 hstate.add_flag (r.canonical);
802 hstate.commit_flag ();
803 hstate.add_int (r.uexp);
804 hstate.add (r.sig, sizeof (r.sig));
805 }
806
807 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
808 {
809 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
810 hstate.add_int (f.mode);
811 hstate.add_int (f.data.low);
812 hstate.add_int (f.data.high);
813 }
814
815 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
816 {
817 hstate.add_wide_int (DECL_MODE (t));
818 hstate.add_flag (DECL_NONLOCAL (t));
819 hstate.add_flag (DECL_VIRTUAL_P (t));
820 hstate.add_flag (DECL_IGNORED_P (t));
821 hstate.add_flag (DECL_ABSTRACT (t));
822 hstate.add_flag (DECL_ARTIFICIAL (t));
823 hstate.add_flag (DECL_USER_ALIGN (t));
824 hstate.add_flag (DECL_PRESERVE_P (t));
825 hstate.add_flag (DECL_EXTERNAL (t));
826 hstate.add_flag (DECL_GIMPLE_REG_P (t));
827 hstate.commit_flag ();
828 hstate.add_int (DECL_ALIGN (t));
829 if (code == LABEL_DECL)
830 {
831 hstate.add_int (EH_LANDING_PAD_NR (t));
832 hstate.add_int (LABEL_DECL_UID (t));
833 }
834 else if (code == FIELD_DECL)
835 {
836 hstate.add_flag (DECL_PACKED (t));
837 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
838 hstate.add_int (DECL_OFFSET_ALIGN (t));
839 }
840 else if (code == VAR_DECL)
841 {
842 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
843 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
844 }
845 if (code == RESULT_DECL
846 || code == PARM_DECL
847 || code == VAR_DECL)
848 {
849 hstate.add_flag (DECL_BY_REFERENCE (t));
850 if (code == VAR_DECL
851 || code == PARM_DECL)
852 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
853 }
854 hstate.commit_flag ();
855 }
856
857 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
858 hstate.add_int (DECL_REGISTER (t));
859
860 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
861 {
862 hstate.add_flag (DECL_COMMON (t));
863 hstate.add_flag (DECL_DLLIMPORT_P (t));
864 hstate.add_flag (DECL_WEAK (t));
865 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
866 hstate.add_flag (DECL_COMDAT (t));
867 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
868 hstate.add_int (DECL_VISIBILITY (t));
869 if (code == VAR_DECL)
870 {
871 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
872 hstate.add_flag (DECL_HARD_REGISTER (t));
873 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
874 }
875 if (TREE_CODE (t) == FUNCTION_DECL)
876 {
877 hstate.add_flag (DECL_FINAL_P (t));
878 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
879 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
880 }
881 hstate.commit_flag ();
882 }
883
884 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
885 {
886 hstate.add_int (DECL_BUILT_IN_CLASS (t));
887 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
888 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
889 hstate.add_flag (DECL_UNINLINABLE (t));
890 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
891 hstate.add_flag (DECL_IS_NOVOPS (t));
892 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
893 hstate.add_flag (DECL_IS_MALLOC (t));
894 hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
895 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
896 hstate.add_flag (DECL_STATIC_CHAIN (t));
897 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
898 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
899 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
900 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
901 hstate.add_flag (DECL_PURE_P (t));
902 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
903 hstate.commit_flag ();
904 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
905 hstate.add_int (DECL_FUNCTION_CODE (t));
906 }
907
908 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
909 {
910 hstate.add_wide_int (TYPE_MODE (t));
911 hstate.add_flag (TYPE_STRING_FLAG (t));
912 hstate.add_flag (TYPE_NO_FORCE_BLK (t));
913 hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
914 hstate.add_flag (TYPE_PACKED (t));
915 hstate.add_flag (TYPE_RESTRICT (t));
916 hstate.add_flag (TYPE_USER_ALIGN (t));
917 hstate.add_flag (TYPE_READONLY (t));
918 if (RECORD_OR_UNION_TYPE_P (t))
919 {
920 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
921 hstate.add_flag (TYPE_FINAL_P (t));
922 }
923 else if (code == ARRAY_TYPE)
924 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
925 hstate.commit_flag ();
926 hstate.add_int (TYPE_PRECISION (t));
927 hstate.add_int (TYPE_ALIGN (t));
928 hstate.add_int ((TYPE_ALIAS_SET (t) == 0
929 || (!in_lto_p
930 && get_alias_set (t) == 0))
931 ? 0 : -1);
932 }
933
934 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
935 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
936 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
937
938 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
939 gcc_unreachable ();
940
941 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
942 hstate.add (t, sizeof (struct cl_optimization));
943
944 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
945 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
946
947 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
948 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
949
950 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
951 {
952 if (code != IDENTIFIER_NODE)
953 visit (TREE_TYPE (t));
954 }
955
956 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
957 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
958 visit (VECTOR_CST_ELT (t, i));
959
960 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
961 {
962 visit (TREE_REALPART (t));
963 visit (TREE_IMAGPART (t));
964 }
965
966 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
967 {
968 /* Drop names that were created for anonymous entities. */
969 if (DECL_NAME (t)
970 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
971 && ANON_AGGRNAME_P (DECL_NAME (t)))
972 ;
973 else
974 visit (DECL_NAME (t));
975 if (DECL_FILE_SCOPE_P (t))
976 ;
977 else
978 visit (DECL_CONTEXT (t));
979 }
980
981 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
982 {
983 visit (DECL_SIZE (t));
984 visit (DECL_SIZE_UNIT (t));
985 visit (DECL_ATTRIBUTES (t));
986 if ((code == VAR_DECL
987 || code == PARM_DECL)
988 && DECL_HAS_VALUE_EXPR_P (t))
989 visit (DECL_VALUE_EXPR (t));
990 if (code == VAR_DECL
991 && DECL_HAS_DEBUG_EXPR_P (t))
992 visit (DECL_DEBUG_EXPR (t));
993 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
994 be able to call get_symbol_initial_value. */
995 }
996
997 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
998 {
999 if (code == TYPE_DECL)
1000 visit (DECL_ORIGINAL_TYPE (t));
1001 }
1002
1003 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1004 {
1005 if (DECL_ASSEMBLER_NAME_SET_P (t))
1006 visit (DECL_ASSEMBLER_NAME (t));
1007 }
1008
1009 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1010 {
1011 visit (DECL_FIELD_OFFSET (t));
1012 visit (DECL_BIT_FIELD_TYPE (t));
1013 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1014 visit (DECL_FIELD_BIT_OFFSET (t));
1015 visit (DECL_FCONTEXT (t));
1016 }
1017
1018 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1019 {
1020 visit (DECL_VINDEX (t));
1021 visit (DECL_FUNCTION_PERSONALITY (t));
1022 /* Do not follow DECL_FUNCTION_SPECIFIC_TARGET. */
1023 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1024 }
1025
1026 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1027 {
1028 visit (TYPE_SIZE (t));
1029 visit (TYPE_SIZE_UNIT (t));
1030 visit (TYPE_ATTRIBUTES (t));
1031 visit (TYPE_NAME (t));
1032 visit (TYPE_MAIN_VARIANT (t));
1033 if (TYPE_FILE_SCOPE_P (t))
1034 ;
1035 else
1036 visit (TYPE_CONTEXT (t));
1037 visit (TYPE_STUB_DECL (t));
1038 }
1039
1040 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1041 {
1042 if (code == ENUMERAL_TYPE)
1043 visit (TYPE_VALUES (t));
1044 else if (code == ARRAY_TYPE)
1045 visit (TYPE_DOMAIN (t));
1046 else if (RECORD_OR_UNION_TYPE_P (t))
1047 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1048 visit (f);
1049 else if (code == FUNCTION_TYPE
1050 || code == METHOD_TYPE)
1051 visit (TYPE_ARG_TYPES (t));
1052 if (!POINTER_TYPE_P (t))
1053 visit (TYPE_MINVAL (t));
1054 visit (TYPE_MAXVAL (t));
1055 if (RECORD_OR_UNION_TYPE_P (t))
1056 visit (TYPE_BINFO (t));
1057 }
1058
1059 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1060 {
1061 visit (TREE_PURPOSE (t));
1062 visit (TREE_VALUE (t));
1063 visit (TREE_CHAIN (t));
1064 }
1065
1066 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1067 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1068 visit (TREE_VEC_ELT (t, i));
1069
1070 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1071 {
1072 hstate.add_wide_int (TREE_OPERAND_LENGTH (t));
1073 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1074 visit (TREE_OPERAND (t, i));
1075 }
1076
1077 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1078 {
1079 unsigned i;
1080 tree b;
1081 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1082 visit (b);
1083 visit (BINFO_OFFSET (t));
1084 visit (BINFO_VTABLE (t));
1085 visit (BINFO_VPTR_FIELD (t));
1086 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1087 visit (b);
1088 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1089 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1090 }
1091
1092 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1093 {
1094 unsigned i;
1095 tree index, value;
1096 hstate.add_wide_int (CONSTRUCTOR_NELTS (t));
1097 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1098 {
1099 visit (index);
1100 visit (value);
1101 }
1102 }
1103
1104 if (code == OMP_CLAUSE)
1105 {
1106 int i;
1107 HOST_WIDE_INT val;
1108
1109 hstate.add_wide_int (OMP_CLAUSE_CODE (t));
1110 switch (OMP_CLAUSE_CODE (t))
1111 {
1112 case OMP_CLAUSE_DEFAULT:
1113 val = OMP_CLAUSE_DEFAULT_KIND (t);
1114 break;
1115 case OMP_CLAUSE_SCHEDULE:
1116 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1117 break;
1118 case OMP_CLAUSE_DEPEND:
1119 val = OMP_CLAUSE_DEPEND_KIND (t);
1120 break;
1121 case OMP_CLAUSE_MAP:
1122 val = OMP_CLAUSE_MAP_KIND (t);
1123 break;
1124 case OMP_CLAUSE_PROC_BIND:
1125 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1126 break;
1127 case OMP_CLAUSE_REDUCTION:
1128 val = OMP_CLAUSE_REDUCTION_CODE (t);
1129 break;
1130 default:
1131 val = 0;
1132 break;
1133 }
1134 hstate.add_wide_int (val);
1135 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1136 visit (OMP_CLAUSE_OPERAND (t, i));
1137 visit (OMP_CLAUSE_CHAIN (t));
1138 }
1139
1140 return hstate.end ();
1141
1142 #undef visit
1143 }
1144
1145 /* Compare two SCC entries by their hash value for qsorting them. */
1146
1147 int
1148 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1149 {
1150 const scc_entry *p1 = (const scc_entry *) p1_;
1151 const scc_entry *p2 = (const scc_entry *) p2_;
1152 if (p1->hash < p2->hash)
1153 return -1;
1154 else if (p1->hash > p2->hash)
1155 return 1;
1156 return 0;
1157 }
1158
/* Return a hash value for the SCC of trees on the SCC stack starting at
   index FIRST and containing SIZE members.  The value must be stable
   across translation units so that identical SCC regions can be merged
   at WPA time; see the algorithm description in the body.  As a side
   effect the per-member hash fields are updated and the SCC members are
   sorted into a deterministic order.  */

hashval_t
DFS::hash_scc (struct output_block *ob,
	       unsigned first, unsigned size)
{
  unsigned int last_classes = 0, iterations = 0;

  /* Compute initial hash values for the SCC members, ignoring
     intra-SCC edges (NULL map argument).  */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash = hash_tree (ob->writer_cache, NULL,
					sccstack[first+i].t);

  /* A singleton SCC needs no order canonicalization.  */
  if (size == 1)
    return sccstack[first].hash;

  /* We aim to get a unique hash for every tree within the SCC and compute
     the hash value of the whole SCC by combining all values together in a
     stable (entry point independent) order.  This guarantees that the
     same SCC regions within different translation units will get the same
     hash values and therefore will be merged at WPA time.

     Often the hashes are already unique.  In that case we compute the SCC
     hash by combining individual hash values in an increasing order.

     If there are duplicates we seek at least one tree with a unique hash
     (and pick the one with minimal hash and this property).  Then we
     obtain a stable order by a DFS walk starting from this unique tree
     and then use the index within this order to make individual hash
     values unique.

     If there is no tree with a unique hash, we iteratively propagate the
     hash values across the internal edges of the SCC.  This usually
     quickly leads to unique hashes.  Consider, for example, an SCC
     containing two pointers that are identical except for the type they
     point to and assume that these types are also part of the SCC.
     The propagation will add the points-to type information into their
     hash values.  */
  do
    {
      /* Sort the SCC so we can easily check for uniqueness.  */
      qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);

      unsigned int classes = 1;
      int firstunique = -1;

      /* Find the tree with the lowest unique hash (if it exists) and
	 compute the number of equivalence classes.  */
      if (sccstack[first].hash != sccstack[first+1].hash)
	firstunique = 0;
      for (unsigned i = 1; i < size; ++i)
	if (sccstack[first+i-1].hash != sccstack[first+i].hash)
	  {
	    classes++;
	    if (firstunique == -1
		&& (i == size - 1
		    || sccstack[first+i+1].hash != sccstack[first+i].hash))
	      firstunique = i;
	  }

      /* If we found a tree with a unique hash, stop the iteration.  */
      if (firstunique != -1
	  /* Also terminate if we run out of iterations or if the number of
	     equivalence classes is no longer increasing.
	     For example a cyclic list of trees that are all equivalent will
	     never have a unique entry point; we however do not build such
	     SCCs in our IL.  */
	  || classes <= last_classes || iterations > 16)
	{
	  hashval_t scc_hash;

	  /* If some hashes are not unique (CLASSES != SIZE), use the DFS
	     walk starting from FIRSTUNIQUE to obtain a stable order.  */
	  if (classes != size && firstunique != -1)
	    {
	      hash_map <tree, hashval_t> map(size*2);

	      /* Store hash values into a map, so we can associate them with
		 the reordered SCC.  */
	      for (unsigned i = 0; i < size; ++i)
		map.put (sccstack[first+i].t, sccstack[first+i].hash);

	      /* Re-walk the SCC from the unique entry; the resulting stack
		 order is entry-point independent.  */
	      DFS again (ob, sccstack[first+firstunique].t, false, false, true);
	      gcc_assert (again.sccstack.length () == size);

	      memcpy (sccstack.address () + first,
		      again.sccstack.address (),
		      sizeof (scc_entry) * size);

	      /* Update hash values of individual members by hashing in the
		 index within the stable order.  This ensures uniqueness.
		 Also compute the scc_hash by mixing in all hash values in
		 the stable order we obtained.  */
	      sccstack[first].hash = *map.get (sccstack[first].t);
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		{
		  sccstack[first+i].hash
		    = iterative_hash_hashval_t (i,
						*map.get (sccstack[first+i].t));
		  scc_hash = iterative_hash_hashval_t (scc_hash,
						       sccstack[first+i].hash);
		}
	    }
	  /* If we got unique hash values for each tree, then the sort
	     already ensured an entry point independent order.  Only
	     compute the final SCC hash.

	     If we failed to find the unique entry point, we go by the same
	     route.  We will eventually introduce unwanted hash conflicts.  */
	  else
	    {
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		scc_hash = iterative_hash_hashval_t (scc_hash,
						     sccstack[first+i].hash);
	      /* We can not 100% guarantee that the hash will not conflict
		 in a way so the unique hash is not found.  This however
		 should be an extremely rare situation.  ICE for now so
		 possible issues are found and evaluated.  */
	      gcc_checking_assert (classes == size);
	    }

	  /* To avoid conflicts across SCCs, iteratively hash the whole SCC
	     hash into the hash of each of the elements.  */
	  for (unsigned i = 0; i < size; ++i)
	    sccstack[first+i].hash
	      = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
	  return scc_hash;
	}

      last_classes = classes;
      iterations++;

      /* We failed to identify the entry point; propagate hash values
	 across the edges (this time passing the current hashes via MAP so
	 hash_tree mixes in neighbor hashes).  */
      {
	hash_map <tree, hashval_t> map(size*2);
	for (unsigned i = 0; i < size; ++i)
	  map.put (sccstack[first+i].t, sccstack[first+i].hash);

	for (unsigned i = 0; i < size; i++)
	  sccstack[first+i].hash = hash_tree (ob->writer_cache, &map,
					      sccstack[first+i].t);
      }
    }
  while (true);
}
1307
/* DFS walk EXPR and stream SCCs of tree bodies if they are not
   already in the streamer cache.  Main routine called for
   each visit of EXPR.  This is Tarjan's SCC algorithm: FROM_STATE is
   the state of the parent in the walk (NULL at the root), REF_P and
   THIS_REF_P control whether siblings/EXPR itself are streamed as
   references, and SINGLE_P requests a re-walk of a single leaf SCC
   without emitting anything.  */

void
DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
		     tree expr, bool ref_p, bool this_ref_p, bool single_p)
{
  unsigned ix;
  sccs **slot;

  /* Handle special cases.  */
  if (expr == NULL_TREE)
    return;

  /* Do not DFS walk into indexable trees.  */
  if (this_ref_p && tree_is_indexable (expr))
    return;

  /* Check if we already streamed EXPR.  */
  if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
    return;

  slot = (sccs **)pointer_map_insert (sccstate, expr);
  sccs *cstate = *slot;
  if (!cstate)
    {
      scc_entry e = { expr, 0 };
      /* Not yet visited.  DFS recurse and push it onto the stack.  */
      *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
      sccstack.safe_push (e);
      cstate->dfsnum = next_dfs_num++;
      cstate->low = cstate->dfsnum;

      /* Builtins are streamed by reference only; INTEGER_CSTs only need
	 their type edge walked.  Everything else gets a full body walk.  */
      if (streamer_handle_as_builtin_p (expr))
	;
      else if (TREE_CODE (expr) == INTEGER_CST
	       && !TREE_OVERFLOW (expr))
	DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p, single_p);
      else
	{
	  DFS_write_tree_body (ob, expr, cstate, ref_p, single_p);

	  /* Walk any LTO-specific edges.  */
	  if (DECL_P (expr)
	      && TREE_CODE (expr) != FUNCTION_DECL
	      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
	    {
	      /* Handle DECL_INITIAL for symbols.  */
	      tree initial = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
						       expr);
	      DFS_write_tree (ob, cstate, initial, ref_p, ref_p, single_p);
	    }
	}

      /* See if we found an SCC (EXPR is its root when low == dfsnum).  */
      if (cstate->low == cstate->dfsnum)
	{
	  unsigned first, size;
	  tree x;

	  /* If we are re-walking a single leaf-SCC just return and
	     let the caller access the sccstack.  */
	  if (single_p)
	    return;

	  /* Pop the SCC and compute its size.  */
	  first = sccstack.length ();
	  do
	    {
	      x = sccstack[--first].t;
	    }
	  while (x != expr);
	  size = sccstack.length () - first;

	  /* No need to compute hashes for LTRANS units, we don't perform
	     any merging there.  */
	  hashval_t scc_hash = 0;
	  unsigned scc_entry_len = 0;
	  if (!flag_wpa)
	    {
	      scc_hash = hash_scc (ob, first, size);

	      /* Put the entries with the least number of collisions first.  */
	      unsigned entry_start = 0;
	      scc_entry_len = size + 1;
	      for (unsigned i = 0; i < size;)
		{
		  unsigned from = i;
		  for (i = i + 1; i < size
		       && (sccstack[first + i].hash
			   == sccstack[first + from].hash); ++i)
		    ;
		  if (i - from < scc_entry_len)
		    {
		      scc_entry_len = i - from;
		      entry_start = from;
		    }
		}
	      /* Rotate the chosen candidate run to the front of the SCC.  */
	      for (unsigned i = 0; i < scc_entry_len; ++i)
		{
		  scc_entry tem = sccstack[first + i];
		  sccstack[first + i] = sccstack[first + entry_start + i];
		  sccstack[first + entry_start + i] = tem;
		}

	      if (scc_entry_len == 1)
		; /* We already sorted SCC deterministically in hash_scc.  */
	      else
		/* Check that we have only one SCC.
		   Naturally we may have conflicts if hash function is not
		   strong enough.  Lets see how far this gets.  */
		{
#ifdef ENABLE_CHECKING
		  gcc_unreachable ();
#endif
		}
	    }

	  /* Write LTO_tree_scc.  */
	  streamer_write_record_start (ob, LTO_tree_scc);
	  streamer_write_uhwi (ob, size);
	  streamer_write_uhwi (ob, scc_hash);

	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
	     All INTEGER_CSTs need to be handled this way as we need
	     their type to materialize them.  Also builtins are handled
	     this way.
	     ??? We still wrap these in LTO_tree_scc so at the
	     input side we can properly identify the tree we want
	     to ultimatively return.  */
	  if (size == 1)
	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
	  else
	    {
	      /* Write the size of the SCC entry candidates.  */
	      streamer_write_uhwi (ob, scc_entry_len);

	      /* Write all headers and populate the streamer cache.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  hashval_t hash = sccstack[first+i].hash;
		  tree t = sccstack[first+i].t;
		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
							      t, hash, &ix);
		  gcc_assert (!exists_p);

		  if (!lto_is_streamable (t))
		    internal_error ("tree code %qs is not supported "
				    "in LTO streams",
				    get_tree_code_name (TREE_CODE (t)));

		  gcc_checking_assert (!streamer_handle_as_builtin_p (t));

		  /* Write the header, containing everything needed to
		     materialize EXPR on the reading side.  */
		  streamer_write_tree_header (ob, t);
		}

	      /* Write the bitpacks and tree references.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);

		  /* Mark the end of the tree.  */
		  streamer_write_zero (ob);
		}
	    }

	  /* Finally truncate the vector.  */
	  sccstack.truncate (first);

	  if (from_state)
	    from_state->low = MIN (from_state->low, cstate->low);
	  return;
	}

      if (from_state)
	from_state->low = MIN (from_state->low, cstate->low);
    }
  /* EXPR was already on the stack: propagate the back-edge low link.  */
  gcc_checking_assert (from_state);
  if (cstate->dfsnum < from_state->dfsnum)
    from_state->low = MIN (cstate->dfsnum, from_state->low);
}
1492
1493
/* Emit the physical representation of tree node EXPR to output block
   OB.  If THIS_REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.
   On the first encounter this DFS-walks and streams all trees reachable
   from EXPR; in every case it ends by emitting a pickle reference to
   EXPR itself (or a plain reference/null record for the special cases).  */

void
lto_output_tree (struct output_block *ob, tree expr,
		 bool ref_p, bool this_ref_p)
{
  unsigned ix;
  bool existed_p;

  /* NULL is represented by an explicit null record.  */
  if (expr == NULL_TREE)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  /* Indexable trees are streamed as references into other sections.  */
  if (this_ref_p && tree_is_indexable (expr))
    {
      lto_output_tree_ref (ob, expr);
      return;
    }

  existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
  if (existed_p)
    {
      /* If a node has already been streamed out, make sure that
	 we don't write it more than once.  Otherwise, the reader
	 will instantiate two different nodes for the same object.  */
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
  else
    {
      /* This is the first time we see EXPR, write all reachable
	 trees to OB.  */
      static bool in_dfs_walk;

      /* Protect against recursion which means disconnect between
	 what tree edges we walk in the DFS walk and what edges
	 we stream out.  */
      gcc_assert (!in_dfs_walk);

      /* Start the DFS walk; the DFS constructor performs the whole walk
	 and streams out every SCC it finds.  */
      in_dfs_walk = true;
      DFS (ob, expr, ref_p, this_ref_p, false);
      in_dfs_walk = false;

      /* Finally append a reference to the tree we were writing.
	 ??? If expr ended up as a singleton we could have
	 inlined it here and avoid outputting a reference.  */
      existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
      gcc_assert (existed_p);
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
}
1559
1560
1561 /* Output to OB a list of try/catch handlers starting with FIRST. */
1562
1563 static void
1564 output_eh_try_list (struct output_block *ob, eh_catch first)
1565 {
1566 eh_catch n;
1567
1568 for (n = first; n; n = n->next_catch)
1569 {
1570 streamer_write_record_start (ob, LTO_eh_catch);
1571 stream_write_tree (ob, n->type_list, true);
1572 stream_write_tree (ob, n->filter_list, true);
1573 stream_write_tree (ob, n->label, true);
1574 }
1575
1576 streamer_write_record_start (ob, LTO_null);
1577 }
1578
1579
1580 /* Output EH region R in function FN to OB. CURR_RN is the slot index
1581 that is being emitted in FN->EH->REGION_ARRAY. This is used to
1582 detect EH region sharing. */
1583
1584 static void
1585 output_eh_region (struct output_block *ob, eh_region r)
1586 {
1587 enum LTO_tags tag;
1588
1589 if (r == NULL)
1590 {
1591 streamer_write_record_start (ob, LTO_null);
1592 return;
1593 }
1594
1595 if (r->type == ERT_CLEANUP)
1596 tag = LTO_ert_cleanup;
1597 else if (r->type == ERT_TRY)
1598 tag = LTO_ert_try;
1599 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1600 tag = LTO_ert_allowed_exceptions;
1601 else if (r->type == ERT_MUST_NOT_THROW)
1602 tag = LTO_ert_must_not_throw;
1603 else
1604 gcc_unreachable ();
1605
1606 streamer_write_record_start (ob, tag);
1607 streamer_write_hwi (ob, r->index);
1608
1609 if (r->outer)
1610 streamer_write_hwi (ob, r->outer->index);
1611 else
1612 streamer_write_zero (ob);
1613
1614 if (r->inner)
1615 streamer_write_hwi (ob, r->inner->index);
1616 else
1617 streamer_write_zero (ob);
1618
1619 if (r->next_peer)
1620 streamer_write_hwi (ob, r->next_peer->index);
1621 else
1622 streamer_write_zero (ob);
1623
1624 if (r->type == ERT_TRY)
1625 {
1626 output_eh_try_list (ob, r->u.eh_try.first_catch);
1627 }
1628 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1629 {
1630 stream_write_tree (ob, r->u.allowed.type_list, true);
1631 stream_write_tree (ob, r->u.allowed.label, true);
1632 streamer_write_uhwi (ob, r->u.allowed.filter);
1633 }
1634 else if (r->type == ERT_MUST_NOT_THROW)
1635 {
1636 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1637 bitpack_d bp = bitpack_create (ob->main_stream);
1638 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1639 streamer_write_bitpack (&bp);
1640 }
1641
1642 if (r->landing_pads)
1643 streamer_write_hwi (ob, r->landing_pads->index);
1644 else
1645 streamer_write_zero (ob);
1646 }
1647
1648
1649 /* Output landing pad LP to OB. */
1650
1651 static void
1652 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1653 {
1654 if (lp == NULL)
1655 {
1656 streamer_write_record_start (ob, LTO_null);
1657 return;
1658 }
1659
1660 streamer_write_record_start (ob, LTO_eh_landing_pad);
1661 streamer_write_hwi (ob, lp->index);
1662 if (lp->next_lp)
1663 streamer_write_hwi (ob, lp->next_lp->index);
1664 else
1665 streamer_write_zero (ob);
1666
1667 if (lp->region)
1668 streamer_write_hwi (ob, lp->region->index);
1669 else
1670 streamer_write_zero (ob);
1671
1672 stream_write_tree (ob, lp->post_landing_pad, true);
1673 }
1674
1675
/* Output the existing eh_table of function FN to OB.  The record starts
   with LTO_eh_table and contains the region tree root, all regions, all
   landing pads, the runtime type data and the action chain table; a
   trailing LTO_null either terminates the record or indicates that FN
   has no EH data at all.  */

static void
output_eh_regions (struct output_block *ob, struct function *fn)
{
  if (fn->eh && fn->eh->region_tree)
    {
      unsigned i;
      eh_region eh;
      eh_landing_pad lp;
      tree ttype;

      streamer_write_record_start (ob, LTO_eh_table);

      /* Emit the index of the root of the EH region tree.  */
      streamer_write_hwi (ob, fn->eh->region_tree->index);

      /* Emit all the EH regions in the region array.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
	output_eh_region (ob, eh);

      /* Emit all landing pads.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
	output_eh_lp (ob, lp);

      /* Emit all the runtime type data.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
	stream_write_tree (ob, ttype, true);

      /* Emit the table of action chains.  The representation differs
	 between the ARM EABI unwinder (trees) and the generic unwinder
	 (raw bytes).  */
      if (targetm.arm_eabi_unwinder)
	{
	  tree t;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
	    stream_write_tree (ob, t, true);
	}
      else
	{
	  uchar c;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
	    streamer_write_char_stream (ob->main_stream, c);
	}
    }

  /* The LTO_null either terminates the record or indicates that there
     are no eh_records at all.  */
  streamer_write_record_start (ob, LTO_null);
}
1729
1730
1731 /* Output all of the active ssa names to the ssa_names stream. */
1732
1733 static void
1734 output_ssa_names (struct output_block *ob, struct function *fn)
1735 {
1736 unsigned int i, len;
1737
1738 len = vec_safe_length (SSANAMES (fn));
1739 streamer_write_uhwi (ob, len);
1740
1741 for (i = 1; i < len; i++)
1742 {
1743 tree ptr = (*SSANAMES (fn))[i];
1744
1745 if (ptr == NULL_TREE
1746 || SSA_NAME_IN_FREE_LIST (ptr)
1747 || virtual_operand_p (ptr))
1748 continue;
1749
1750 streamer_write_uhwi (ob, i);
1751 streamer_write_char_stream (ob->main_stream,
1752 SSA_NAME_IS_DEFAULT_DEF (ptr));
1753 if (SSA_NAME_VAR (ptr))
1754 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1755 else
1756 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1757 stream_write_tree (ob, TREE_TYPE (ptr), true);
1758 }
1759
1760 streamer_write_zero (ob);
1761 }
1762
1763
1764 /* Output a wide-int. */
1765
1766 static void
1767 streamer_write_wi (struct output_block *ob,
1768 const widest_int &w)
1769 {
1770 int len = w.get_len ();
1771
1772 streamer_write_uhwi (ob, w.get_precision ());
1773 streamer_write_uhwi (ob, len);
1774 for (int i = 0; i < len; i++)
1775 streamer_write_hwi (ob, w.elt (i));
1776 }
1777
1778
1779 /* Output the cfg. */
1780
1781 static void
1782 output_cfg (struct output_block *ob, struct function *fn)
1783 {
1784 struct lto_output_stream *tmp_stream = ob->main_stream;
1785 basic_block bb;
1786
1787 ob->main_stream = ob->cfg_stream;
1788
1789 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1790 profile_status_for_fn (fn));
1791
1792 /* Output the number of the highest basic block. */
1793 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1794
1795 FOR_ALL_BB_FN (bb, fn)
1796 {
1797 edge_iterator ei;
1798 edge e;
1799
1800 streamer_write_hwi (ob, bb->index);
1801
1802 /* Output the successors and the edge flags. */
1803 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1804 FOR_EACH_EDGE (e, ei, bb->succs)
1805 {
1806 streamer_write_uhwi (ob, e->dest->index);
1807 streamer_write_hwi (ob, e->probability);
1808 streamer_write_gcov_count (ob, e->count);
1809 streamer_write_uhwi (ob, e->flags);
1810 }
1811 }
1812
1813 streamer_write_hwi (ob, -1);
1814
1815 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1816 while (bb->next_bb)
1817 {
1818 streamer_write_hwi (ob, bb->next_bb->index);
1819 bb = bb->next_bb;
1820 }
1821
1822 streamer_write_hwi (ob, -1);
1823
1824 /* ??? The cfgloop interface is tied to cfun. */
1825 gcc_assert (cfun == fn);
1826
1827 /* Output the number of loops. */
1828 streamer_write_uhwi (ob, number_of_loops (fn));
1829
1830 /* Output each loop, skipping the tree root which has number zero. */
1831 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1832 {
1833 struct loop *loop = get_loop (fn, i);
1834
1835 /* Write the index of the loop header. That's enough to rebuild
1836 the loop tree on the reader side. Stream -1 for an unused
1837 loop entry. */
1838 if (!loop)
1839 {
1840 streamer_write_hwi (ob, -1);
1841 continue;
1842 }
1843 else
1844 streamer_write_hwi (ob, loop->header->index);
1845
1846 /* Write everything copy_loop_info copies. */
1847 streamer_write_enum (ob->main_stream,
1848 loop_estimation, EST_LAST, loop->estimate_state);
1849 streamer_write_hwi (ob, loop->any_upper_bound);
1850 if (loop->any_upper_bound)
1851 streamer_write_wi (ob, loop->nb_iterations_upper_bound);
1852 streamer_write_hwi (ob, loop->any_estimate);
1853 if (loop->any_estimate)
1854 streamer_write_wi (ob, loop->nb_iterations_estimate);
1855
1856 /* Write OMP SIMD related info. */
1857 streamer_write_hwi (ob, loop->safelen);
1858 streamer_write_hwi (ob, loop->dont_vectorize);
1859 streamer_write_hwi (ob, loop->force_vectorize);
1860 stream_write_tree (ob, loop->simduid, true);
1861 }
1862
1863 ob->main_stream = tmp_stream;
1864 }
1865
1866
/* Create the header in the file using OB.  If the section type is for
   a function body, FN is the FUNCTION_DECL whose assembler name labels
   the per-function section; FN is unused otherwise.  Writes the
   lto_function_header followed by the cfg stream (function bodies
   only), the main stream and the string stream, then closes the
   section.  */

void
produce_asm (struct output_block *ob, tree fn)
{
  enum lto_section_type section_type = ob->section_type;
  struct lto_function_header header;
  char *section_name;

  /* Function bodies get their own named section keyed on the
     function's assembler name.  */
  if (section_type == LTO_section_function_body)
    {
      const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
      section_name = lto_get_section_name (section_type, name, NULL);
    }
  else
    section_name = lto_get_section_name (section_type, NULL, NULL);

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header is stream computed here.  */
  memset (&header, 0, sizeof (struct lto_function_header));

  /* Write the header.  */
  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  header.compressed_size = 0;

  /* The cfg stream only exists for function bodies.  */
  if (section_type == LTO_section_function_body)
    header.cfg_size = ob->cfg_stream->total_size;
  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;
  lto_write_data (&header, sizeof header);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  if (section_type == LTO_section_function_body)
    lto_write_stream (ob->cfg_stream);
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();
}
1912
1913
/* Output the base body of struct function FN using output block OB.
   This covers everything in struct function that is not the gimple
   body itself: static chain, local decls, IL properties, the
   single-bit attribute flags (as one bitpack) and the start/end
   source locations.  The bitpack field order is the wire format and
   must match the reader.  */

static void
output_struct_function_base (struct output_block *ob, struct function *fn)
{
  struct bitpack_d bp;
  unsigned i;
  tree t;

  /* Output the static chain and non-local goto save area.  */
  stream_write_tree (ob, fn->static_chain_decl, true);
  stream_write_tree (ob, fn->nonlocal_goto_save_area, true);

  /* Output all the local variables in the function.  */
  streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
  FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
    stream_write_tree (ob, t, true);

  /* Output current IL state of the function.  */
  streamer_write_uhwi (ob, fn->curr_properties);

  /* Write all the attributes for FN.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, fn->is_thunk, 1);
  bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
  bp_pack_value (&bp, fn->returns_pcc_struct, 1);
  bp_pack_value (&bp, fn->returns_struct, 1);
  bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
  bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
  bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
  bp_pack_value (&bp, fn->after_inlining, 1);
  bp_pack_value (&bp, fn->stdarg, 1);
  bp_pack_value (&bp, fn->has_nonlocal_label, 1);
  bp_pack_value (&bp, fn->calls_alloca, 1);
  bp_pack_value (&bp, fn->calls_setjmp, 1);
  bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
  bp_pack_value (&bp, fn->has_simduid_loops, 1);
  bp_pack_value (&bp, fn->va_list_fpr_size, 8);
  bp_pack_value (&bp, fn->va_list_gpr_size, 8);

  /* Output the function start and end loci.  */
  stream_output_location (ob, &bp, fn->function_start_locus);
  stream_output_location (ob, &bp, fn->function_end_locus);

  streamer_write_bitpack (&bp);
}
1960
1961
/* Output the body of function NODE->DECL into its own
   LTO_section_function_body section.  Abstract functions (no gimple
   body) are streamed too, with just the declaration parts needed for
   debug info.  Statements are renumbered with fresh UIDs so the reader
   can reproduce the numbering without it being streamed.  */

static void
output_function (struct cgraph_node *node)
{
  tree function;
  struct function *fn;
  basic_block bb;
  struct output_block *ob;

  function = node->decl;
  fn = DECL_STRUCT_FUNCTION (function);
  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->symbol = node;

  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  /* Set current_function_decl and cfun.  */
  push_cfun (fn);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  streamer_write_record_start (ob, LTO_function);

  /* Output decls for parameters and args.  */
  stream_write_tree (ob, DECL_RESULT (function), true);
  streamer_write_chain (ob, DECL_ARGUMENTS (function), true);

  /* Output DECL_INITIAL for the function, which contains the tree of
     lexical scopes.  */
  stream_write_tree (ob, DECL_INITIAL (function), true);

  /* We also stream abstract functions where we stream only stuff needed for
     debug info.  */
  if (gimple_has_body_p (function))
    {
      /* Flag: a real body follows.  */
      streamer_write_uhwi (ob, 1);
      output_struct_function_base (ob, fn);

      /* Output all the SSA names used in the function.  */
      output_ssa_names (ob, fn);

      /* Output any exception handling regions.  */
      output_eh_regions (ob, fn);


      /* We will renumber the statements.  The code that does this uses
	 the same ordering that we use for serializing them so we can use
	 the same code on the other end and not have to write out the
	 statement numbers.  We do not assign UIDs to PHIs here because
	 virtual PHIs get re-computed on-the-fly which would make numbers
	 inconsistent.  */
      set_gimple_stmt_max_uid (cfun, 0);
      FOR_ALL_BB_FN (bb, cfun)
	{
	  gimple_stmt_iterator gsi;
	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);

	      /* Virtual PHIs are not going to be streamed.  */
	      if (!virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}
      /* To avoid keeping duplicate gimple IDs in the statements, renumber
	 virtual phis now.  */
      FOR_ALL_BB_FN (bb, cfun)
	{
	  gimple_stmt_iterator gsi;
	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      if (virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}

      /* Output the code for the function.  */
      FOR_ALL_BB_FN (bb, fn)
	output_bb (ob, bb, fn);

      /* The terminator for this function.  */
      streamer_write_record_start (ob, LTO_null);

      output_cfg (ob, fn);

      pop_cfun ();
   }
  else
    /* Flag: abstract function, no body follows.  */
    streamer_write_uhwi (ob, 0);

  /* Create a section to hold the pickled output of this function.  */
  produce_asm (ob, function);

  destroy_output_block (ob);
}
2067
/* Output the initializer (static constructor) of the variable
   NODE->DECL.  The header comment previously said "function" — this
   streams a variable's DECL_INITIAL, not a function body; it only
   reuses the LTO_section_function_body section kind.  */

static void
output_constructor (struct varpool_node *node)
{
  tree var = node->decl;
  struct output_block *ob;

  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->symbol = node;

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* Output DECL_INITIAL for the variable, which holds the
     initializer expression.  */
  stream_write_tree (ob, DECL_INITIAL (var), true);

  /* Create a section to hold the pickled output of this variable.  */
  produce_asm (ob, var);

  destroy_output_block (ob);
}
2093
2094
2095 /* Emit toplevel asms. */
2096
2097 void
2098 lto_output_toplevel_asms (void)
2099 {
2100 struct output_block *ob;
2101 struct asm_node *can;
2102 char *section_name;
2103 struct lto_asm_header header;
2104
2105 if (! asm_nodes)
2106 return;
2107
2108 ob = create_output_block (LTO_section_asm);
2109
2110 /* Make string 0 be a NULL string. */
2111 streamer_write_char_stream (ob->string_stream, 0);
2112
2113 for (can = asm_nodes; can; can = can->next)
2114 {
2115 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2116 streamer_write_hwi (ob, can->order);
2117 }
2118
2119 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2120
2121 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
2122 lto_begin_section (section_name, !flag_wpa);
2123 free (section_name);
2124
2125 /* The entire header stream is computed here. */
2126 memset (&header, 0, sizeof (header));
2127
2128 /* Write the header. */
2129 header.lto_header.major_version = LTO_major_version;
2130 header.lto_header.minor_version = LTO_minor_version;
2131
2132 header.main_size = ob->main_stream->total_size;
2133 header.string_size = ob->string_stream->total_size;
2134 lto_write_data (&header, sizeof header);
2135
2136 /* Put all of the gimple and the string table out the asm file as a
2137 block of text. */
2138 lto_write_stream (ob->main_stream);
2139 lto_write_stream (ob->string_stream);
2140
2141 lto_end_section ();
2142
2143 destroy_output_block (ob);
2144 }
2145
2146
2147 /* Copy the function body or variable constructor of NODE without deserializing. */
2148
2149 static void
2150 copy_function_or_variable (struct symtab_node *node)
2151 {
2152 tree function = node->decl;
2153 struct lto_file_decl_data *file_data = node->lto_file_data;
2154 const char *data;
2155 size_t len;
2156 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2157 char *section_name =
2158 lto_get_section_name (LTO_section_function_body, name, NULL);
2159 size_t i, j;
2160 struct lto_in_decl_state *in_state;
2161 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2162
2163 lto_begin_section (section_name, !flag_wpa);
2164 free (section_name);
2165
2166 /* We may have renamed the declaration, e.g., a static function. */
2167 name = lto_get_decl_name_mapping (file_data, name);
2168
2169 data = lto_get_section_data (file_data, LTO_section_function_body,
2170 name, &len);
2171 gcc_assert (data);
2172
2173 /* Do a bit copy of the function body. */
2174 lto_write_data (data, len);
2175
2176 /* Copy decls. */
2177 in_state =
2178 lto_get_function_in_decl_state (node->lto_file_data, function);
2179 gcc_assert (in_state);
2180
2181 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2182 {
2183 size_t n = in_state->streams[i].size;
2184 tree *trees = in_state->streams[i].trees;
2185 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2186
2187 /* The out state must have the same indices and the in state.
2188 So just copy the vector. All the encoders in the in state
2189 must be empty where we reach here. */
2190 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2191 encoder->trees.reserve_exact (n);
2192 for (j = 0; j < n; j++)
2193 encoder->trees.safe_push (trees[j]);
2194 }
2195
2196 lto_free_section_data (file_data, LTO_section_function_body, name,
2197 data, len);
2198 lto_end_section ();
2199 }
2200
2201 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2202
2203 static tree
2204 wrap_refs (tree *tp, int *ws, void *)
2205 {
2206 tree t = *tp;
2207 if (handled_component_p (t)
2208 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL)
2209 {
2210 tree decl = TREE_OPERAND (t, 0);
2211 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2212 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2213 build1 (ADDR_EXPR, ptrtype, decl),
2214 build_int_cst (ptrtype, 0));
2215 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2216 *ws = 0;
2217 }
2218 else if (TREE_CODE (t) == CONSTRUCTOR)
2219 ;
2220 else if (!EXPR_P (t))
2221 *ws = 0;
2222 return NULL_TREE;
2223 }
2224
/* Main entry point from the pass manager.  Streams out the bodies of
   all encoded functions and the initializers of all encoded
   variables, then the symbol table itself.  */

void
lto_output (void)
{
  struct lto_out_decl_state *decl_state;
#ifdef ENABLE_CHECKING
  /* With checking, record every DECL_UID we emit so we can assert
     that no symbol body/initializer is streamed twice.  */
  bitmap output = lto_bitmap_alloc ();
#endif
  int i, n_nodes;
  lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* Initialize the streamer.  */
  lto_streamer_init ();

  n_nodes = lto_symtab_encoder_size (encoder);
  /* Process only the functions with bodies.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
	{
	  if (lto_symtab_encoder_encode_body_p (encoder, node)
	      && !node->alias)
	    {
#ifdef ENABLE_CHECKING
	      gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
	      bitmap_set_bit (output, DECL_UID (node->decl));
#endif
	      /* Each body gets its own out-decl-state, pushed for the
		 duration of the write and recorded afterwards.  */
	      decl_state = lto_new_out_decl_state ();
	      lto_push_out_decl_state (decl_state);
	      /* At WPA time a function without gimple body was read
		 earlier from disk — bit-copy its section instead of
		 re-streaming.  */
	      if (gimple_has_body_p (node->decl) || !flag_wpa)
		output_function (node);
	      else
		copy_function_or_variable (node);
	      gcc_assert (lto_get_out_decl_state () == decl_state);
	      lto_pop_out_decl_state ();
	      lto_record_function_out_decl_state (node->decl, decl_state);
	    }
	}
      else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
	{
	  /* Wrap symbol references inside the ctor in a type
	     preserving MEM_REF.  */
	  tree ctor = DECL_INITIAL (node->decl);
	  if (ctor && !in_lto_p)
	    walk_tree (&ctor, wrap_refs, NULL, NULL);
	  /* Only stream initializers not already represented in the
	     symbol-table stream (error_mark_node sentinel).  */
	  if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
	      && lto_symtab_encoder_encode_initializer_p (encoder, node)
	      && !node->alias)
	    {
	      timevar_push (TV_IPA_LTO_CTORS_OUT);
#ifdef ENABLE_CHECKING
	      gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
	      bitmap_set_bit (output, DECL_UID (node->decl));
#endif
	      decl_state = lto_new_out_decl_state ();
	      lto_push_out_decl_state (decl_state);
	      /* Same pattern as for functions: bit-copy at WPA time
		 when the initializer was not materialized.  */
	      if (DECL_INITIAL (node->decl) != error_mark_node
		  || !flag_wpa)
		output_constructor (node);
	      else
		copy_function_or_variable (node);
	      gcc_assert (lto_get_out_decl_state () == decl_state);
	      lto_pop_out_decl_state ();
	      lto_record_function_out_decl_state (node->decl, decl_state);
	      timevar_pop (TV_IPA_LTO_CTORS_OUT);
	    }
	}
    }

  /* Emit the callgraph after emitting function bodies.  This needs to
     be done now to make sure that all the statements in every function
     have been renumbered so that edges can be associated with call
     statements using the statement UIDs.  */
  output_symtab ();

#ifdef ENABLE_CHECKING
  lto_bitmap_free (output);
#endif
}
2306
2307 /* Write each node in encoded by ENCODER to OB, as well as those reachable
2308 from it and required for correct representation of its semantics.
2309 Each node in ENCODER must be a global declaration or a type. A node
2310 is written only once, even if it appears multiple times in the
2311 vector. Certain transitively-reachable nodes, such as those
2312 representing expressions, may be duplicated, but such nodes
2313 must not appear in ENCODER itself. */
2314
2315 static void
2316 write_global_stream (struct output_block *ob,
2317 struct lto_tree_ref_encoder *encoder)
2318 {
2319 tree t;
2320 size_t index;
2321 const size_t size = lto_tree_ref_encoder_size (encoder);
2322
2323 for (index = 0; index < size; index++)
2324 {
2325 t = lto_tree_ref_encoder_get_tree (encoder, index);
2326 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2327 stream_write_tree (ob, t, false);
2328 }
2329 }
2330
2331
2332 /* Write a sequence of indices into the globals vector corresponding
2333 to the trees in ENCODER. These are used by the reader to map the
2334 indices used to refer to global entities within function bodies to
2335 their referents. */
2336
2337 static void
2338 write_global_references (struct output_block *ob,
2339 struct lto_tree_ref_encoder *encoder)
2340 {
2341 tree t;
2342 uint32_t index;
2343 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2344
2345 /* Write size and slot indexes as 32-bit unsigned numbers. */
2346 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2347 data[0] = size;
2348
2349 for (index = 0; index < size; index++)
2350 {
2351 uint32_t slot_num;
2352
2353 t = lto_tree_ref_encoder_get_tree (encoder, index);
2354 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2355 gcc_assert (slot_num != (unsigned)-1);
2356 data[index + 1] = slot_num;
2357 }
2358
2359 lto_write_data (data, sizeof (int32_t) * (size + 1));
2360 free (data);
2361 }
2362
2363
2364 /* Write all the streams in an lto_out_decl_state STATE using
2365 output block OB and output stream OUT_STREAM. */
2366
2367 void
2368 lto_output_decl_state_streams (struct output_block *ob,
2369 struct lto_out_decl_state *state)
2370 {
2371 int i;
2372
2373 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2374 write_global_stream (ob, &state->streams[i]);
2375 }
2376
2377
2378 /* Write all the references in an lto_out_decl_state STATE using
2379 output block OB and output stream OUT_STREAM. */
2380
2381 void
2382 lto_output_decl_state_refs (struct output_block *ob,
2383 struct lto_out_decl_state *state)
2384 {
2385 unsigned i;
2386 uint32_t ref;
2387 tree decl;
2388
2389 /* Write reference to FUNCTION_DECL. If there is not function,
2390 write reference to void_type_node. */
2391 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2392 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2393 gcc_assert (ref != (unsigned)-1);
2394 lto_write_data (&ref, sizeof (uint32_t));
2395
2396 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2397 write_global_references (ob, &state->streams[i]);
2398 }
2399
2400
2401 /* Return the written size of STATE. */
2402
2403 static size_t
2404 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2405 {
2406 int i;
2407 size_t size;
2408
2409 size = sizeof (int32_t); /* fn_ref. */
2410 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2411 {
2412 size += sizeof (int32_t); /* vector size. */
2413 size += (lto_tree_ref_encoder_size (&state->streams[i])
2414 * sizeof (int32_t));
2415 }
2416 return size;
2417 }
2418
2419
/* Write symbol T into STREAM in CACHE.  SEEN specifies symbols we wrote
   so far; a symbol is emitted at most once.  ALIAS says whether T is
   being written on behalf of an alias, which relaxes the definition
   assertions below.  */

static void
write_symbol (struct streamer_tree_cache_d *cache,
	      tree t, hash_set<const char *> *seen, bool alias)
{
  const char *name;
  enum gcc_plugin_symbol_kind kind;
  enum gcc_plugin_symbol_visibility visibility;
  unsigned slot_num;
  uint64_t size;
  const char *comdat;
  unsigned char c;

  /* None of the following kinds of symbols are needed in the
     symbol table.  */
  if (!TREE_PUBLIC (t)
      || is_builtin_fn (t)
      || DECL_ABSTRACT (t)
      || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
    return;
  gcc_assert (TREE_CODE (t) != RESULT_DECL);

  /* Only variables and functions appear in the plugin symbol table.  */
  gcc_assert (TREE_CODE (t) == VAR_DECL
	      || TREE_CODE (t) == FUNCTION_DECL);

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));

  /* This behaves like assemble_name_raw in varasm.c, performing the
     same name manipulations that ASM_OUTPUT_LABELREF does. */
  name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));

  /* hash_set::add returns true when the name was already present —
     the symbol has been written before; emit each name only once.  */
  if (seen->add (name))
    return;

  streamer_tree_cache_lookup (cache, t, &slot_num);
  gcc_assert (slot_num != (unsigned)-1);

  /* Classify the symbol for the linker plugin: undefined vs defined,
     and weak/common variants of each.  */
  if (DECL_EXTERNAL (t))
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKUNDEF;
      else
	kind = GCCPK_UNDEF;
    }
  else
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKDEF;
      else if (DECL_COMMON (t))
	kind = GCCPK_COMMON;
      else
	kind = GCCPK_DEF;

      /* When something is defined, it should have node attached.  */
      gcc_assert (alias || TREE_CODE (t) != VAR_DECL
		  || varpool_node::get (t)->definition);
      gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
		  || (cgraph_node::get (t)
		      && cgraph_node::get (t)->definition));
    }

  /* Imitate what default_elf_asm_output_external do.
     When symbol is external, we need to output it with DEFAULT visibility
     when compiling with -fvisibility=default, while with HIDDEN visibility
     when symbol has attribute (visibility("hidden")) specified.
     targetm.binds_local_p check DECL_VISIBILITY_SPECIFIED and gets this
     right.  */

  if (DECL_EXTERNAL (t)
      && !targetm.binds_local_p (t))
    visibility = GCCPV_DEFAULT;
  else
    switch (DECL_VISIBILITY (t))
      {
      case VISIBILITY_DEFAULT:
	visibility = GCCPV_DEFAULT;
	break;
      case VISIBILITY_PROTECTED:
	visibility = GCCPV_PROTECTED;
	break;
      case VISIBILITY_HIDDEN:
	visibility = GCCPV_HIDDEN;
	break;
      case VISIBILITY_INTERNAL:
	visibility = GCCPV_INTERNAL;
	break;
      }

  /* Only common symbols carry their size; everyone else gets 0.  */
  if (kind == GCCPK_COMMON
      && DECL_SIZE_UNIT (t)
      && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
    size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
  else
    size = 0;

  if (DECL_ONE_ONLY (t))
    comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
  else
    comdat = "";

  /* Emit the fixed-layout symbol record: NUL-terminated name,
     NUL-terminated comdat group, kind byte, visibility byte,
     8-byte size, 4-byte writer-cache slot number.  */
  lto_write_data (name, strlen (name) + 1);
  lto_write_data (comdat, strlen (comdat) + 1);
  c = (unsigned char) kind;
  lto_write_data (&c, 1);
  c = (unsigned char) visibility;
  lto_write_data (&c, 1);
  lto_write_data (&size, 8);
  lto_write_data (&slot_num, 4);
}
2531
2532 /* Return true if NODE should appear in the plugin symbol table. */
2533
2534 bool
2535 output_symbol_p (symtab_node *node)
2536 {
2537 struct cgraph_node *cnode;
2538 if (!node->real_symbol_p ())
2539 return false;
2540 /* We keep external functions in symtab for sake of inlining
2541 and devirtualization. We do not want to see them in symbol table as
2542 references unless they are really used. */
2543 cnode = dyn_cast <cgraph_node *> (node);
2544 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2545 && cnode->callers)
2546 return true;
2547
2548 /* Ignore all references from external vars initializers - they are not really
2549 part of the compilation unit until they are used by folding. Some symbols,
2550 like references to external construction vtables can not be referred to at all.
2551 We decide this at can_refer_decl_in_current_unit_p. */
2552 if (!node->definition || DECL_EXTERNAL (node->decl))
2553 {
2554 int i;
2555 struct ipa_ref *ref;
2556 for (i = 0; node->iterate_referring (i, ref); i++)
2557 {
2558 if (ref->use == IPA_REF_ALIAS)
2559 continue;
2560 if (is_a <cgraph_node *> (ref->referring))
2561 return true;
2562 if (!DECL_EXTERNAL (ref->referring->decl))
2563 return true;
2564 }
2565 return false;
2566 }
2567 return true;
2568 }
2569
2570
2571 /* Write an IL symbol table to OB.
2572 SET and VSET are cgraph/varpool node sets we are outputting. */
2573
2574 static void
2575 produce_symtab (struct output_block *ob)
2576 {
2577 struct streamer_tree_cache_d *cache = ob->writer_cache;
2578 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2579 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2580 lto_symtab_encoder_iterator lsei;
2581
2582 lto_begin_section (section_name, false);
2583 free (section_name);
2584
2585 hash_set<const char *> seen;
2586
2587 /* Write the symbol table.
2588 First write everything defined and then all declarations.
2589 This is necessary to handle cases where we have duplicated symbols. */
2590 for (lsei = lsei_start (encoder);
2591 !lsei_end_p (lsei); lsei_next (&lsei))
2592 {
2593 symtab_node *node = lsei_node (lsei);
2594
2595 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2596 continue;
2597 write_symbol (cache, node->decl, &seen, false);
2598 }
2599 for (lsei = lsei_start (encoder);
2600 !lsei_end_p (lsei); lsei_next (&lsei))
2601 {
2602 symtab_node *node = lsei_node (lsei);
2603
2604 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2605 continue;
2606 write_symbol (cache, node->decl, &seen, false);
2607 }
2608
2609 lto_end_section ();
2610 }
2611
2612
/* This pass is run after all of the functions are serialized and all
   of the IPA passes have written their serialized forms.  This pass
   causes the vector of all of the global decls and types used from
   this file to be written in to a section that can then be read in to
   recover these on other side.  */

void
produce_asm_for_decls (void)
{
  struct lto_out_decl_state *out_state;
  struct lto_out_decl_state *fn_out_state;
  struct lto_decl_header header;
  char *section_name;
  struct output_block *ob;
  unsigned idx, num_fns;
  size_t decl_state_size;
  int32_t num_decl_states;

  ob = create_output_block (LTO_section_decls);

  memset (&header, 0, sizeof (struct lto_decl_header));

  section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* All alias pairs must have been resolved by now.  */
  gcc_assert (!alias_pairs);

  /* Get rid of the global decl state hash tables to save some memory.  */
  out_state = lto_get_out_decl_state ();
  for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
    if (out_state->streams[i].tree_hash_table)
      {
	delete out_state->streams[i].tree_hash_table;
	out_state->streams[i].tree_hash_table = NULL;
      }

  /* Write the global symbols: first the main out-decl state, then one
     state per streamed function body.  */
  lto_output_decl_state_streams (ob, out_state);
  num_fns = lto_function_decl_states.length ();
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_streams (ob, fn_out_state);
    }

  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  /* Currently not used.  This field would allow us to preallocate
     the globals vector, so that it need not be resized as it is extended.  */
  header.num_nodes = -1;

  /* Compute the total size of all decl out states: the count field
     plus the written size of each state.  */
  decl_state_size = sizeof (int32_t);
  decl_state_size += lto_out_decl_state_written_size (out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      decl_state_size += lto_out_decl_state_written_size (fn_out_state);
    }
  header.decl_state_size = decl_state_size;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  lto_write_data (&header, sizeof header);

  /* Write the main out-decl state, followed by out-decl states of
     functions.  */
  num_decl_states = num_fns + 1;
  lto_write_data (&num_decl_states, sizeof (num_decl_states));
  lto_output_decl_state_refs (ob, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state = lto_function_decl_states[idx];
      lto_output_decl_state_refs (ob, fn_out_state);
    }

  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  /* Write the symbol table.  It is used by linker to determine dependencies
     and thus we can skip it for WPA.  */
  if (!flag_wpa)
    produce_symtab (ob);

  /* Write command line opts.  */
  lto_write_options ();

  /* Deallocate memory and clean up.  */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_delete_out_decl_state (fn_out_state);
    }
  lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
  lto_function_decl_states.release ();
  destroy_output_block (ob);
}