Merger of git branch "gimple-classes-v2-option-3"
[gcc.git] / gcc / lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2014 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "stor-layout.h"
29 #include "stringpool.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "hashtab.h"
35 #include "hash-set.h"
36 #include "predict.h"
37 #include "vec.h"
38 #include "machmode.h"
39 #include "hard-reg-set.h"
40 #include "function.h"
41 #include "dominance.h"
42 #include "cfg.h"
43 #include "basic-block.h"
44 #include "tree-ssa-alias.h"
45 #include "internal-fn.h"
46 #include "gimple-expr.h"
47 #include "is-a.h"
48 #include "gimple.h"
49 #include "gimple-iterator.h"
50 #include "gimple-ssa.h"
51 #include "tree-ssanames.h"
52 #include "tree-pass.h"
53 #include "diagnostic-core.h"
54 #include "inchash.h"
55 #include "except.h"
56 #include "lto-symtab.h"
57 #include "hash-map.h"
58 #include "plugin-api.h"
59 #include "ipa-ref.h"
60 #include "cgraph.h"
61 #include "lto-streamer.h"
62 #include "data-streamer.h"
63 #include "gimple-streamer.h"
64 #include "tree-streamer.h"
65 #include "streamer-hooks.h"
66 #include "cfgloop.h"
67 #include "builtins.h"
68
69
70 static void lto_write_tree (struct output_block*, tree, bool);
71
/* Reset the current location state stored in OB.  After this, the next
   location streamed through lto_output_location will be emitted as a
   full record rather than a delta against a previous location.  */

static void
clear_line_info (struct output_block *ob)
{
  ob->current_file = NULL;
  ob->current_line = 0;
  ob->current_col = 0;
}
81
82
83 /* Create the output block and return it. SECTION_TYPE is
84 LTO_section_function_body or LTO_static_initializer. */
85
86 struct output_block *
87 create_output_block (enum lto_section_type section_type)
88 {
89 struct output_block *ob = XCNEW (struct output_block);
90
91 ob->section_type = section_type;
92 ob->decl_state = lto_get_out_decl_state ();
93 ob->main_stream = XCNEW (struct lto_output_stream);
94 ob->string_stream = XCNEW (struct lto_output_stream);
95 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
96
97 if (section_type == LTO_section_function_body)
98 ob->cfg_stream = XCNEW (struct lto_output_stream);
99
100 clear_line_info (ob);
101
102 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
103 gcc_obstack_init (&ob->obstack);
104
105 return ob;
106 }
107
108
109 /* Destroy the output block OB. */
110
111 void
112 destroy_output_block (struct output_block *ob)
113 {
114 enum lto_section_type section_type = ob->section_type;
115
116 delete ob->string_hash_table;
117 ob->string_hash_table = NULL;
118
119 free (ob->main_stream);
120 free (ob->string_stream);
121 if (section_type == LTO_section_function_body)
122 free (ob->cfg_stream);
123
124 streamer_tree_cache_delete (ob->writer_cache);
125 obstack_free (&ob->obstack, NULL);
126
127 free (ob);
128 }
129
130
/* Look up NODE in the type table and write the index for it to OB.  */

static void
output_type_ref (struct output_block *ob, tree node)
{
  /* Emit the record tag first so the reader knows a type-table index
     follows on the main stream.  */
  streamer_write_record_start (ob, LTO_type_ref);
  lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
}
139
140
/* Return true if tree node T is written to various tables.  For these
   nodes, we sometimes want to write their physical representation
   (via lto_output_tree), and sometimes we need to emit an index
   reference into a table (via lto_output_tree_ref).  */

static bool
tree_is_indexable (tree t)
{
  /* Parameters and return values of functions of variably modified types
     must go to global stream, because they may be used in the type
     definition.  */
  if (TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
    return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
  /* IMPORTED_DECL is put into BLOCK and thus it never can be shared.  */
  else if (TREE_CODE (t) == IMPORTED_DECL)
    return false;
  /* Function-local declarations belong with the function body, not in
     the global tables.  */
  else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
	    || TREE_CODE (t) == TYPE_DECL
	    || TREE_CODE (t) == CONST_DECL
	    || TREE_CODE (t) == NAMELIST_DECL)
	   && decl_function_context (t))
    return false;
  else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
    return false;
  /* Variably modified types need to be streamed alongside function
     bodies because they can refer to local entities.  Together with
     them we have to localize their members as well.
     ??? In theory that includes non-FIELD_DECLs as well.  */
  else if (TYPE_P (t)
	   && variably_modified_type_p (t, NULL_TREE))
    return false;
  else if (TREE_CODE (t) == FIELD_DECL
	   && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
    return false;
  else
    /* Everything else is indexable exactly when it is a type, a decl,
       or an SSA name.  */
    return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
}
178
179
/* Output info about new location into bitpack BP.
   After outputting bitpack, lto_output_location_data has
   to be done to output actual data.

   Locations are delta-encoded against the state cached in OB:
   a single bit per component says whether the file, line, or column
   changed, and only changed components are streamed.  The exact pack
   order here is part of the on-disk format and must match the
   reader.  */

void
lto_output_location (struct output_block *ob, struct bitpack_d *bp,
		     location_t loc)
{
  expanded_location xloc;

  loc = LOCATION_LOCUS (loc);
  bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
  if (loc == UNKNOWN_LOCATION)
    return;

  xloc = expand_location (loc);

  /* One "changed?" bit per component, in file/line/column order.  */
  bp_pack_value (bp, ob->current_file != xloc.file, 1);
  bp_pack_value (bp, ob->current_line != xloc.line, 1);
  bp_pack_value (bp, ob->current_col != xloc.column, 1);

  if (ob->current_file != xloc.file)
    bp_pack_string (ob, bp, xloc.file, true);
  ob->current_file = xloc.file;

  if (ob->current_line != xloc.line)
    bp_pack_var_len_unsigned (bp, xloc.line);
  ob->current_line = xloc.line;

  if (ob->current_col != xloc.column)
    bp_pack_var_len_unsigned (bp, xloc.column);
  ob->current_col = xloc.column;
}
213
214
/* If EXPR is an indexable tree node, output a reference to it to
   output block OB.  Otherwise, output the physical representation of
   EXPR to OB.  Each case writes a record tag followed by an index
   into the matching decl-state table.  */

static void
lto_output_tree_ref (struct output_block *ob, tree expr)
{
  enum tree_code code;

  if (TYPE_P (expr))
    {
      output_type_ref (ob, expr);
      return;
    }

  code = TREE_CODE (expr);
  switch (code)
    {
    case SSA_NAME:
      /* SSA names are referenced by version number only; the body is
	 reconstructed by the reader (see output_ssa_names).  */
      streamer_write_record_start (ob, LTO_ssa_name_ref);
      streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
      break;

    case FIELD_DECL:
      streamer_write_record_start (ob, LTO_field_decl_ref);
      lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case FUNCTION_DECL:
      streamer_write_record_start (ob, LTO_function_decl_ref);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case VAR_DECL:
    case DEBUG_EXPR_DECL:
      /* Only globals (or function-scope statics) may be referenced.  */
      gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
      /* Fall through.  */
    case PARM_DECL:
      streamer_write_record_start (ob, LTO_global_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case CONST_DECL:
      streamer_write_record_start (ob, LTO_const_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case IMPORTED_DECL:
      gcc_assert (decl_function_context (expr) == NULL);
      streamer_write_record_start (ob, LTO_imported_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TYPE_DECL:
      streamer_write_record_start (ob, LTO_type_decl_ref);
      lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMELIST_DECL:
      streamer_write_record_start (ob, LTO_namelist_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMESPACE_DECL:
      streamer_write_record_start (ob, LTO_namespace_decl_ref);
      lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case LABEL_DECL:
      streamer_write_record_start (ob, LTO_label_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case RESULT_DECL:
      streamer_write_record_start (ob, LTO_result_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TRANSLATION_UNIT_DECL:
      streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    default:
      /* No other node is indexable, so it should have been handled by
	 lto_output_tree.  */
      gcc_unreachable ();
    }
}
303
304
305 /* Return true if EXPR is a tree node that can be written to disk. */
306
307 static inline bool
308 lto_is_streamable (tree expr)
309 {
310 enum tree_code code = TREE_CODE (expr);
311
312 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
313 name version in lto_output_tree_ref (see output_ssa_names). */
314 return !is_lang_specific (expr)
315 && code != SSA_NAME
316 && code != CALL_EXPR
317 && code != LANG_TYPE
318 && code != MODIFY_EXPR
319 && code != INIT_EXPR
320 && code != TARGET_EXPR
321 && code != BIND_EXPR
322 && code != WITH_CLEANUP_EXPR
323 && code != STATEMENT_LIST
324 && (code == CASE_LABEL_EXPR
325 || code == DECL_EXPR
326 || TREE_CODE_CLASS (code) != tcc_statement);
327 }
328
329
330 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
331
332 static tree
333 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
334 {
335 gcc_checking_assert (DECL_P (expr)
336 && TREE_CODE (expr) != FUNCTION_DECL
337 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
338
339 /* Handle DECL_INITIAL for symbols. */
340 tree initial = DECL_INITIAL (expr);
341 if (TREE_CODE (expr) == VAR_DECL
342 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
343 && !DECL_IN_CONSTANT_POOL (expr)
344 && initial)
345 {
346 varpool_node *vnode;
347 /* Extra section needs about 30 bytes; do not produce it for simple
348 scalar values. */
349 if (TREE_CODE (DECL_INITIAL (expr)) == CONSTRUCTOR
350 || !(vnode = varpool_node::get (expr))
351 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
352 initial = error_mark_node;
353 }
354
355 return initial;
356 }
357
358
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  IX is the index into the streamer cache
   where EXPR is stored.

   The write order (bitfields, then pointer fields, then DECL_INITIAL)
   is part of the stream format and must match the reader.  */

static void
lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
{
  /* Pack all the non-pointer fields in EXPR into a bitpack and write
     the resulting bitpack.  */
  bitpack_d bp = bitpack_create (ob->main_stream);
  streamer_pack_tree_bitfields (ob, &bp, expr);
  streamer_write_bitpack (&bp);

  /* Write all the pointer fields in EXPR.  */
  streamer_write_tree_body (ob, expr, ref_p);

  /* Write any LTO-specific data to OB.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    {
      /* Handle DECL_INITIAL for symbols; get_symbol_initial_value may
	 substitute error_mark_node when the initializer is streamed in
	 a separate section.  */
      tree initial = get_symbol_initial_value
			 (ob->decl_state->symtab_node_encoder, expr);
      stream_write_tree (ob, initial, ref_p);
    }
}
387
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  IX is the index into the streamer cache
   where EXPR is stored.  Emits header, body, and a zero terminator in
   that order.  */

static void
lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
{
  if (!lto_is_streamable (expr))
    internal_error ("tree code %qs is not supported in LTO streams",
		    get_tree_code_name (TREE_CODE (expr)));

  /* Write the header, containing everything needed to materialize
     EXPR on the reading side.  */
  streamer_write_tree_header (ob, expr);

  lto_write_tree_1 (ob, expr, ref_p);

  /* Mark the end of EXPR.  */
  streamer_write_zero (ob);
}
409
/* Emit the physical representation of tree node EXPR to output block
   OB.  If THIS_REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.

   HASH is EXPR's content hash; EXPR is entered into the writer cache
   under it before emission so that later occurrences stream as cache
   references.  EXPR must not already be in the cache.  */

static void
lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
		   bool ref_p, bool this_ref_p)
{
  unsigned ix;

  /* An indexable tree should have been emitted as a reference, never
     through this path.  */
  gcc_checking_assert (expr != NULL_TREE
		       && !(this_ref_p && tree_is_indexable (expr)));

  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
					      expr, hash, &ix);
  gcc_assert (!exists_p);
  if (streamer_handle_as_builtin_p (expr))
    {
      /* MD and NORMAL builtins do not need to be written out
	 completely as they are always instantiated by the
	 compiler on startup.  The only builtins that need to
	 be written out are BUILT_IN_FRONTEND.  For all other
	 builtins, we simply write the class and code.  */
      streamer_write_builtin (ob, expr);
    }
  else if (TREE_CODE (expr) == INTEGER_CST
	   && !TREE_OVERFLOW (expr))
    {
      /* Shared INTEGER_CST nodes are special because they need their
	 original type to be materialized by the reader (to implement
	 TYPE_CACHED_VALUES).  */
      streamer_write_integer_cst (ob, expr, ref_p);
    }
  else
    {
      /* This is the first time we see EXPR, write its fields
	 to OB.  */
      lto_write_tree (ob, expr, ref_p);
    }
}
450
/* Depth-first walk over the tree graph rooted at a given expression,
   collecting strongly connected components of trees so they can be
   streamed as SCC units.  Constructing a DFS object performs the
   whole walk.  */

class DFS
{
public:
  DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
       bool single_p);
  ~DFS ();

  /* A tree together with its content hash, as pushed on the SCC
     stack during the walk.  */
  struct scc_entry
  {
    tree t;
    hashval_t hash;
  };
  /* Stack of trees not yet assigned to a completed SCC.  */
  vec<scc_entry> sccstack;

private:
  /* Per-tree DFS bookkeeping: discovery number and the lowest
     reachable discovery number (Tarjan's algorithm).  */
  struct sccs
  {
    unsigned int dfsnum;
    unsigned int low;
  };

  static int scc_entry_compare (const void *, const void *);

  void DFS_write_tree_body (struct output_block *ob,
			    tree expr, sccs *expr_state, bool ref_p,
			    bool single_p);

  void DFS_write_tree (struct output_block *ob, sccs *from_state,
		       tree expr, bool ref_p, bool this_ref_p,
		       bool single_p);
  hashval_t
  hash_scc (struct output_block *ob, unsigned first, unsigned size);

  /* Next DFS discovery number to hand out.  */
  unsigned int next_dfs_num;
  /* Maps each visited tree to its DFS state (allocated on the
     obstack below).  */
  hash_map<tree, sccs *> sccstate;
  struct obstack sccstate_obstack;
};
488
489 DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
490 bool single_p)
491 {
492 sccstack.create (0);
493 gcc_obstack_init (&sccstate_obstack);
494 next_dfs_num = 1;
495 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p, single_p);
496 }
497
498 DFS::~DFS ()
499 {
500 sccstack.release ();
501 obstack_free (&sccstate_obstack, NULL);
502 }
503
/* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
   DFS recurse for all tree edges originating from it.

   The set and order of edges followed here must mirror exactly what
   streamer_write_tree_body emits, since the reader reconstructs the
   SCC from the same traversal.  */

void
DFS::DFS_write_tree_body (struct output_block *ob,
			  tree expr, sccs *expr_state, bool ref_p,
			  bool single_p)
{
#define DFS_follow_tree_edge(DEST) \
  DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p, single_p)

  enum tree_code code;

  code = TREE_CODE (expr);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (TREE_CODE (expr) != IDENTIFIER_NODE)
	DFS_follow_tree_edge (TREE_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
	DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      DFS_follow_tree_edge (TREE_REALPART (expr));
      DFS_follow_tree_edge (TREE_IMAGPART (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (expr)
	  && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
	  && ANON_AGGRNAME_P (DECL_NAME (expr)))
	;
      else
	DFS_follow_tree_edge (DECL_NAME (expr));
      DFS_follow_tree_edge (DECL_CONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      DFS_follow_tree_edge (DECL_SIZE (expr));
      DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));

      /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
	 special handling in LTO, it must be handled by streamer hooks.  */

      DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));

      /* Do not follow DECL_ABSTRACT_ORIGIN.  We cannot handle debug information
	 for early inlining so drop it on the floor instead of ICEing in
	 dwarf2out.c.  */

      if ((TREE_CODE (expr) == VAR_DECL
	   || TREE_CODE (expr) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (expr))
	DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
      if (TREE_CODE (expr) == VAR_DECL)
	DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (TREE_CODE (expr) == TYPE_DECL)
	DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* Make sure we don't inadvertently set the assembler name.  */
      if (DECL_ASSEMBLER_NAME_SET_P (expr))
	DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
      DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
      DFS_follow_tree_edge (DECL_FCONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      DFS_follow_tree_edge (DECL_VINDEX (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      DFS_follow_tree_edge (TYPE_SIZE (expr));
      DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
      DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
      DFS_follow_tree_edge (TYPE_NAME (expr));
      /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
	 reconstructed during fixup.  */
      /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
	 during fixup.  */
      DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
      DFS_follow_tree_edge (TYPE_CONTEXT (expr));
      /* TYPE_CANONICAL is re-computed during type merging, so no need
	 to follow it here.  */
      DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	DFS_follow_tree_edge (TYPE_VALUES (expr));
      else if (TREE_CODE (expr) == ARRAY_TYPE)
	DFS_follow_tree_edge (TYPE_DOMAIN (expr));
      else if (RECORD_OR_UNION_TYPE_P (expr))
	for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
	  DFS_follow_tree_edge (t);
      else if (TREE_CODE (expr) == FUNCTION_TYPE
	       || TREE_CODE (expr) == METHOD_TYPE)
	DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));

      if (!POINTER_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_MINVAL (expr));
      DFS_follow_tree_edge (TYPE_MAXVAL (expr));
      if (RECORD_OR_UNION_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_BINFO (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      DFS_follow_tree_edge (TREE_PURPOSE (expr));
      DFS_follow_tree_edge (TREE_VALUE (expr));
      DFS_follow_tree_edge (TREE_CHAIN (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    {
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_OPERAND (expr, i));
      DFS_follow_tree_edge (TREE_BLOCK (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    {
      for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
	if (VAR_OR_FUNCTION_DECL_P (t)
	    && DECL_EXTERNAL (t))
	  /* We have to stream externals in the block chain as
	     non-references.  See also
	     tree-streamer-out.c:streamer_write_chain.  */
	  DFS_write_tree (ob, expr_state, t, ref_p, false, single_p);
	else
	  DFS_follow_tree_edge (t);

      DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));

      /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
	 handle - those that represent inlined function scopes.
	 Drop the rest on the floor instead of ICEing
	 in dwarf2out.c.  */
      if (inlined_function_outer_scope_p (expr))
	{
	  tree ultimate_origin = block_ultimate_origin (expr);
	  DFS_follow_tree_edge (ultimate_origin);
	}
      /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
	 information for early inlined BLOCKs so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
	 streaming time.  */

      /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
	 list is re-constructed from BLOCK_SUPERCONTEXT.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree t;

      /* Note that the number of BINFO slots has already been emitted in
	 EXPR's header (see streamer_write_tree_header) because this length
	 is needed to build the empty BINFO node on the reader side.  */
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
	DFS_follow_tree_edge (t);
      DFS_follow_tree_edge (BINFO_OFFSET (expr));
      DFS_follow_tree_edge (BINFO_VTABLE (expr));
      DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));

      /* The number of BINFO_BASE_ACCESSES has already been emitted in
	 EXPR's bitfield section.  */
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
	DFS_follow_tree_edge (t);

      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
	{
	  DFS_follow_tree_edge (index);
	  DFS_follow_tree_edge (value);
	}
    }

  if (code == OMP_CLAUSE)
    {
      int i;
      for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
	DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
      DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
    }

#undef DFS_follow_tree_edge
}
737
738 /* Return a hash value for the tree T.
739 CACHE holds hash values of trees outside current SCC. MAP, if non-NULL,
740 may hold hash values of trees inside current SCC. */
741
742 static hashval_t
743 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
744 {
745 inchash::hash hstate;
746
747 #define visit(SIBLING) \
748 do { \
749 unsigned ix; \
750 if (!SIBLING) \
751 hstate.add_int (0); \
752 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
753 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
754 else if (map) \
755 hstate.add_int (*map->get (SIBLING)); \
756 else \
757 hstate.add_int (1); \
758 } while (0)
759
760 /* Hash TS_BASE. */
761 enum tree_code code = TREE_CODE (t);
762 hstate.add_int (code);
763 if (!TYPE_P (t))
764 {
765 hstate.add_flag (TREE_SIDE_EFFECTS (t));
766 hstate.add_flag (TREE_CONSTANT (t));
767 hstate.add_flag (TREE_READONLY (t));
768 hstate.add_flag (TREE_PUBLIC (t));
769 }
770 hstate.add_flag (TREE_ADDRESSABLE (t));
771 hstate.add_flag (TREE_THIS_VOLATILE (t));
772 if (DECL_P (t))
773 hstate.add_flag (DECL_UNSIGNED (t));
774 else if (TYPE_P (t))
775 hstate.add_flag (TYPE_UNSIGNED (t));
776 if (TYPE_P (t))
777 hstate.add_flag (TYPE_ARTIFICIAL (t));
778 else
779 hstate.add_flag (TREE_NO_WARNING (t));
780 hstate.add_flag (TREE_NOTHROW (t));
781 hstate.add_flag (TREE_STATIC (t));
782 hstate.add_flag (TREE_PROTECTED (t));
783 hstate.add_flag (TREE_DEPRECATED (t));
784 if (code != TREE_BINFO)
785 hstate.add_flag (TREE_PRIVATE (t));
786 if (TYPE_P (t))
787 {
788 hstate.add_flag (TYPE_SATURATING (t));
789 hstate.add_flag (TYPE_ADDR_SPACE (t));
790 }
791 else if (code == SSA_NAME)
792 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
793 hstate.commit_flag ();
794
795 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
796 {
797 int i;
798 hstate.add_wide_int (TREE_INT_CST_NUNITS (t));
799 hstate.add_wide_int (TREE_INT_CST_EXT_NUNITS (t));
800 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
801 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
802 }
803
804 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
805 {
806 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
807 hstate.add_flag (r.cl);
808 hstate.add_flag (r.sign);
809 hstate.add_flag (r.signalling);
810 hstate.add_flag (r.canonical);
811 hstate.commit_flag ();
812 hstate.add_int (r.uexp);
813 hstate.add (r.sig, sizeof (r.sig));
814 }
815
816 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
817 {
818 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
819 hstate.add_int (f.mode);
820 hstate.add_int (f.data.low);
821 hstate.add_int (f.data.high);
822 }
823
824 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
825 {
826 hstate.add_wide_int (DECL_MODE (t));
827 hstate.add_flag (DECL_NONLOCAL (t));
828 hstate.add_flag (DECL_VIRTUAL_P (t));
829 hstate.add_flag (DECL_IGNORED_P (t));
830 hstate.add_flag (DECL_ABSTRACT_P (t));
831 hstate.add_flag (DECL_ARTIFICIAL (t));
832 hstate.add_flag (DECL_USER_ALIGN (t));
833 hstate.add_flag (DECL_PRESERVE_P (t));
834 hstate.add_flag (DECL_EXTERNAL (t));
835 hstate.add_flag (DECL_GIMPLE_REG_P (t));
836 hstate.commit_flag ();
837 hstate.add_int (DECL_ALIGN (t));
838 if (code == LABEL_DECL)
839 {
840 hstate.add_int (EH_LANDING_PAD_NR (t));
841 hstate.add_int (LABEL_DECL_UID (t));
842 }
843 else if (code == FIELD_DECL)
844 {
845 hstate.add_flag (DECL_PACKED (t));
846 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
847 hstate.add_int (DECL_OFFSET_ALIGN (t));
848 }
849 else if (code == VAR_DECL)
850 {
851 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
852 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
853 }
854 if (code == RESULT_DECL
855 || code == PARM_DECL
856 || code == VAR_DECL)
857 {
858 hstate.add_flag (DECL_BY_REFERENCE (t));
859 if (code == VAR_DECL
860 || code == PARM_DECL)
861 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
862 }
863 hstate.commit_flag ();
864 }
865
866 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
867 hstate.add_int (DECL_REGISTER (t));
868
869 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
870 {
871 hstate.add_flag (DECL_COMMON (t));
872 hstate.add_flag (DECL_DLLIMPORT_P (t));
873 hstate.add_flag (DECL_WEAK (t));
874 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
875 hstate.add_flag (DECL_COMDAT (t));
876 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
877 hstate.add_int (DECL_VISIBILITY (t));
878 if (code == VAR_DECL)
879 {
880 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
881 hstate.add_flag (DECL_HARD_REGISTER (t));
882 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
883 }
884 if (TREE_CODE (t) == FUNCTION_DECL)
885 {
886 hstate.add_flag (DECL_FINAL_P (t));
887 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
888 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
889 }
890 hstate.commit_flag ();
891 }
892
893 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
894 {
895 hstate.add_int (DECL_BUILT_IN_CLASS (t));
896 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
897 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
898 hstate.add_flag (DECL_UNINLINABLE (t));
899 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
900 hstate.add_flag (DECL_IS_NOVOPS (t));
901 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
902 hstate.add_flag (DECL_IS_MALLOC (t));
903 hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
904 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
905 hstate.add_flag (DECL_STATIC_CHAIN (t));
906 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
907 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
908 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
909 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
910 hstate.add_flag (DECL_PURE_P (t));
911 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
912 hstate.commit_flag ();
913 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
914 hstate.add_int (DECL_FUNCTION_CODE (t));
915 }
916
917 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
918 {
919 hstate.add_wide_int (TYPE_MODE (t));
920 hstate.add_flag (TYPE_STRING_FLAG (t));
921 hstate.add_flag (TYPE_NO_FORCE_BLK (t));
922 hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
923 hstate.add_flag (TYPE_PACKED (t));
924 hstate.add_flag (TYPE_RESTRICT (t));
925 hstate.add_flag (TYPE_USER_ALIGN (t));
926 hstate.add_flag (TYPE_READONLY (t));
927 if (RECORD_OR_UNION_TYPE_P (t))
928 {
929 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
930 hstate.add_flag (TYPE_FINAL_P (t));
931 }
932 else if (code == ARRAY_TYPE)
933 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
934 hstate.commit_flag ();
935 hstate.add_int (TYPE_PRECISION (t));
936 hstate.add_int (TYPE_ALIGN (t));
937 hstate.add_int ((TYPE_ALIAS_SET (t) == 0
938 || (!in_lto_p
939 && get_alias_set (t) == 0))
940 ? 0 : -1);
941 }
942
943 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
944 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
945 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
946
947 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
948 hstate.add_wide_int (cl_target_option_hash (TREE_TARGET_OPTION (t)));
949
950 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
951 hstate.add_wide_int (cl_optimization_hash (TREE_OPTIMIZATION (t)));
952
953 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
954 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
955
956 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
957 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
958
959 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
960 {
961 if (code != IDENTIFIER_NODE)
962 visit (TREE_TYPE (t));
963 }
964
965 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
966 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
967 visit (VECTOR_CST_ELT (t, i));
968
969 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
970 {
971 visit (TREE_REALPART (t));
972 visit (TREE_IMAGPART (t));
973 }
974
975 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
976 {
977 /* Drop names that were created for anonymous entities. */
978 if (DECL_NAME (t)
979 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
980 && ANON_AGGRNAME_P (DECL_NAME (t)))
981 ;
982 else
983 visit (DECL_NAME (t));
984 if (DECL_FILE_SCOPE_P (t))
985 ;
986 else
987 visit (DECL_CONTEXT (t));
988 }
989
990 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
991 {
992 visit (DECL_SIZE (t));
993 visit (DECL_SIZE_UNIT (t));
994 visit (DECL_ATTRIBUTES (t));
995 if ((code == VAR_DECL
996 || code == PARM_DECL)
997 && DECL_HAS_VALUE_EXPR_P (t))
998 visit (DECL_VALUE_EXPR (t));
999 if (code == VAR_DECL
1000 && DECL_HAS_DEBUG_EXPR_P (t))
1001 visit (DECL_DEBUG_EXPR (t));
1002 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
1003 be able to call get_symbol_initial_value. */
1004 }
1005
1006 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1007 {
1008 if (code == TYPE_DECL)
1009 visit (DECL_ORIGINAL_TYPE (t));
1010 }
1011
1012 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1013 {
1014 if (DECL_ASSEMBLER_NAME_SET_P (t))
1015 visit (DECL_ASSEMBLER_NAME (t));
1016 }
1017
1018 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1019 {
1020 visit (DECL_FIELD_OFFSET (t));
1021 visit (DECL_BIT_FIELD_TYPE (t));
1022 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1023 visit (DECL_FIELD_BIT_OFFSET (t));
1024 visit (DECL_FCONTEXT (t));
1025 }
1026
1027 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1028 {
1029 visit (DECL_VINDEX (t));
1030 visit (DECL_FUNCTION_PERSONALITY (t));
1031 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1032 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1033 }
1034
1035 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1036 {
1037 visit (TYPE_SIZE (t));
1038 visit (TYPE_SIZE_UNIT (t));
1039 visit (TYPE_ATTRIBUTES (t));
1040 visit (TYPE_NAME (t));
1041 visit (TYPE_MAIN_VARIANT (t));
1042 if (TYPE_FILE_SCOPE_P (t))
1043 ;
1044 else
1045 visit (TYPE_CONTEXT (t));
1046 visit (TYPE_STUB_DECL (t));
1047 }
1048
1049 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1050 {
1051 if (code == ENUMERAL_TYPE)
1052 visit (TYPE_VALUES (t));
1053 else if (code == ARRAY_TYPE)
1054 visit (TYPE_DOMAIN (t));
1055 else if (RECORD_OR_UNION_TYPE_P (t))
1056 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1057 visit (f);
1058 else if (code == FUNCTION_TYPE
1059 || code == METHOD_TYPE)
1060 visit (TYPE_ARG_TYPES (t));
1061 if (!POINTER_TYPE_P (t))
1062 visit (TYPE_MINVAL (t));
1063 visit (TYPE_MAXVAL (t));
1064 if (RECORD_OR_UNION_TYPE_P (t))
1065 visit (TYPE_BINFO (t));
1066 }
1067
1068 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1069 {
1070 visit (TREE_PURPOSE (t));
1071 visit (TREE_VALUE (t));
1072 visit (TREE_CHAIN (t));
1073 }
1074
1075 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1076 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1077 visit (TREE_VEC_ELT (t, i));
1078
1079 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1080 {
1081 hstate.add_wide_int (TREE_OPERAND_LENGTH (t));
1082 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1083 visit (TREE_OPERAND (t, i));
1084 }
1085
1086 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1087 {
1088 unsigned i;
1089 tree b;
1090 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1091 visit (b);
1092 visit (BINFO_OFFSET (t));
1093 visit (BINFO_VTABLE (t));
1094 visit (BINFO_VPTR_FIELD (t));
1095 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1096 visit (b);
1097 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1098 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1099 }
1100
1101 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1102 {
1103 unsigned i;
1104 tree index, value;
1105 hstate.add_wide_int (CONSTRUCTOR_NELTS (t));
1106 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1107 {
1108 visit (index);
1109 visit (value);
1110 }
1111 }
1112
1113 if (code == OMP_CLAUSE)
1114 {
1115 int i;
1116 HOST_WIDE_INT val;
1117
1118 hstate.add_wide_int (OMP_CLAUSE_CODE (t));
1119 switch (OMP_CLAUSE_CODE (t))
1120 {
1121 case OMP_CLAUSE_DEFAULT:
1122 val = OMP_CLAUSE_DEFAULT_KIND (t);
1123 break;
1124 case OMP_CLAUSE_SCHEDULE:
1125 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1126 break;
1127 case OMP_CLAUSE_DEPEND:
1128 val = OMP_CLAUSE_DEPEND_KIND (t);
1129 break;
1130 case OMP_CLAUSE_MAP:
1131 val = OMP_CLAUSE_MAP_KIND (t);
1132 break;
1133 case OMP_CLAUSE_PROC_BIND:
1134 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1135 break;
1136 case OMP_CLAUSE_REDUCTION:
1137 val = OMP_CLAUSE_REDUCTION_CODE (t);
1138 break;
1139 default:
1140 val = 0;
1141 break;
1142 }
1143 hstate.add_wide_int (val);
1144 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1145 visit (OMP_CLAUSE_OPERAND (t, i));
1146 visit (OMP_CLAUSE_CHAIN (t));
1147 }
1148
1149 return hstate.end ();
1150
1151 #undef visit
1152 }
1153
1154 /* Compare two SCC entries by their hash value for qsorting them. */
1155
1156 int
1157 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1158 {
1159 const scc_entry *p1 = (const scc_entry *) p1_;
1160 const scc_entry *p2 = (const scc_entry *) p2_;
1161 if (p1->hash < p2->hash)
1162 return -1;
1163 else if (p1->hash > p2->hash)
1164 return 1;
1165 return 0;
1166 }
1167
1168 /* Return a hash value for the SCC on the SCC stack from FIRST with
1169 size SIZE. */
1170
hashval_t
DFS::hash_scc (struct output_block *ob,
	       unsigned first, unsigned size)
{
  unsigned int last_classes = 0, iterations = 0;

  /* Compute hash values for the SCC members.  */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash = hash_tree (ob->writer_cache, NULL,
					sccstack[first+i].t);

  /* A singleton SCC needs no entry-point canonicalization.  */
  if (size == 1)
    return sccstack[first].hash;

  /* We aim to get a unique hash for every tree within the SCC and compute
     the hash value of the whole SCC by combining all values together in a
     stable (entry-point independent) order.  This guarantees that the same
     SCC regions within different translation units will get the same hash
     values and therefore will be merged at WPA time.

     Often the hashes are already unique.  In that case we compute the SCC
     hash by combining individual hash values in an increasing order.

     If there are duplicates we seek at least one tree with unique hash (and
     pick one with minimal hash and this property).  Then we obtain a stable
     order by a DFS walk starting from this unique tree and then use the
     index within this order to make individual hash values unique.

     If there is no tree with unique hash, we iteratively propagate the hash
     values across the internal edges of the SCC.  This usually quickly
     leads to unique hashes.  Consider, for example, an SCC containing two
     pointers that are identical except for the type they point to and
     assume that these types are also part of the SCC.  The propagation will
     add the points-to type information into their hash values.  */
  do
    {
      /* Sort the SCC so we can easily check hashes for uniqueness.  */
      qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);

      unsigned int classes = 1;
      int firstunique = -1;

      /* Find the tree with lowest unique hash (if it exists) and compute
	 the number of equivalence classes.  */
      if (sccstack[first].hash != sccstack[first+1].hash)
	firstunique = 0;
      for (unsigned i = 1; i < size; ++i)
	if (sccstack[first+i-1].hash != sccstack[first+i].hash)
	  {
	    classes++;
	    if (firstunique == -1
		&& (i == size - 1
		    || sccstack[first+i+1].hash != sccstack[first+i].hash))
	      firstunique = i;
	  }

      /* If we found a tree with unique hash, stop the iteration.  */
      if (firstunique != -1
	  /* Also terminate if we run out of iterations or if the number of
	     equivalence classes is no longer increasing.
	     For example a cyclic list of trees that are all equivalent will
	     never have a unique entry point; we however do not build such
	     SCCs in our IL.  */
	  || classes <= last_classes || iterations > 16)
        {
          hashval_t scc_hash;

	  /* If some hashes are not unique (CLASSES != SIZE), use the DFS
	     walk starting from FIRSTUNIQUE to obtain a stable order.  */
	  if (classes != size && firstunique != -1)
	    {
	      hash_map <tree, hashval_t> map(size*2);

	      /* Store hash values into a map, so we can associate them with
		 the reordered SCC.  */
	      for (unsigned i = 0; i < size; ++i)
		map.put (sccstack[first+i].t, sccstack[first+i].hash);

	      /* Re-walk the SCC from the unique entry; this pushes the same
		 members onto AGAIN's stack in a canonical order.  */
	      DFS again (ob, sccstack[first+firstunique].t, false, false, true);
	      gcc_assert (again.sccstack.length () == size);

	      memcpy (sccstack.address () + first,
		      again.sccstack.address (),
		      sizeof (scc_entry) * size);

	      /* Update hash values of individual members by hashing in the
		 index within the stable order.  This ensures uniqueness.
		 Also compute the scc_hash by mixing in all hash values in
		 the stable order we obtained.  */
	      sccstack[first].hash = *map.get (sccstack[first].t);
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		{
		  sccstack[first+i].hash
		    = iterative_hash_hashval_t (i,
						*map.get (sccstack[first+i].t));
		  scc_hash = iterative_hash_hashval_t (scc_hash,
						       sccstack[first+i].hash);
		}
	    }
	  /* If we got unique hash values for each tree, then sort already
	     ensured entry point independent order.  Only compute the final
	     scc hash.

	     If we failed to find the unique entry point, we go by the same
	     route.  We will eventually introduce unwanted hash conflicts.  */
	  else
	    {
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		scc_hash = iterative_hash_hashval_t (scc_hash,
						     sccstack[first+i].hash);
	      /* We can not 100% guarantee that the hash will not conflict
		 in a way so the unique hash is not found.  This however
		 should be an extremely rare situation.  ICE for now so
		 possible issues are found and evaluated.  */
	      gcc_checking_assert (classes == size);
	    }

	  /* To avoid conflicts across SCCs, iteratively hash the whole SCC
	     hash into the hash of each of the elements.  */
	  for (unsigned i = 0; i < size; ++i)
	    sccstack[first+i].hash
	      = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
	  return scc_hash;
	}

      last_classes = classes;
      iterations++;

      /* We failed to identify the entry point; propagate hash values across
	 the edges.  */
      {
	hash_map <tree, hashval_t> map(size*2);
	for (unsigned i = 0; i < size; ++i)
	  map.put (sccstack[first+i].t, sccstack[first+i].hash);

	for (unsigned i = 0; i < size; i++)
	  sccstack[first+i].hash = hash_tree (ob->writer_cache, &map,
					      sccstack[first+i].t);
      }
    }
  while (true);
}
1316
1317 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1318 already in the streamer cache. Main routine called for
1319 each visit of EXPR. */
1320
void
DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
		     tree expr, bool ref_p, bool this_ref_p, bool single_p)
{
  unsigned ix;

  /* Handle special cases.  */
  if (expr == NULL_TREE)
    return;

  /* Do not DFS walk into indexable trees.  */
  if (this_ref_p && tree_is_indexable (expr))
    return;

  /* Check if we already streamed EXPR.  */
  if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
    return;

  /* Look up (or create) the Tarjan DFS state for EXPR.  */
  sccs **slot = &sccstate.get_or_insert (expr);
  sccs *cstate = *slot;
  if (!cstate)
    {
      scc_entry e = { expr, 0 };
      /* Not yet visited.  DFS recurse and push it onto the stack.  */
      *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
      sccstack.safe_push (e);
      cstate->dfsnum = next_dfs_num++;
      cstate->low = cstate->dfsnum;

      /* Builtins are streamed by reference; do not walk their bodies.  */
      if (streamer_handle_as_builtin_p (expr))
	;
      else if (TREE_CODE (expr) == INTEGER_CST
	       && !TREE_OVERFLOW (expr))
	/* Non-overflowed INTEGER_CSTs only need their type walked; the
	   constant itself is materialized from the type on input.  */
	DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p, single_p);
      else
	{
	  DFS_write_tree_body (ob, expr, cstate, ref_p, single_p);

	  /* Walk any LTO-specific edges.  */
	  if (DECL_P (expr)
	      && TREE_CODE (expr) != FUNCTION_DECL
	      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
	    {
	      /* Handle DECL_INITIAL for symbols.  */
	      tree initial = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
						       expr);
	      DFS_write_tree (ob, cstate, initial, ref_p, ref_p, single_p);
	    }
	}

      /* See if we found an SCC: EXPR is an SCC root iff no back-edge
	 reached an earlier DFS number.  */
      if (cstate->low == cstate->dfsnum)
	{
	  unsigned first, size;
	  tree x;

	  /* If we are re-walking a single leaf-SCC just return and
	     let the caller access the sccstack.  */
	  if (single_p)
	    return;

	  /* Pop the SCC and compute its size.  */
	  first = sccstack.length ();
	  do
	    {
	      x = sccstack[--first].t;
	    }
	  while (x != expr);
	  size = sccstack.length () - first;

	  /* No need to compute hashes for LTRANS units, we don't perform
	     any merging there.  */
	  hashval_t scc_hash = 0;
	  unsigned scc_entry_len = 0;
	  if (!flag_wpa)
	    {
	      scc_hash = hash_scc (ob, first, size);

	      /* Put the entries with the least number of collisions first.
		 The reader probes entry candidates in this order.  */
	      unsigned entry_start = 0;
	      scc_entry_len = size + 1;
	      for (unsigned i = 0; i < size;)
		{
		  unsigned from = i;
		  for (i = i + 1; i < size
		       && (sccstack[first + i].hash
			   == sccstack[first + from].hash); ++i)
		    ;
		  if (i - from < scc_entry_len)
		    {
		      scc_entry_len = i - from;
		      entry_start = from;
		    }
		}
	      for (unsigned i = 0; i < scc_entry_len; ++i)
		{
		  scc_entry tem = sccstack[first + i];
		  sccstack[first + i] = sccstack[first + entry_start + i];
		  sccstack[first + entry_start + i] = tem;
		}

	      if (scc_entry_len == 1)
		; /* We already sorted SCC deterministically in hash_scc.  */
	      else
		/* Check that we have only one SCC.
		   Naturally we may have conflicts if hash function is not
		   strong enough.  Lets see how far this gets.  */
		{
#ifdef ENABLE_CHECKING
		  gcc_unreachable ();
#endif
		}
	    }

	  /* Write LTO_tree_scc.  */
	  streamer_write_record_start (ob, LTO_tree_scc);
	  streamer_write_uhwi (ob, size);
	  streamer_write_uhwi (ob, scc_hash);

	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
	     All INTEGER_CSTs need to be handled this way as we need
	     their type to materialize them.  Also builtins are handled
	     this way.
	     ??? We still wrap these in LTO_tree_scc so at the
	     input side we can properly identify the tree we want
	     to ultimatively return.  */
	  if (size == 1)
	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
	  else
	    {
	      /* Write the size of the SCC entry candidates.  */
	      streamer_write_uhwi (ob, scc_entry_len);

	      /* Write all headers and populate the streamer cache.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  hashval_t hash = sccstack[first+i].hash;
		  tree t = sccstack[first+i].t;
		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
							      t, hash, &ix);
		  gcc_assert (!exists_p);

		  if (!lto_is_streamable (t))
		    internal_error ("tree code %qs is not supported "
				    "in LTO streams",
				    get_tree_code_name (TREE_CODE (t)));

		  gcc_checking_assert (!streamer_handle_as_builtin_p (t));

		  /* Write the header, containing everything needed to
		     materialize EXPR on the reading side.  */
		  streamer_write_tree_header (ob, t);
		}

	      /* Write the bitpacks and tree references.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);

		  /* Mark the end of the tree.  */
		  streamer_write_zero (ob);
		}
	    }

	  /* Finally truncate the vector.  */
	  sccstack.truncate (first);

	  if (from_state)
	    from_state->low = MIN (from_state->low, cstate->low);
	  return;
	}

      /* Not an SCC root: propagate the low-link up to the parent.  */
      if (from_state)
	from_state->low = MIN (from_state->low, cstate->low);
    }
  /* EXPR is already on the stack (a back- or cross-edge); update the
     caller's low-link from its DFS number.  */
  gcc_checking_assert (from_state);
  if (cstate->dfsnum < from_state->dfsnum)
    from_state->low = MIN (cstate->dfsnum, from_state->low);
}
1500
1501
1502 /* Emit the physical representation of tree node EXPR to output block
1503 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
1504 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1505
1506 void
1507 lto_output_tree (struct output_block *ob, tree expr,
1508 bool ref_p, bool this_ref_p)
1509 {
1510 unsigned ix;
1511 bool existed_p;
1512
1513 if (expr == NULL_TREE)
1514 {
1515 streamer_write_record_start (ob, LTO_null);
1516 return;
1517 }
1518
1519 if (this_ref_p && tree_is_indexable (expr))
1520 {
1521 lto_output_tree_ref (ob, expr);
1522 return;
1523 }
1524
1525 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1526 if (existed_p)
1527 {
1528 /* If a node has already been streamed out, make sure that
1529 we don't write it more than once. Otherwise, the reader
1530 will instantiate two different nodes for the same object. */
1531 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1532 streamer_write_uhwi (ob, ix);
1533 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1534 lto_tree_code_to_tag (TREE_CODE (expr)));
1535 lto_stats.num_pickle_refs_output++;
1536 }
1537 else
1538 {
1539 /* This is the first time we see EXPR, write all reachable
1540 trees to OB. */
1541 static bool in_dfs_walk;
1542
1543 /* Protect against recursion which means disconnect between
1544 what tree edges we walk in the DFS walk and what edges
1545 we stream out. */
1546 gcc_assert (!in_dfs_walk);
1547
1548 /* Start the DFS walk. */
1549 /* Save ob state ... */
1550 /* let's see ... */
1551 in_dfs_walk = true;
1552 DFS (ob, expr, ref_p, this_ref_p, false);
1553 in_dfs_walk = false;
1554
1555 /* Finally append a reference to the tree we were writing.
1556 ??? If expr ended up as a singleton we could have
1557 inlined it here and avoid outputting a reference. */
1558 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1559 gcc_assert (existed_p);
1560 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1561 streamer_write_uhwi (ob, ix);
1562 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1563 lto_tree_code_to_tag (TREE_CODE (expr)));
1564 lto_stats.num_pickle_refs_output++;
1565 }
1566 }
1567
1568
1569 /* Output to OB a list of try/catch handlers starting with FIRST. */
1570
1571 static void
1572 output_eh_try_list (struct output_block *ob, eh_catch first)
1573 {
1574 eh_catch n;
1575
1576 for (n = first; n; n = n->next_catch)
1577 {
1578 streamer_write_record_start (ob, LTO_eh_catch);
1579 stream_write_tree (ob, n->type_list, true);
1580 stream_write_tree (ob, n->filter_list, true);
1581 stream_write_tree (ob, n->label, true);
1582 }
1583
1584 streamer_write_record_start (ob, LTO_null);
1585 }
1586
1587
/* Output EH region R to OB.  R may be NULL, in which case a single
   LTO_null record is emitted instead.  */
1591
1592 static void
1593 output_eh_region (struct output_block *ob, eh_region r)
1594 {
1595 enum LTO_tags tag;
1596
1597 if (r == NULL)
1598 {
1599 streamer_write_record_start (ob, LTO_null);
1600 return;
1601 }
1602
1603 if (r->type == ERT_CLEANUP)
1604 tag = LTO_ert_cleanup;
1605 else if (r->type == ERT_TRY)
1606 tag = LTO_ert_try;
1607 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1608 tag = LTO_ert_allowed_exceptions;
1609 else if (r->type == ERT_MUST_NOT_THROW)
1610 tag = LTO_ert_must_not_throw;
1611 else
1612 gcc_unreachable ();
1613
1614 streamer_write_record_start (ob, tag);
1615 streamer_write_hwi (ob, r->index);
1616
1617 if (r->outer)
1618 streamer_write_hwi (ob, r->outer->index);
1619 else
1620 streamer_write_zero (ob);
1621
1622 if (r->inner)
1623 streamer_write_hwi (ob, r->inner->index);
1624 else
1625 streamer_write_zero (ob);
1626
1627 if (r->next_peer)
1628 streamer_write_hwi (ob, r->next_peer->index);
1629 else
1630 streamer_write_zero (ob);
1631
1632 if (r->type == ERT_TRY)
1633 {
1634 output_eh_try_list (ob, r->u.eh_try.first_catch);
1635 }
1636 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1637 {
1638 stream_write_tree (ob, r->u.allowed.type_list, true);
1639 stream_write_tree (ob, r->u.allowed.label, true);
1640 streamer_write_uhwi (ob, r->u.allowed.filter);
1641 }
1642 else if (r->type == ERT_MUST_NOT_THROW)
1643 {
1644 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1645 bitpack_d bp = bitpack_create (ob->main_stream);
1646 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1647 streamer_write_bitpack (&bp);
1648 }
1649
1650 if (r->landing_pads)
1651 streamer_write_hwi (ob, r->landing_pads->index);
1652 else
1653 streamer_write_zero (ob);
1654 }
1655
1656
1657 /* Output landing pad LP to OB. */
1658
1659 static void
1660 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1661 {
1662 if (lp == NULL)
1663 {
1664 streamer_write_record_start (ob, LTO_null);
1665 return;
1666 }
1667
1668 streamer_write_record_start (ob, LTO_eh_landing_pad);
1669 streamer_write_hwi (ob, lp->index);
1670 if (lp->next_lp)
1671 streamer_write_hwi (ob, lp->next_lp->index);
1672 else
1673 streamer_write_zero (ob);
1674
1675 if (lp->region)
1676 streamer_write_hwi (ob, lp->region->index);
1677 else
1678 streamer_write_zero (ob);
1679
1680 stream_write_tree (ob, lp->post_landing_pad, true);
1681 }
1682
1683
1684 /* Output the existing eh_table to OB. */
1685
1686 static void
1687 output_eh_regions (struct output_block *ob, struct function *fn)
1688 {
1689 if (fn->eh && fn->eh->region_tree)
1690 {
1691 unsigned i;
1692 eh_region eh;
1693 eh_landing_pad lp;
1694 tree ttype;
1695
1696 streamer_write_record_start (ob, LTO_eh_table);
1697
1698 /* Emit the index of the root of the EH region tree. */
1699 streamer_write_hwi (ob, fn->eh->region_tree->index);
1700
1701 /* Emit all the EH regions in the region array. */
1702 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1703 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1704 output_eh_region (ob, eh);
1705
1706 /* Emit all landing pads. */
1707 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1708 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1709 output_eh_lp (ob, lp);
1710
1711 /* Emit all the runtime type data. */
1712 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1713 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1714 stream_write_tree (ob, ttype, true);
1715
1716 /* Emit the table of action chains. */
1717 if (targetm.arm_eabi_unwinder)
1718 {
1719 tree t;
1720 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1721 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1722 stream_write_tree (ob, t, true);
1723 }
1724 else
1725 {
1726 uchar c;
1727 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1728 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1729 streamer_write_char_stream (ob->main_stream, c);
1730 }
1731 }
1732
1733 /* The LTO_null either terminates the record or indicates that there
1734 are no eh_records at all. */
1735 streamer_write_record_start (ob, LTO_null);
1736 }
1737
1738
1739 /* Output all of the active ssa names to the ssa_names stream. */
1740
1741 static void
1742 output_ssa_names (struct output_block *ob, struct function *fn)
1743 {
1744 unsigned int i, len;
1745
1746 len = vec_safe_length (SSANAMES (fn));
1747 streamer_write_uhwi (ob, len);
1748
1749 for (i = 1; i < len; i++)
1750 {
1751 tree ptr = (*SSANAMES (fn))[i];
1752
1753 if (ptr == NULL_TREE
1754 || SSA_NAME_IN_FREE_LIST (ptr)
1755 || virtual_operand_p (ptr))
1756 continue;
1757
1758 streamer_write_uhwi (ob, i);
1759 streamer_write_char_stream (ob->main_stream,
1760 SSA_NAME_IS_DEFAULT_DEF (ptr));
1761 if (SSA_NAME_VAR (ptr))
1762 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1763 else
1764 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1765 stream_write_tree (ob, TREE_TYPE (ptr), true);
1766 }
1767
1768 streamer_write_zero (ob);
1769 }
1770
1771
1772 /* Output a wide-int. */
1773
1774 static void
1775 streamer_write_wi (struct output_block *ob,
1776 const widest_int &w)
1777 {
1778 int len = w.get_len ();
1779
1780 streamer_write_uhwi (ob, w.get_precision ());
1781 streamer_write_uhwi (ob, len);
1782 for (int i = 0; i < len; i++)
1783 streamer_write_hwi (ob, w.elt (i));
1784 }
1785
1786
1787 /* Output the cfg. */
1788
1789 static void
1790 output_cfg (struct output_block *ob, struct function *fn)
1791 {
1792 struct lto_output_stream *tmp_stream = ob->main_stream;
1793 basic_block bb;
1794
1795 ob->main_stream = ob->cfg_stream;
1796
1797 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1798 profile_status_for_fn (fn));
1799
1800 /* Output the number of the highest basic block. */
1801 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1802
1803 FOR_ALL_BB_FN (bb, fn)
1804 {
1805 edge_iterator ei;
1806 edge e;
1807
1808 streamer_write_hwi (ob, bb->index);
1809
1810 /* Output the successors and the edge flags. */
1811 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1812 FOR_EACH_EDGE (e, ei, bb->succs)
1813 {
1814 streamer_write_uhwi (ob, e->dest->index);
1815 streamer_write_hwi (ob, e->probability);
1816 streamer_write_gcov_count (ob, e->count);
1817 streamer_write_uhwi (ob, e->flags);
1818 }
1819 }
1820
1821 streamer_write_hwi (ob, -1);
1822
1823 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1824 while (bb->next_bb)
1825 {
1826 streamer_write_hwi (ob, bb->next_bb->index);
1827 bb = bb->next_bb;
1828 }
1829
1830 streamer_write_hwi (ob, -1);
1831
1832 /* ??? The cfgloop interface is tied to cfun. */
1833 gcc_assert (cfun == fn);
1834
1835 /* Output the number of loops. */
1836 streamer_write_uhwi (ob, number_of_loops (fn));
1837
1838 /* Output each loop, skipping the tree root which has number zero. */
1839 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1840 {
1841 struct loop *loop = get_loop (fn, i);
1842
1843 /* Write the index of the loop header. That's enough to rebuild
1844 the loop tree on the reader side. Stream -1 for an unused
1845 loop entry. */
1846 if (!loop)
1847 {
1848 streamer_write_hwi (ob, -1);
1849 continue;
1850 }
1851 else
1852 streamer_write_hwi (ob, loop->header->index);
1853
1854 /* Write everything copy_loop_info copies. */
1855 streamer_write_enum (ob->main_stream,
1856 loop_estimation, EST_LAST, loop->estimate_state);
1857 streamer_write_hwi (ob, loop->any_upper_bound);
1858 if (loop->any_upper_bound)
1859 streamer_write_wi (ob, loop->nb_iterations_upper_bound);
1860 streamer_write_hwi (ob, loop->any_estimate);
1861 if (loop->any_estimate)
1862 streamer_write_wi (ob, loop->nb_iterations_estimate);
1863
1864 /* Write OMP SIMD related info. */
1865 streamer_write_hwi (ob, loop->safelen);
1866 streamer_write_hwi (ob, loop->dont_vectorize);
1867 streamer_write_hwi (ob, loop->force_vectorize);
1868 stream_write_tree (ob, loop->simduid, true);
1869 }
1870
1871 ob->main_stream = tmp_stream;
1872 }
1873
1874
1875 /* Create the header in the file using OB. If the section type is for
1876 a function, set FN to the decl for that function. */
1877
1878 void
1879 produce_asm (struct output_block *ob, tree fn)
1880 {
1881 enum lto_section_type section_type = ob->section_type;
1882 struct lto_function_header header;
1883 char *section_name;
1884
1885 if (section_type == LTO_section_function_body)
1886 {
1887 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1888 section_name = lto_get_section_name (section_type, name, NULL);
1889 }
1890 else
1891 section_name = lto_get_section_name (section_type, NULL, NULL);
1892
1893 lto_begin_section (section_name, !flag_wpa);
1894 free (section_name);
1895
1896 /* The entire header is stream computed here. */
1897 memset (&header, 0, sizeof (struct lto_function_header));
1898
1899 /* Write the header. */
1900 header.major_version = LTO_major_version;
1901 header.minor_version = LTO_minor_version;
1902
1903 if (section_type == LTO_section_function_body)
1904 header.cfg_size = ob->cfg_stream->total_size;
1905 header.main_size = ob->main_stream->total_size;
1906 header.string_size = ob->string_stream->total_size;
1907 lto_write_data (&header, sizeof header);
1908
1909 /* Put all of the gimple and the string table out the asm file as a
1910 block of text. */
1911 if (section_type == LTO_section_function_body)
1912 lto_write_stream (ob->cfg_stream);
1913 lto_write_stream (ob->main_stream);
1914 lto_write_stream (ob->string_stream);
1915
1916 lto_end_section ();
1917 }
1918
1919
1920 /* Output the base body of struct function FN using output block OB. */
1921
static void
output_struct_function_base (struct output_block *ob, struct function *fn)
{
  struct bitpack_d bp;
  unsigned i;
  tree t;

  /* Output the static chain and non-local goto save area.  */
  stream_write_tree (ob, fn->static_chain_decl, true);
  stream_write_tree (ob, fn->nonlocal_goto_save_area, true);

  /* Output all the local variables in the function.  */
  streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
  FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
    stream_write_tree (ob, t, true);

  /* Output current IL state of the function.  */
  streamer_write_uhwi (ob, fn->curr_properties);

  /* Write all the attributes for FN.  NOTE(review): the bit order below
     is presumably mirrored by the function-body reader -- keep both in
     sync when changing it.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, fn->is_thunk, 1);
  bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
  bp_pack_value (&bp, fn->returns_pcc_struct, 1);
  bp_pack_value (&bp, fn->returns_struct, 1);
  bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
  bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
  bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
  bp_pack_value (&bp, fn->after_inlining, 1);
  bp_pack_value (&bp, fn->stdarg, 1);
  bp_pack_value (&bp, fn->has_nonlocal_label, 1);
  bp_pack_value (&bp, fn->calls_alloca, 1);
  bp_pack_value (&bp, fn->calls_setjmp, 1);
  bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
  bp_pack_value (&bp, fn->has_simduid_loops, 1);
  bp_pack_value (&bp, fn->va_list_fpr_size, 8);
  bp_pack_value (&bp, fn->va_list_gpr_size, 8);

  /* Output the function start and end loci.  */
  stream_output_location (ob, &bp, fn->function_start_locus);
  stream_output_location (ob, &bp, fn->function_end_locus);

  streamer_write_bitpack (&bp);
}
1966
1967
1968 /* Output the body of function NODE->DECL. */
1969
static void
output_function (struct cgraph_node *node)
{
  tree function;
  struct function *fn;
  basic_block bb;
  struct output_block *ob;

  function = node->decl;
  fn = DECL_STRUCT_FUNCTION (function);
  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->symbol = node;

  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  /* Set current_function_decl and cfun.  */
  push_cfun (fn);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  streamer_write_record_start (ob, LTO_function);

  /* Output decls for parameters and args.  */
  stream_write_tree (ob, DECL_RESULT (function), true);
  streamer_write_chain (ob, DECL_ARGUMENTS (function), true);

  /* Output DECL_INITIAL for the function, which contains the tree of
     lexical scopes.  */
  stream_write_tree (ob, DECL_INITIAL (function), true);

  /* We also stream abstract functions where we stream only stuff needed for
     debug info.  */
  if (gimple_has_body_p (function))
    {
      /* Non-zero marks that a body follows.  */
      streamer_write_uhwi (ob, 1);
      output_struct_function_base (ob, fn);

      /* Output all the SSA names used in the function.  */
      output_ssa_names (ob, fn);

      /* Output any exception handling regions.  */
      output_eh_regions (ob, fn);


      /* We will renumber the statements.  The code that does this uses
	 the same ordering that we use for serializing them so we can use
	 the same code on the other end and not have to write out the
	 statement numbers.  We do not assign UIDs to PHIs here because
	 virtual PHIs get re-computed on-the-fly which would make numbers
	 inconsistent.  */
      set_gimple_stmt_max_uid (cfun, 0);
      FOR_ALL_BB_FN (bb, cfun)
	{
	  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gphi *stmt = gsi.phi ();

	      /* Virtual PHIs are not going to be streamed.  */
	      if (!virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}
      /* To avoid keeping duplicate gimple IDs in the statements, renumber
	 virtual phis now.  */
      FOR_ALL_BB_FN (bb, cfun)
	{
	  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gphi *stmt = gsi.phi ();
	      if (virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}

      /* Output the code for the function.  */
      FOR_ALL_BB_FN (bb, fn)
	output_bb (ob, bb, fn);

      /* The terminator for this function.  */
      streamer_write_record_start (ob, LTO_null);

      output_cfg (ob, fn);

      pop_cfun ();
   }
  else
    /* Zero marks that only the declaration parts above were streamed.  */
    streamer_write_uhwi (ob, 0);

  /* Create a section to hold the pickled output of this function.  */
  produce_asm (ob, function);

  destroy_output_block (ob);
}
2074
/* Output the initializer (DECL_INITIAL) of variable NODE->DECL.
   Note: despite the section name, variable initializers share the
   LTO_section_function_body section kind with function bodies.  */

static void
output_constructor (struct varpool_node *node)
{
  tree var = node->decl;
  struct output_block *ob;

  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  /* Record the symbol so references inside the initializer can be
     resolved against it.  */
  ob->symbol = node;

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* Output DECL_INITIAL for the variable, which contains the
     initializer tree.  */
  stream_write_tree (ob, DECL_INITIAL (var), true);

  /* Create a section to hold the pickled output of this variable.  */
  produce_asm (ob, var);

  destroy_output_block (ob);
}
2100
2101
/* Emit toplevel asms.  */

void
lto_output_toplevel_asms (void)
{
  struct output_block *ob;
  struct asm_node *can;
  char *section_name;
  struct lto_simple_header_with_strings header;

  /* Nothing to emit when the unit has no toplevel asm statements.  */
  if (!symtab->first_asm_symbol ())
    return;

  ob = create_output_block (LTO_section_asm);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* Stream each asm string together with its symtab order so the
     reader can reconstruct the original ordering.  */
  for (can = symtab->first_asm_symbol (); can; can = can->next)
    {
      streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
      streamer_write_hwi (ob, can->order);
    }

  /* A NULL string cst terminates the list on the reader side.  */
  streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);

  section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header stream is computed here.  */
  memset (&header, 0, sizeof (header));

  /* Write the header.  */
  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  /* Sizes must be recorded after all writes above so the totals are
     final.  */
  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;
  lto_write_data (&header, sizeof header);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  destroy_output_block (ob);
}
2152
2153
/* Copy the function body or variable constructor of NODE without
   deserializing: the pickled section from the input file is emitted
   verbatim and only the decl-state reference vectors are rebuilt.  */

static void
copy_function_or_variable (struct symtab_node *node)
{
  tree function = node->decl;
  struct lto_file_decl_data *file_data = node->lto_file_data;
  const char *data;
  size_t len;
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
  char *section_name =
    lto_get_section_name (LTO_section_function_body, name, NULL);
  size_t i, j;
  struct lto_in_decl_state *in_state;
  struct lto_out_decl_state *out_state = lto_get_out_decl_state ();

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* We may have renamed the declaration, e.g., a static function.  */
  name = lto_get_decl_name_mapping (file_data, name);

  data = lto_get_section_data (file_data, LTO_section_function_body,
                               name, &len);
  gcc_assert (data);

  /* Do a bit copy of the function body.  */
  lto_write_data (data, len);

  /* Copy decls.  The in-state was built when the input file was read;
     the copied body's tree references index into it.  */
  in_state =
    lto_get_function_in_decl_state (node->lto_file_data, function);
  gcc_assert (in_state);

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      size_t n = in_state->streams[i].size;
      tree *trees = in_state->streams[i].trees;
      struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);

      /* The out state must have the same indices and the in state.
	 So just copy the vector.  All the encoders in the in state
	 must be empty where we reach here.  */
      gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
      encoder->trees.reserve_exact (n);
      for (j = 0; j < n; j++)
	encoder->trees.safe_push (trees[j]);
    }

  lto_free_section_data (file_data, LTO_section_function_body, name,
			 data, len);
  lto_end_section ();
}
2207
2208 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2209
2210 static tree
2211 wrap_refs (tree *tp, int *ws, void *)
2212 {
2213 tree t = *tp;
2214 if (handled_component_p (t)
2215 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL)
2216 {
2217 tree decl = TREE_OPERAND (t, 0);
2218 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2219 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2220 build1 (ADDR_EXPR, ptrtype, decl),
2221 build_int_cst (ptrtype, 0));
2222 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2223 *ws = 0;
2224 }
2225 else if (TREE_CODE (t) == CONSTRUCTOR)
2226 ;
2227 else if (!EXPR_P (t))
2228 *ws = 0;
2229 return NULL_TREE;
2230 }
2231
/* Main entry point from the pass manager.  Streams out the bodies of
   all encoded functions and the initializers of all encoded variables,
   then the symbol table and offload tables.  */

void
lto_output (void)
{
  struct lto_out_decl_state *decl_state;
#ifdef ENABLE_CHECKING
  /* Bitmap of DECL_UIDs already streamed; used only to assert that no
     body or initializer is output twice.  */
  bitmap output = lto_bitmap_alloc ();
#endif
  int i, n_nodes;
  lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* Initialize the streamer.  */
  lto_streamer_init ();

  n_nodes = lto_symtab_encoder_size (encoder);
  /* Process only the functions with bodies.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
	{
	  if (lto_symtab_encoder_encode_body_p (encoder, node)
	      && !node->alias)
	    {
#ifdef ENABLE_CHECKING
	      gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
	      bitmap_set_bit (output, DECL_UID (node->decl));
#endif
	      /* Each body gets its own out-decl state so its decl
		 references are recorded independently.  */
	      decl_state = lto_new_out_decl_state ();
	      lto_push_out_decl_state (decl_state);
	      if (gimple_has_body_p (node->decl) || !flag_wpa
		  /* Thunks have no body but they may be synthetized
		     at WPA time.  */
		  || DECL_ARGUMENTS (node->decl))
		output_function (node);
	      else
		/* At WPA time a body that was never materialized is
		   bit-copied straight from the input file.  */
		copy_function_or_variable (node);
	      gcc_assert (lto_get_out_decl_state () == decl_state);
	      lto_pop_out_decl_state ();
	      lto_record_function_out_decl_state (node->decl, decl_state);
	    }
	}
      else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
	{
	  /* Wrap symbol references inside the ctor in a type
	     preserving MEM_REF.  */
	  tree ctor = DECL_INITIAL (node->decl);
	  if (ctor && !in_lto_p)
	    walk_tree (&ctor, wrap_refs, NULL, NULL);
	  if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
	      && lto_symtab_encoder_encode_initializer_p (encoder, node)
	      && !node->alias)
	    {
	      timevar_push (TV_IPA_LTO_CTORS_OUT);
#ifdef ENABLE_CHECKING
	      gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
	      bitmap_set_bit (output, DECL_UID (node->decl));
#endif
	      decl_state = lto_new_out_decl_state ();
	      lto_push_out_decl_state (decl_state);
	      if (DECL_INITIAL (node->decl) != error_mark_node
		  || !flag_wpa)
		output_constructor (node);
	      else
		/* Bit-copy the initializer when it was never read in.  */
		copy_function_or_variable (node);
	      gcc_assert (lto_get_out_decl_state () == decl_state);
	      lto_pop_out_decl_state ();
	      lto_record_function_out_decl_state (node->decl, decl_state);
	      timevar_pop (TV_IPA_LTO_CTORS_OUT);
	    }
	}
    }

  /* Emit the callgraph after emitting function bodies.  This needs to
     be done now to make sure that all the statements in every function
     have been renumbered so that edges can be associated with call
     statements using the statement UIDs.  */
  output_symtab ();

  output_offload_tables ();

#ifdef ENABLE_CHECKING
  lto_bitmap_free (output);
#endif
}
2318
2319 /* Write each node in encoded by ENCODER to OB, as well as those reachable
2320 from it and required for correct representation of its semantics.
2321 Each node in ENCODER must be a global declaration or a type. A node
2322 is written only once, even if it appears multiple times in the
2323 vector. Certain transitively-reachable nodes, such as those
2324 representing expressions, may be duplicated, but such nodes
2325 must not appear in ENCODER itself. */
2326
2327 static void
2328 write_global_stream (struct output_block *ob,
2329 struct lto_tree_ref_encoder *encoder)
2330 {
2331 tree t;
2332 size_t index;
2333 const size_t size = lto_tree_ref_encoder_size (encoder);
2334
2335 for (index = 0; index < size; index++)
2336 {
2337 t = lto_tree_ref_encoder_get_tree (encoder, index);
2338 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2339 stream_write_tree (ob, t, false);
2340 }
2341 }
2342
2343
2344 /* Write a sequence of indices into the globals vector corresponding
2345 to the trees in ENCODER. These are used by the reader to map the
2346 indices used to refer to global entities within function bodies to
2347 their referents. */
2348
2349 static void
2350 write_global_references (struct output_block *ob,
2351 struct lto_tree_ref_encoder *encoder)
2352 {
2353 tree t;
2354 uint32_t index;
2355 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2356
2357 /* Write size and slot indexes as 32-bit unsigned numbers. */
2358 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2359 data[0] = size;
2360
2361 for (index = 0; index < size; index++)
2362 {
2363 uint32_t slot_num;
2364
2365 t = lto_tree_ref_encoder_get_tree (encoder, index);
2366 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2367 gcc_assert (slot_num != (unsigned)-1);
2368 data[index + 1] = slot_num;
2369 }
2370
2371 lto_write_data (data, sizeof (int32_t) * (size + 1));
2372 free (data);
2373 }
2374
2375
2376 /* Write all the streams in an lto_out_decl_state STATE using
2377 output block OB and output stream OUT_STREAM. */
2378
2379 void
2380 lto_output_decl_state_streams (struct output_block *ob,
2381 struct lto_out_decl_state *state)
2382 {
2383 int i;
2384
2385 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2386 write_global_stream (ob, &state->streams[i]);
2387 }
2388
2389
/* Write all the references in an lto_out_decl_state STATE using
   output block OB and output stream OUT_STREAM.  */

void
lto_output_decl_state_refs (struct output_block *ob,
			    struct lto_out_decl_state *state)
{
  unsigned i;
  uint32_t ref;
  tree decl;

  /* Write reference to FUNCTION_DECL.  If there is not function,
     write reference to void_type_node.  */
  decl = (state->fn_decl) ? state->fn_decl : void_type_node;
  streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
  /* The fn decl must already be in the writer cache at this point.  */
  gcc_assert (ref != (unsigned)-1);
  lto_write_data (&ref, sizeof (uint32_t));

  /* Then one reference vector per decl stream, in index order, to
     mirror lto_output_decl_state_streams.  */
  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    write_global_references (ob, &state->streams[i]);
}
2411
2412
2413 /* Return the written size of STATE. */
2414
2415 static size_t
2416 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2417 {
2418 int i;
2419 size_t size;
2420
2421 size = sizeof (int32_t); /* fn_ref. */
2422 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2423 {
2424 size += sizeof (int32_t); /* vector size. */
2425 size += (lto_tree_ref_encoder_size (&state->streams[i])
2426 * sizeof (int32_t));
2427 }
2428 return size;
2429 }
2430
2431
/* Write symbol T into STREAM in CACHE.  SEEN specifies symbols we wrote
   so far.  ALIAS is true when T is known to be an alias; it relaxes
   the "defined symbols must have a symtab node" assertions below.  */

static void
write_symbol (struct streamer_tree_cache_d *cache,
	      tree t, hash_set<const char *> *seen, bool alias)
{
  const char *name;
  enum gcc_plugin_symbol_kind kind;
  enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
  unsigned slot_num;
  uint64_t size;
  const char *comdat;
  unsigned char c;

  /* None of the following kinds of symbols are needed in the
     symbol table.  */
  if (!TREE_PUBLIC (t)
      || is_builtin_fn (t)
      || DECL_ABSTRACT_P (t)
      || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
    return;
  gcc_assert (TREE_CODE (t) != RESULT_DECL);

  gcc_assert (TREE_CODE (t) == VAR_DECL
	      || TREE_CODE (t) == FUNCTION_DECL);

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));

  /* This behaves like assemble_name_raw in varasm.c, performing the
     same name manipulations that ASM_OUTPUT_LABELREF does.  */
  name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));

  /* Emit each assembler name at most once; hash_set::add returns true
     if the name was already present.  */
  if (seen->add (name))
    return;

  streamer_tree_cache_lookup (cache, t, &slot_num);
  gcc_assert (slot_num != (unsigned)-1);

  /* Classify the symbol: (weak) undefined when external, otherwise
     weak/common/plain definition.  */
  if (DECL_EXTERNAL (t))
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKUNDEF;
      else
	kind = GCCPK_UNDEF;
    }
  else
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKDEF;
      else if (DECL_COMMON (t))
	kind = GCCPK_COMMON;
      else
	kind = GCCPK_DEF;

      /* When something is defined, it should have node attached.  */
      gcc_assert (alias || TREE_CODE (t) != VAR_DECL
		  || varpool_node::get (t)->definition);
      gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
		  || (cgraph_node::get (t)
		      && cgraph_node::get (t)->definition));
    }

  /* Imitate what default_elf_asm_output_external do.
     When symbol is external, we need to output it with DEFAULT visibility
     when compiling with -fvisibility=default, while with HIDDEN visibility
     when symbol has attribute (visibility("hidden")) specified.
     targetm.binds_local_p check DECL_VISIBILITY_SPECIFIED and gets this
     right.  */

  if (DECL_EXTERNAL (t)
      && !targetm.binds_local_p (t))
    visibility = GCCPV_DEFAULT;
  else
    switch (DECL_VISIBILITY (t))
      {
      case VISIBILITY_DEFAULT:
	visibility = GCCPV_DEFAULT;
	break;
      case VISIBILITY_PROTECTED:
	visibility = GCCPV_PROTECTED;
	break;
      case VISIBILITY_HIDDEN:
	visibility = GCCPV_HIDDEN;
	break;
      case VISIBILITY_INTERNAL:
	visibility = GCCPV_INTERNAL;
	break;
      }

  /* Only common symbols carry a size; the linker uses it to pick the
     largest instance.  */
  if (kind == GCCPK_COMMON
      && DECL_SIZE_UNIT (t)
      && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
    size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
  else
    size = 0;

  if (DECL_ONE_ONLY (t))
    comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
  else
    comdat = "";

  /* Fixed record layout read by the linker plugin: NUL-terminated
     name, NUL-terminated comdat, kind byte, visibility byte, 8-byte
     size, 4-byte cache slot.  */
  lto_write_data (name, strlen (name) + 1);
  lto_write_data (comdat, strlen (comdat) + 1);
  c = (unsigned char) kind;
  lto_write_data (&c, 1);
  c = (unsigned char) visibility;
  lto_write_data (&c, 1);
  lto_write_data (&size, 8);
  lto_write_data (&slot_num, 4);
}
2543
/* Return true if NODE should appear in the plugin symbol table.  */

bool
output_symbol_p (symtab_node *node)
{
  struct cgraph_node *cnode;
  if (!node->real_symbol_p ())
    return false;
  /* We keep external functions in symtab for sake of inlining
     and devirtualization.  We do not want to see them in symbol table as
     references unless they are really used.  */
  cnode = dyn_cast <cgraph_node *> (node);
  if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
      && cnode->callers)
    return true;

  /* Ignore all references from external vars initializers - they are not really
     part of the compilation unit until they are used by folding.  Some symbols,
     like references to external construction vtables can not be referred to at all.
     We decide this at can_refer_decl_in_current_unit_p.  */
  if (!node->definition || DECL_EXTERNAL (node->decl))
    {
      int i;
      struct ipa_ref *ref;
      /* Keep the symbol only when something other than an external
	 variable's initializer refers to it.  */
      for (i = 0; node->iterate_referring (i, ref); i++)
	{
	  /* Alias references do not count as uses here.  */
	  if (ref->use == IPA_REF_ALIAS)
	    continue;
	  /* A reference from any function body is a real use.  */
	  if (is_a <cgraph_node *> (ref->referring))
	    return true;
	  /* So is a reference from a non-external variable.  */
	  if (!DECL_EXTERNAL (ref->referring->decl))
	    return true;
	}
      return false;
    }
  return true;
}
2581
2582
2583 /* Write an IL symbol table to OB.
2584 SET and VSET are cgraph/varpool node sets we are outputting. */
2585
2586 static void
2587 produce_symtab (struct output_block *ob)
2588 {
2589 struct streamer_tree_cache_d *cache = ob->writer_cache;
2590 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2591 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2592 lto_symtab_encoder_iterator lsei;
2593
2594 lto_begin_section (section_name, false);
2595 free (section_name);
2596
2597 hash_set<const char *> seen;
2598
2599 /* Write the symbol table.
2600 First write everything defined and then all declarations.
2601 This is necessary to handle cases where we have duplicated symbols. */
2602 for (lsei = lsei_start (encoder);
2603 !lsei_end_p (lsei); lsei_next (&lsei))
2604 {
2605 symtab_node *node = lsei_node (lsei);
2606
2607 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2608 continue;
2609 write_symbol (cache, node->decl, &seen, false);
2610 }
2611 for (lsei = lsei_start (encoder);
2612 !lsei_end_p (lsei); lsei_next (&lsei))
2613 {
2614 symtab_node *node = lsei_node (lsei);
2615
2616 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2617 continue;
2618 write_symbol (cache, node->decl, &seen, false);
2619 }
2620
2621 lto_end_section ();
2622 }
2623
2624
/* This pass is run after all of the functions are serialized and all
   of the IPA passes have written their serialized forms.  This pass
   causes the vector of all of the global decls and types used from
   this file to be written in to a section that can then be read in to
   recover these on other side.  */

void
produce_asm_for_decls (void)
{
  struct lto_out_decl_state *out_state;
  struct lto_out_decl_state *fn_out_state;
  struct lto_decl_header header;
  char *section_name;
  struct output_block *ob;
  unsigned idx, num_fns;
  size_t decl_state_size;
  int32_t num_decl_states;

  ob = create_output_block (LTO_section_decls);

  memset (&header, 0, sizeof (struct lto_decl_header));

  section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* All alias pairs must have been resolved by now.  */
  gcc_assert (!alias_pairs);

  /* Get rid of the global decl state hash tables to save some memory.  */
  out_state = lto_get_out_decl_state ();
  for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
    if (out_state->streams[i].tree_hash_table)
      {
	delete out_state->streams[i].tree_hash_table;
	out_state->streams[i].tree_hash_table = NULL;
      }

  /* Write the global symbols.  The global state is streamed first,
     then the per-function states, so the reader sees the same order.  */
  lto_output_decl_state_streams (ob, out_state);
  num_fns = lto_function_decl_states.length ();
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_streams (ob, fn_out_state);
    }

  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  /* Currently not used.  This field would allow us to preallocate
     the globals vector, so that it need not be resized as it is extended.  */
  header.num_nodes = -1;

  /* Compute the total size of all decl out states.  The extra word
     accounts for the decl-state count written below.  */
  decl_state_size = sizeof (int32_t);
  decl_state_size += lto_out_decl_state_written_size (out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      decl_state_size += lto_out_decl_state_written_size (fn_out_state);
    }
  header.decl_state_size = decl_state_size;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  lto_write_data (&header, sizeof header);

  /* Write the main out-decl state, followed by out-decl states of
     functions.  */
  num_decl_states = num_fns + 1;
  lto_write_data (&num_decl_states, sizeof (num_decl_states));
  lto_output_decl_state_refs (ob, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state = lto_function_decl_states[idx];
      lto_output_decl_state_refs (ob, fn_out_state);
    }

  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  /* Write the symbol table.  It is used by linker to determine dependencies
     and thus we can skip it for WPA.  */
  if (!flag_wpa)
    produce_symtab (ob);

  /* Write command line opts.  */
  lto_write_options ();

  /* Deallocate memory and clean up.  */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_delete_out_decl_state (fn_out_state);
    }
  lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
  lto_function_decl_states.release ();
  destroy_output_block (ob);
}