lto-streamer-out.c (struct sccs): Turn to ...
[gcc.git] / gcc / lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2014 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "stor-layout.h"
29 #include "stringpool.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "hashtab.h"
35 #include "basic-block.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-expr.h"
39 #include "is-a.h"
40 #include "gimple.h"
41 #include "gimple-iterator.h"
42 #include "gimple-ssa.h"
43 #include "tree-ssanames.h"
44 #include "tree-pass.h"
45 #include "function.h"
46 #include "diagnostic-core.h"
47 #include "inchash.h"
48 #include "except.h"
49 #include "lto-symtab.h"
50 #include "lto-streamer.h"
51 #include "data-streamer.h"
52 #include "gimple-streamer.h"
53 #include "tree-streamer.h"
54 #include "streamer-hooks.h"
55 #include "cfgloop.h"
56 #include "builtins.h"
57
58
59 static void lto_write_tree (struct output_block*, tree, bool);
60
/* Reset the cached source-location state (file, line, column) in
   output block OB.  These fields remember the location most recently
   streamed so lto_output_location can emit only the components that
   changed; clearing them makes the next location stream its file in
   full.  */

static void
clear_line_info (struct output_block *ob)
{
  ob->current_file = NULL;
  ob->current_line = 0;
  ob->current_col = 0;
}
70
71
72 /* Create the output block and return it. SECTION_TYPE is
73 LTO_section_function_body or LTO_static_initializer. */
74
75 struct output_block *
76 create_output_block (enum lto_section_type section_type)
77 {
78 struct output_block *ob = XCNEW (struct output_block);
79
80 ob->section_type = section_type;
81 ob->decl_state = lto_get_out_decl_state ();
82 ob->main_stream = XCNEW (struct lto_output_stream);
83 ob->string_stream = XCNEW (struct lto_output_stream);
84 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
85
86 if (section_type == LTO_section_function_body)
87 ob->cfg_stream = XCNEW (struct lto_output_stream);
88
89 clear_line_info (ob);
90
91 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
92 gcc_obstack_init (&ob->obstack);
93
94 return ob;
95 }
96
97
98 /* Destroy the output block OB. */
99
100 void
101 destroy_output_block (struct output_block *ob)
102 {
103 enum lto_section_type section_type = ob->section_type;
104
105 delete ob->string_hash_table;
106 ob->string_hash_table = NULL;
107
108 free (ob->main_stream);
109 free (ob->string_stream);
110 if (section_type == LTO_section_function_body)
111 free (ob->cfg_stream);
112
113 streamer_tree_cache_delete (ob->writer_cache);
114 obstack_free (&ob->obstack, NULL);
115
116 free (ob);
117 }
118
119
/* Look up type NODE in the decl-state's type table and write an
   LTO_type_ref record followed by NODE's table index to OB's main
   stream.  */

static void
output_type_ref (struct output_block *ob, tree node)
{
  /* Record tag first, then the index — stream order is significant.  */
  streamer_write_record_start (ob, LTO_type_ref);
  lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
}
128
129
/* Return true if tree node T is written to various tables.  For these
   nodes, we sometimes want to write their physical representation
   (via lto_output_tree), and sometimes we need to emit an index
   reference into a table (via lto_output_tree_ref).  */

static bool
tree_is_indexable (tree t)
{
  /* Parameters and return values of functions of variably modified types
     must go to global stream, because they may be used in the type
     definition.  */
  if (TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
    return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
  /* IMPORTED_DECL is put into BLOCK and thus it never can be shared.  */
  else if (TREE_CODE (t) == IMPORTED_DECL)
    return false;
  /* Automatic (non-static) variables and TYPE/CONST/NAMELIST decls that
     live inside a function are streamed with the function body rather
     than indexed into the global tables.  */
  else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
	    || TREE_CODE (t) == TYPE_DECL
	    || TREE_CODE (t) == CONST_DECL
	    || TREE_CODE (t) == NAMELIST_DECL)
	   && decl_function_context (t))
    return false;
  /* DEBUG_EXPR_DECLs are never indexed.  */
  else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
    return false;
  /* Variably modified types need to be streamed alongside function
     bodies because they can refer to local entities.  Together with
     them we have to localize their members as well.
     ??? In theory that includes non-FIELD_DECLs as well.  */
  else if (TYPE_P (t)
	   && variably_modified_type_p (t, NULL_TREE))
    return false;
  else if (TREE_CODE (t) == FIELD_DECL
	   && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
    return false;
  /* Everything else: all types, all remaining decls, and SSA names are
     indexable.  */
  else
    return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
}
167
168
/* Output info about new location into bitpack BP.
   After outputting bitpack, lto_output_location_data has
   to be done to output actual data.

   The encoding is delta-based against the last location streamed into
   OB: one "unknown" bit, then one changed/unchanged bit per component
   (file, line, column), then the value of each changed component.  */

void
lto_output_location (struct output_block *ob, struct bitpack_d *bp,
		     location_t loc)
{
  expanded_location xloc;

  /* Only the pure locus is streamed; ad-hoc wrapper data is dropped.  */
  loc = LOCATION_LOCUS (loc);
  bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
  if (loc == UNKNOWN_LOCATION)
    return;

  xloc = expand_location (loc);

  /* One bit per component tells the reader which values follow.  */
  bp_pack_value (bp, ob->current_file != xloc.file, 1);
  bp_pack_value (bp, ob->current_line != xloc.line, 1);
  bp_pack_value (bp, ob->current_col != xloc.column, 1);

  /* NOTE(review): the file name is compared by pointer, which assumes
     equal names share the same line-map string — confirm against the
     line-map API before relying on this for deduplication.  */
  if (ob->current_file != xloc.file)
    bp_pack_var_len_unsigned (bp,
			      streamer_string_index (ob, xloc.file,
						     strlen (xloc.file) + 1,
						     true));
  ob->current_file = xloc.file;

  if (ob->current_line != xloc.line)
    bp_pack_var_len_unsigned (bp, xloc.line);
  ob->current_line = xloc.line;

  if (ob->current_col != xloc.column)
    bp_pack_var_len_unsigned (bp, xloc.column);
  ob->current_col = xloc.column;
}
205
206
/* If EXPR is an indexable tree node, output a reference to it to
   output block OB.  Otherwise, output the physical representation of
   EXPR to OB.  */

static void
lto_output_tree_ref (struct output_block *ob, tree expr)
{
  enum tree_code code;

  /* Types are always referenced through the type table.  */
  if (TYPE_P (expr))
    {
      output_type_ref (ob, expr);
      return;
    }

  code = TREE_CODE (expr);
  switch (code)
    {
    case SSA_NAME:
      /* SSA names are referenced by version number, not table index.  */
      streamer_write_record_start (ob, LTO_ssa_name_ref);
      streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
      break;

    case FIELD_DECL:
      streamer_write_record_start (ob, LTO_field_decl_ref);
      lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case FUNCTION_DECL:
      streamer_write_record_start (ob, LTO_function_decl_ref);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case VAR_DECL:
    case DEBUG_EXPR_DECL:
      gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
      /* Fallthrough: these share the global decl table with PARM_DECLs.  */
    case PARM_DECL:
      streamer_write_record_start (ob, LTO_global_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case CONST_DECL:
      streamer_write_record_start (ob, LTO_const_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case IMPORTED_DECL:
      gcc_assert (decl_function_context (expr) == NULL);
      streamer_write_record_start (ob, LTO_imported_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TYPE_DECL:
      streamer_write_record_start (ob, LTO_type_decl_ref);
      lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMELIST_DECL:
      streamer_write_record_start (ob, LTO_namelist_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMESPACE_DECL:
      streamer_write_record_start (ob, LTO_namespace_decl_ref);
      lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case LABEL_DECL:
      streamer_write_record_start (ob, LTO_label_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case RESULT_DECL:
      streamer_write_record_start (ob, LTO_result_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TRANSLATION_UNIT_DECL:
      streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    default:
      /* No other node is indexable, so it should have been handled by
	 lto_output_tree.  */
      gcc_unreachable ();
    }
}
295
296
/* Return true if EXPR is a tree node that can be written to disk.  */

static inline bool
lto_is_streamable (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  /* Notice that we reject SSA_NAMEs as well.  We only emit the SSA
     name version in lto_output_tree_ref (see output_ssa_names).  */
  return !is_lang_specific (expr)
	 && code != SSA_NAME
	 && code != CALL_EXPR
	 && code != LANG_TYPE
	 && code != MODIFY_EXPR
	 && code != INIT_EXPR
	 && code != TARGET_EXPR
	 && code != BIND_EXPR
	 && code != WITH_CLEANUP_EXPR
	 && code != STATEMENT_LIST
	 /* Reject all statement-class codes except CASE_LABEL_EXPR and
	    DECL_EXPR, which are the only streamable statements.  */
	 && (code == CASE_LABEL_EXPR
	     || code == DECL_EXPR
	     || TREE_CODE_CLASS (code) != tcc_statement);
}
320
321
322 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
323
324 static tree
325 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
326 {
327 gcc_checking_assert (DECL_P (expr)
328 && TREE_CODE (expr) != FUNCTION_DECL
329 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
330
331 /* Handle DECL_INITIAL for symbols. */
332 tree initial = DECL_INITIAL (expr);
333 if (TREE_CODE (expr) == VAR_DECL
334 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
335 && !DECL_IN_CONSTANT_POOL (expr)
336 && initial)
337 {
338 varpool_node *vnode;
339 /* Extra section needs about 30 bytes; do not produce it for simple
340 scalar values. */
341 if (TREE_CODE (DECL_INITIAL (expr)) == CONSTRUCTOR
342 || !(vnode = varpool_node::get (expr))
343 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
344 initial = error_mark_node;
345 }
346
347 return initial;
348 }
349
350
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  */

static void
lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
{
  /* Pack all the non-pointer fields in EXPR into a bitpack and write
     the resulting bitpack.  */
  bitpack_d bp = bitpack_create (ob->main_stream);
  streamer_pack_tree_bitfields (ob, &bp, expr);
  streamer_write_bitpack (&bp);

  /* Write all the pointer fields in EXPR.  */
  streamer_write_tree_body (ob, expr, ref_p);

  /* Write any LTO-specific data to OB.  FUNCTION_DECLs and
     TRANSLATION_UNIT_DECLs do not have their DECL_INITIAL streamed
     here.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    {
      /* Handle DECL_INITIAL for symbols.  */
      tree initial = get_symbol_initial_value
			 (ob->decl_state->symtab_node_encoder, expr);
      stream_write_tree (ob, initial, ref_p);
    }
}
379
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  Aborts with an internal error if EXPR is
   not streamable (see lto_is_streamable).  */

static void
lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
{
  if (!lto_is_streamable (expr))
    internal_error ("tree code %qs is not supported in LTO streams",
		    get_tree_code_name (TREE_CODE (expr)));

  /* Write the header, containing everything needed to materialize
     EXPR on the reading side.  */
  streamer_write_tree_header (ob, expr);

  lto_write_tree_1 (ob, expr, ref_p);

  /* Mark the end of EXPR.  */
  streamer_write_zero (ob);
}
401
/* Emit the physical representation of tree node EXPR to output block
   OB.  If THIS_REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.
   HASH is EXPR's precomputed hash, recorded in the writer cache.  */

static void
lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
		   bool ref_p, bool this_ref_p)
{
  unsigned ix;

  /* Indexable trees must have been emitted as references already.  */
  gcc_checking_assert (expr != NULL_TREE
		       && !(this_ref_p && tree_is_indexable (expr)));

  /* EXPR must not have been streamed before; cache it with its hash.  */
  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
					      expr, hash, &ix);
  gcc_assert (!exists_p);
  if (streamer_handle_as_builtin_p (expr))
    {
      /* MD and NORMAL builtins do not need to be written out
	 completely as they are always instantiated by the
	 compiler on startup.  The only builtins that need to
	 be written out are BUILT_IN_FRONTEND.  For all other
	 builtins, we simply write the class and code.  */
      streamer_write_builtin (ob, expr);
    }
  else if (TREE_CODE (expr) == INTEGER_CST
	   && !TREE_OVERFLOW (expr))
    {
      /* Shared INTEGER_CST nodes are special because they need their
	 original type to be materialized by the reader (to implement
	 TYPE_CACHED_VALUES).  */
      streamer_write_integer_cst (ob, expr, ref_p);
    }
  else
    {
      /* This is the first time we see EXPR, write its fields
	 to OB.  */
      lto_write_tree (ob, expr, ref_p);
    }
}
442
/* Depth-first walker over tree edges used to discover SCCs of trees
   for streaming.  Constructing a DFS object performs the walk starting
   from EXPR; discovered entries accumulate on SCCSTACK.  */

class DFS
{
public:
  DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
       bool single_p);
  ~DFS ();

  /* A tree paired with the hash value computed for it.  */
  struct scc_entry
  {
    tree t;
    hashval_t hash;
  };
  /* Stack of entries produced by the walk; read by the caller.  */
  vec<scc_entry> sccstack;

private:
  /* Per-node DFS state: discovery number and low-link value.  */
  struct sccs
  {
    unsigned int dfsnum;
    unsigned int low;
  };

  static int scc_entry_compare (const void *, const void *);

  void DFS_write_tree_body (struct output_block *ob,
			    tree expr, sccs *expr_state, bool ref_p,
			    bool single_p);

  void DFS_write_tree (struct output_block *ob, sccs *from_state,
		       tree expr, bool ref_p, bool this_ref_p,
		       bool single_p);
  hashval_t
  hash_scc (struct output_block *ob, unsigned first, unsigned size);

  /* Next DFS number to hand out during the walk.  */
  unsigned int next_dfs_num;
  /* Map from visited tree to its sccs state, plus the obstack backing
     that per-walk state.  */
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
};
480
481 DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
482 bool single_p)
483 {
484 sccstack.create (0);
485 sccstate = pointer_map_create ();
486 gcc_obstack_init (&sccstate_obstack);
487 next_dfs_num = 1;
488 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p, single_p);
489 }
490
491 DFS::~DFS ()
492 {
493 sccstack.release ();
494 pointer_map_destroy (sccstate);
495 obstack_free (&sccstate_obstack, NULL);
496 }
497
/* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
   DFS recurse for all tree edges originating from it.  The edges
   followed here must mirror exactly what streamer_write_tree_body
   later writes, section by section.  */

void
DFS::DFS_write_tree_body (struct output_block *ob,
			  tree expr, sccs *expr_state, bool ref_p,
			  bool single_p)
{
  /* Recurse into DEST, recording the edge out of EXPR's state.  */
#define DFS_follow_tree_edge(DEST) \
  DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p, single_p)

  enum tree_code code;

  code = TREE_CODE (expr);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (TREE_CODE (expr) != IDENTIFIER_NODE)
	DFS_follow_tree_edge (TREE_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
	DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      DFS_follow_tree_edge (TREE_REALPART (expr));
      DFS_follow_tree_edge (TREE_IMAGPART (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (expr)
	  && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
	  && ANON_AGGRNAME_P (DECL_NAME (expr)))
	;
      else
	DFS_follow_tree_edge (DECL_NAME (expr));
      DFS_follow_tree_edge (DECL_CONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      DFS_follow_tree_edge (DECL_SIZE (expr));
      DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));

      /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
	 special handling in LTO, it must be handled by streamer hooks.  */

      DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));

      /* Do not follow DECL_ABSTRACT_ORIGIN.  We cannot handle debug
	 information for early inlining so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      if ((TREE_CODE (expr) == VAR_DECL
	   || TREE_CODE (expr) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (expr))
	DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
      if (TREE_CODE (expr) == VAR_DECL)
	DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (TREE_CODE (expr) == TYPE_DECL)
	DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* Make sure we don't inadvertently set the assembler name.  */
      if (DECL_ASSEMBLER_NAME_SET_P (expr))
	DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
      DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
      DFS_follow_tree_edge (DECL_FCONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      DFS_follow_tree_edge (DECL_VINDEX (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
      /* Do not DECL_FUNCTION_SPECIFIC_TARGET.  They will be regenerated.  */
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      DFS_follow_tree_edge (TYPE_SIZE (expr));
      DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
      DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
      DFS_follow_tree_edge (TYPE_NAME (expr));
      /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
	 reconstructed during fixup.  */
      /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
	 during fixup.  */
      DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
      DFS_follow_tree_edge (TYPE_CONTEXT (expr));
      /* TYPE_CANONICAL is re-computed during type merging, so no need
	 to follow it here.  */
      DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      /* The non-common slots are overloaded per type-code.  */
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	DFS_follow_tree_edge (TYPE_VALUES (expr));
      else if (TREE_CODE (expr) == ARRAY_TYPE)
	DFS_follow_tree_edge (TYPE_DOMAIN (expr));
      else if (RECORD_OR_UNION_TYPE_P (expr))
	for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
	  DFS_follow_tree_edge (t);
      else if (TREE_CODE (expr) == FUNCTION_TYPE
	       || TREE_CODE (expr) == METHOD_TYPE)
	DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));

      if (!POINTER_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_MINVAL (expr));
      DFS_follow_tree_edge (TYPE_MAXVAL (expr));
      if (RECORD_OR_UNION_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_BINFO (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      DFS_follow_tree_edge (TREE_PURPOSE (expr));
      DFS_follow_tree_edge (TREE_VALUE (expr));
      DFS_follow_tree_edge (TREE_CHAIN (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    {
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_OPERAND (expr, i));
      DFS_follow_tree_edge (TREE_BLOCK (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    {
      for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
	/* ??? FIXME.  See also streamer_write_chain.  */
	if (!(VAR_OR_FUNCTION_DECL_P (t)
	      && DECL_EXTERNAL (t)))
	  DFS_follow_tree_edge (t);

      DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));

      /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
	 handle - those that represent inlined function scopes.
	 For the rest, drop them on the floor instead of ICEing
	 in dwarf2out.c.  */
      if (inlined_function_outer_scope_p (expr))
	{
	  tree ultimate_origin = block_ultimate_origin (expr);
	  DFS_follow_tree_edge (ultimate_origin);
	}
      /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
	 information for early inlined BLOCKs so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
	 streaming time.  */

      /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
	 list is re-constructed from BLOCK_SUPERCONTEXT.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree t;

      /* Note that the number of BINFO slots has already been emitted in
	 EXPR's header (see streamer_write_tree_header) because this length
	 is needed to build the empty BINFO node on the reader side.  */
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
	DFS_follow_tree_edge (t);
      DFS_follow_tree_edge (BINFO_OFFSET (expr));
      DFS_follow_tree_edge (BINFO_VTABLE (expr));
      DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));

      /* The number of BINFO_BASE_ACCESSES has already been emitted in
	 EXPR's bitfield section.  */
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
	DFS_follow_tree_edge (t);

      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
	{
	  DFS_follow_tree_edge (index);
	  DFS_follow_tree_edge (value);
	}
    }

  if (code == OMP_CLAUSE)
    {
      int i;
      for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
	DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
      DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
    }

#undef DFS_follow_tree_edge
}
727
/* Return a hash value for the tree T.
   CACHE holds hash values of trees outside current SCC.  MAP, if non-NULL,
   may hold hash values of trees inside current SCC.  */
731
732 static hashval_t
733 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
734 {
735 inchash hstate;
736
737 #define visit(SIBLING) \
738 do { \
739 unsigned ix; \
740 if (!SIBLING) \
741 hstate.add_int (0); \
742 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
743 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
744 else if (map) \
745 hstate.add_int (*map->get (SIBLING)); \
746 else \
747 hstate.add_int (1); \
748 } while (0)
749
750 /* Hash TS_BASE. */
751 enum tree_code code = TREE_CODE (t);
752 hstate.add_int (code);
753 if (!TYPE_P (t))
754 {
755 hstate.add_flag (TREE_SIDE_EFFECTS (t));
756 hstate.add_flag (TREE_CONSTANT (t));
757 hstate.add_flag (TREE_READONLY (t));
758 hstate.add_flag (TREE_PUBLIC (t));
759 }
760 hstate.add_flag (TREE_ADDRESSABLE (t));
761 hstate.add_flag (TREE_THIS_VOLATILE (t));
762 if (DECL_P (t))
763 hstate.add_flag (DECL_UNSIGNED (t));
764 else if (TYPE_P (t))
765 hstate.add_flag (TYPE_UNSIGNED (t));
766 if (TYPE_P (t))
767 hstate.add_flag (TYPE_ARTIFICIAL (t));
768 else
769 hstate.add_flag (TREE_NO_WARNING (t));
770 hstate.add_flag (TREE_NOTHROW (t));
771 hstate.add_flag (TREE_STATIC (t));
772 hstate.add_flag (TREE_PROTECTED (t));
773 hstate.add_flag (TREE_DEPRECATED (t));
774 if (code != TREE_BINFO)
775 hstate.add_flag (TREE_PRIVATE (t));
776 if (TYPE_P (t))
777 {
778 hstate.add_flag (TYPE_SATURATING (t));
779 hstate.add_flag (TYPE_ADDR_SPACE (t));
780 }
781 else if (code == SSA_NAME)
782 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
783 hstate.commit_flag ();
784
785 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
786 {
787 int i;
788 hstate.add_wide_int (TREE_INT_CST_NUNITS (t));
789 hstate.add_wide_int (TREE_INT_CST_EXT_NUNITS (t));
790 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
791 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
792 }
793
794 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
795 {
796 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
797 hstate.add_flag (r.cl);
798 hstate.add_flag (r.sign);
799 hstate.add_flag (r.signalling);
800 hstate.add_flag (r.canonical);
801 hstate.commit_flag ();
802 hstate.add_int (r.uexp);
803 hstate.add (r.sig, sizeof (r.sig));
804 }
805
806 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
807 {
808 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
809 hstate.add_int (f.mode);
810 hstate.add_int (f.data.low);
811 hstate.add_int (f.data.high);
812 }
813
814 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
815 {
816 hstate.add_wide_int (DECL_MODE (t));
817 hstate.add_flag (DECL_NONLOCAL (t));
818 hstate.add_flag (DECL_VIRTUAL_P (t));
819 hstate.add_flag (DECL_IGNORED_P (t));
820 hstate.add_flag (DECL_ABSTRACT (t));
821 hstate.add_flag (DECL_ARTIFICIAL (t));
822 hstate.add_flag (DECL_USER_ALIGN (t));
823 hstate.add_flag (DECL_PRESERVE_P (t));
824 hstate.add_flag (DECL_EXTERNAL (t));
825 hstate.add_flag (DECL_GIMPLE_REG_P (t));
826 hstate.commit_flag ();
827 hstate.add_int (DECL_ALIGN (t));
828 if (code == LABEL_DECL)
829 {
830 hstate.add_int (EH_LANDING_PAD_NR (t));
831 hstate.add_int (LABEL_DECL_UID (t));
832 }
833 else if (code == FIELD_DECL)
834 {
835 hstate.add_flag (DECL_PACKED (t));
836 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
837 hstate.add_int (DECL_OFFSET_ALIGN (t));
838 }
839 else if (code == VAR_DECL)
840 {
841 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
842 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
843 }
844 if (code == RESULT_DECL
845 || code == PARM_DECL
846 || code == VAR_DECL)
847 {
848 hstate.add_flag (DECL_BY_REFERENCE (t));
849 if (code == VAR_DECL
850 || code == PARM_DECL)
851 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
852 }
853 hstate.commit_flag ();
854 }
855
856 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
857 hstate.add_int (DECL_REGISTER (t));
858
859 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
860 {
861 hstate.add_flag (DECL_COMMON (t));
862 hstate.add_flag (DECL_DLLIMPORT_P (t));
863 hstate.add_flag (DECL_WEAK (t));
864 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
865 hstate.add_flag (DECL_COMDAT (t));
866 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
867 hstate.add_int (DECL_VISIBILITY (t));
868 if (code == VAR_DECL)
869 {
870 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
871 hstate.add_flag (DECL_HARD_REGISTER (t));
872 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
873 }
874 if (TREE_CODE (t) == FUNCTION_DECL)
875 {
876 hstate.add_flag (DECL_FINAL_P (t));
877 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
878 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
879 }
880 hstate.commit_flag ();
881 }
882
883 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
884 {
885 hstate.add_int (DECL_BUILT_IN_CLASS (t));
886 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
887 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
888 hstate.add_flag (DECL_UNINLINABLE (t));
889 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
890 hstate.add_flag (DECL_IS_NOVOPS (t));
891 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
892 hstate.add_flag (DECL_IS_MALLOC (t));
893 hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
894 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
895 hstate.add_flag (DECL_STATIC_CHAIN (t));
896 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
897 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
898 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
899 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
900 hstate.add_flag (DECL_PURE_P (t));
901 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
902 hstate.commit_flag ();
903 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
904 hstate.add_int (DECL_FUNCTION_CODE (t));
905 }
906
907 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
908 {
909 hstate.add_wide_int (TYPE_MODE (t));
910 hstate.add_flag (TYPE_STRING_FLAG (t));
911 hstate.add_flag (TYPE_NO_FORCE_BLK (t));
912 hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
913 hstate.add_flag (TYPE_PACKED (t));
914 hstate.add_flag (TYPE_RESTRICT (t));
915 hstate.add_flag (TYPE_USER_ALIGN (t));
916 hstate.add_flag (TYPE_READONLY (t));
917 if (RECORD_OR_UNION_TYPE_P (t))
918 {
919 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
920 hstate.add_flag (TYPE_FINAL_P (t));
921 }
922 else if (code == ARRAY_TYPE)
923 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
924 hstate.commit_flag ();
925 hstate.add_int (TYPE_PRECISION (t));
926 hstate.add_int (TYPE_ALIGN (t));
927 hstate.add_int ((TYPE_ALIAS_SET (t) == 0
928 || (!in_lto_p
929 && get_alias_set (t) == 0))
930 ? 0 : -1);
931 }
932
933 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
934 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
935 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
936
937 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
938 gcc_unreachable ();
939
940 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
941 hstate.add (t, sizeof (struct cl_optimization));
942
943 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
944 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
945
946 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
947 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
948
949 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
950 {
951 if (code != IDENTIFIER_NODE)
952 visit (TREE_TYPE (t));
953 }
954
955 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
956 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
957 visit (VECTOR_CST_ELT (t, i));
958
959 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
960 {
961 visit (TREE_REALPART (t));
962 visit (TREE_IMAGPART (t));
963 }
964
965 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
966 {
967 /* Drop names that were created for anonymous entities. */
968 if (DECL_NAME (t)
969 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
970 && ANON_AGGRNAME_P (DECL_NAME (t)))
971 ;
972 else
973 visit (DECL_NAME (t));
974 if (DECL_FILE_SCOPE_P (t))
975 ;
976 else
977 visit (DECL_CONTEXT (t));
978 }
979
980 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
981 {
982 visit (DECL_SIZE (t));
983 visit (DECL_SIZE_UNIT (t));
984 visit (DECL_ATTRIBUTES (t));
985 if ((code == VAR_DECL
986 || code == PARM_DECL)
987 && DECL_HAS_VALUE_EXPR_P (t))
988 visit (DECL_VALUE_EXPR (t));
989 if (code == VAR_DECL
990 && DECL_HAS_DEBUG_EXPR_P (t))
991 visit (DECL_DEBUG_EXPR (t));
992 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
993 be able to call get_symbol_initial_value. */
994 }
995
996 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
997 {
998 if (code == TYPE_DECL)
999 visit (DECL_ORIGINAL_TYPE (t));
1000 }
1001
1002 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1003 {
1004 if (DECL_ASSEMBLER_NAME_SET_P (t))
1005 visit (DECL_ASSEMBLER_NAME (t));
1006 }
1007
1008 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1009 {
1010 visit (DECL_FIELD_OFFSET (t));
1011 visit (DECL_BIT_FIELD_TYPE (t));
1012 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1013 visit (DECL_FIELD_BIT_OFFSET (t));
1014 visit (DECL_FCONTEXT (t));
1015 }
1016
1017 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1018 {
1019 visit (DECL_VINDEX (t));
1020 visit (DECL_FUNCTION_PERSONALITY (t));
1021 /* Do not follow DECL_FUNCTION_SPECIFIC_TARGET. */
1022 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1023 }
1024
1025 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1026 {
1027 visit (TYPE_SIZE (t));
1028 visit (TYPE_SIZE_UNIT (t));
1029 visit (TYPE_ATTRIBUTES (t));
1030 visit (TYPE_NAME (t));
1031 visit (TYPE_MAIN_VARIANT (t));
1032 if (TYPE_FILE_SCOPE_P (t))
1033 ;
1034 else
1035 visit (TYPE_CONTEXT (t));
1036 visit (TYPE_STUB_DECL (t));
1037 }
1038
1039 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1040 {
1041 if (code == ENUMERAL_TYPE)
1042 visit (TYPE_VALUES (t));
1043 else if (code == ARRAY_TYPE)
1044 visit (TYPE_DOMAIN (t));
1045 else if (RECORD_OR_UNION_TYPE_P (t))
1046 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1047 visit (f);
1048 else if (code == FUNCTION_TYPE
1049 || code == METHOD_TYPE)
1050 visit (TYPE_ARG_TYPES (t));
1051 if (!POINTER_TYPE_P (t))
1052 visit (TYPE_MINVAL (t));
1053 visit (TYPE_MAXVAL (t));
1054 if (RECORD_OR_UNION_TYPE_P (t))
1055 visit (TYPE_BINFO (t));
1056 }
1057
1058 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1059 {
1060 visit (TREE_PURPOSE (t));
1061 visit (TREE_VALUE (t));
1062 visit (TREE_CHAIN (t));
1063 }
1064
1065 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1066 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1067 visit (TREE_VEC_ELT (t, i));
1068
1069 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1070 {
1071 hstate.add_wide_int (TREE_OPERAND_LENGTH (t));
1072 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1073 visit (TREE_OPERAND (t, i));
1074 }
1075
1076 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1077 {
1078 unsigned i;
1079 tree b;
1080 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1081 visit (b);
1082 visit (BINFO_OFFSET (t));
1083 visit (BINFO_VTABLE (t));
1084 visit (BINFO_VPTR_FIELD (t));
1085 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1086 visit (b);
1087 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1088 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1089 }
1090
1091 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1092 {
1093 unsigned i;
1094 tree index, value;
1095 hstate.add_wide_int (CONSTRUCTOR_NELTS (t));
1096 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1097 {
1098 visit (index);
1099 visit (value);
1100 }
1101 }
1102
1103 if (code == OMP_CLAUSE)
1104 {
1105 int i;
1106 HOST_WIDE_INT val;
1107
1108 hstate.add_wide_int (OMP_CLAUSE_CODE (t));
1109 switch (OMP_CLAUSE_CODE (t))
1110 {
1111 case OMP_CLAUSE_DEFAULT:
1112 val = OMP_CLAUSE_DEFAULT_KIND (t);
1113 break;
1114 case OMP_CLAUSE_SCHEDULE:
1115 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1116 break;
1117 case OMP_CLAUSE_DEPEND:
1118 val = OMP_CLAUSE_DEPEND_KIND (t);
1119 break;
1120 case OMP_CLAUSE_MAP:
1121 val = OMP_CLAUSE_MAP_KIND (t);
1122 break;
1123 case OMP_CLAUSE_PROC_BIND:
1124 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1125 break;
1126 case OMP_CLAUSE_REDUCTION:
1127 val = OMP_CLAUSE_REDUCTION_CODE (t);
1128 break;
1129 default:
1130 val = 0;
1131 break;
1132 }
1133 hstate.add_wide_int (val);
1134 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1135 visit (OMP_CLAUSE_OPERAND (t, i));
1136 visit (OMP_CLAUSE_CHAIN (t));
1137 }
1138
1139 return hstate.end ();
1140
1141 #undef visit
1142 }
1143
1144 /* Compare two SCC entries by their hash value for qsorting them. */
1145
1146 int
1147 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1148 {
1149 const scc_entry *p1 = (const scc_entry *) p1_;
1150 const scc_entry *p2 = (const scc_entry *) p2_;
1151 if (p1->hash < p2->hash)
1152 return -1;
1153 else if (p1->hash > p2->hash)
1154 return 1;
1155 return 0;
1156 }
1157
1158 /* Return a hash value for the SCC on the SCC stack from FIRST with
1159 size SIZE. */
1160
hashval_t
DFS::hash_scc (struct output_block *ob,
	       unsigned first, unsigned size)
{
  unsigned int last_classes = 0, iterations = 0;

  /* Compute hash values for the SCC members.  */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash = hash_tree (ob->writer_cache, NULL,
					sccstack[first+i].t);

  /* A singleton SCC needs no order canonicalization.  */
  if (size == 1)
    return sccstack[first].hash;

  /* We aim to get unique hash for every tree within SCC and compute hash
     value of the whole SCC by combining all values together in a stable
     (entry point independent) order.  This guarantees that the same SCC
     regions within different translation units will get the same hash
     values and therefore will be merged at WPA time.

     Often the hashes are already unique.  In that case we compute the SCC
     hash by combining individual hash values in an increasing order.

     If there are duplicates we seek at least one tree with unique hash (and
     pick one with minimal hash and this property).  Then we obtain a stable
     order by DFS walk starting from this unique tree and then use the index
     within this order to make individual hash values unique.

     If there is no tree with unique hash, we iteratively propagate the hash
     values across the internal edges of SCC.  This usually quickly leads
     to unique hashes.  Consider, for example, an SCC containing two pointers
     that are identical except for the type they point to and assume that
     these types are also part of the SCC.
     The propagation will add the points-to type information into their hash
     values.  */
  do
    {
      /* Sort the SCC so we can easily check for uniqueness.  */
      qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);

      unsigned int classes = 1;
      int firstunique = -1;

      /* Find tree with lowest unique hash (if it exists) and compute
	 number of equivalence classes.  */
      if (sccstack[first].hash != sccstack[first+1].hash)
	firstunique = 0;
      for (unsigned i = 1; i < size; ++i)
	if (sccstack[first+i-1].hash != sccstack[first+i].hash)
	  {
	    classes++;
	    /* Entry I is unique when it differs from both its
	       predecessor and its successor in the sorted order.  */
	    if (firstunique == -1
		&& (i == size - 1
		    || sccstack[first+i+1].hash != sccstack[first+i].hash))
	      firstunique = i;
	  }

      /* If we found a tree with unique hash, stop the iteration.  */
      if (firstunique != -1
	  /* Also terminate if we run out of iterations or if the number of
	     equivalence classes is no longer increasing.
	     For example a cyclic list of trees that are all equivalent will
	     never have a unique entry point; we however do not build such
	     SCCs in our IL.  */
	  || classes <= last_classes || iterations > 16)
	{
          hashval_t scc_hash;

	  /* If some hashes are not unique (CLASSES != SIZE), use the DFS
	     walk starting from FIRSTUNIQUE to obtain a stable order.  */
	  if (classes != size && firstunique != -1)
	    {
	      hash_map <tree, hashval_t> map(size*2);

	      /* Store hash values into a map, so we can associate them with
		 the reordered SCC.  */
	      for (unsigned i = 0; i < size; ++i)
		map.put (sccstack[first+i].t, sccstack[first+i].hash);

	      /* Re-run the DFS from the unique entry; its sccstack gives
		 the canonical, entry-point-independent member order.  */
	      DFS again (ob, sccstack[first+firstunique].t, false, false, true);
	      gcc_assert (again.sccstack.length () == size);

	      memcpy (sccstack.address () + first,
		      again.sccstack.address (),
		      sizeof (scc_entry) * size);

	      /* Update hash values of individual members by hashing in the
		 index within the stable order.  This ensures uniqueness.
		 Also compute the scc_hash by mixing in all hash values in
		 the stable order we obtained.  */
	      sccstack[first].hash = *map.get (sccstack[first].t);
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		{
		  sccstack[first+i].hash
		    = iterative_hash_hashval_t (i,
						*map.get (sccstack[first+i].t));
		  scc_hash = iterative_hash_hashval_t (scc_hash,
						       sccstack[first+i].hash);
		}
	    }
	  /* If we got unique hash values for each tree, then the sort
	     already ensured entry point independent order.  Only compute
	     the final scc hash.

	     If we failed to find the unique entry point, we go by the same
	     route.  We will eventually introduce unwanted hash conflicts.  */
	  else
	    {
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		scc_hash = iterative_hash_hashval_t (scc_hash,
						     sccstack[first+i].hash);
	      /* We can not 100% guarantee that the hash will not conflict
		 in a way so the unique hash is not found.  This however
		 should be an extremely rare situation.  ICE for now so
		 possible issues are found and evaluated.  */
	      gcc_checking_assert (classes == size);
	    }

	  /* To avoid conflicts across SCCs, iteratively hash the whole SCC
	     hash into the hash of each of the elements.  */
	  for (unsigned i = 0; i < size; ++i)
	    sccstack[first+i].hash
	      = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
	  return scc_hash;
	}

      last_classes = classes;
      iterations++;

      /* We failed to identify the entry point; propagate hash values across
	 the edges.  */
      {
	hash_map <tree, hashval_t> map(size*2);
	for (unsigned i = 0; i < size; ++i)
	  map.put (sccstack[first+i].t, sccstack[first+i].hash);

	/* Passing MAP to hash_tree makes the hash of each member depend
	   on the hashes of its SCC neighbors.  */
	for (unsigned i = 0; i < size; i++)
	  sccstack[first+i].hash = hash_tree (ob->writer_cache, &map,
					      sccstack[first+i].t);
      }
    }
  while (true);
}
1306
1307 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1308 already in the streamer cache. Main routine called for
1309 each visit of EXPR. */
1310
void
DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
		     tree expr, bool ref_p, bool this_ref_p, bool single_p)
{
  unsigned ix;
  sccs **slot;

  /* Handle special cases.  */
  if (expr == NULL_TREE)
    return;

  /* Do not DFS walk into indexable trees.  */
  if (this_ref_p && tree_is_indexable (expr))
    return;

  /* Check if we already streamed EXPR.  */
  if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
    return;

  slot = (sccs **)pointer_map_insert (sccstate, expr);
  sccs *cstate = *slot;
  if (!cstate)
    {
      scc_entry e = { expr, 0 };
      /* Not yet visited.  DFS recurse and push it onto the stack.  */
      *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
      sccstack.safe_push (e);
      /* Standard Tarjan SCC bookkeeping: DFSNUM is the discovery
	 number, LOW the smallest DFSNUM reachable from this node.  */
      cstate->dfsnum = next_dfs_num++;
      cstate->low = cstate->dfsnum;

      /* Builtins are streamed by reference; do not walk their edges.  */
      if (streamer_handle_as_builtin_p (expr))
	;
      else if (TREE_CODE (expr) == INTEGER_CST
	       && !TREE_OVERFLOW (expr))
	/* INTEGER_CSTs only need their type edge walked; they are
	   materialized from the type on the reading side.  */
	DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p, single_p);
      else
	{
	  DFS_write_tree_body (ob, expr, cstate, ref_p, single_p);

	  /* Walk any LTO-specific edges.  */
	  if (DECL_P (expr)
	      && TREE_CODE (expr) != FUNCTION_DECL
	      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
	    {
	      /* Handle DECL_INITIAL for symbols.  */
	      tree initial = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
						       expr);
	      DFS_write_tree (ob, cstate, initial, ref_p, ref_p, single_p);
	    }
	}

      /* See if we found an SCC.  An SCC root is a node whose LOW value
	 never dropped below its own discovery number.  */
      if (cstate->low == cstate->dfsnum)
	{
	  unsigned first, size;
	  tree x;

	  /* If we are re-walking a single leaf-SCC just return and
	     let the caller access the sccstack.  */
	  if (single_p)
	    return;

	  /* Pop the SCC and compute its size.  */
	  first = sccstack.length ();
	  do
	    {
	      x = sccstack[--first].t;
	    }
	  while (x != expr);
	  size = sccstack.length () - first;

	  /* No need to compute hashes for LTRANS units, we don't perform
	     any merging there.  */
	  hashval_t scc_hash = 0;
	  unsigned scc_entry_len = 0;
	  if (!flag_wpa)
	    {
	      scc_hash = hash_scc (ob, first, size);

	      /* Put the entries with the least number of collisions first.
		 Scan runs of equal hashes and remember the shortest run.  */
	      unsigned entry_start = 0;
	      scc_entry_len = size + 1;
	      for (unsigned i = 0; i < size;)
		{
		  unsigned from = i;
		  for (i = i + 1; i < size
		       && (sccstack[first + i].hash
			   == sccstack[first + from].hash); ++i)
		    ;
		  if (i - from < scc_entry_len)
		    {
		      scc_entry_len = i - from;
		      entry_start = from;
		    }
		}
	      /* Rotate the shortest run to the front of the SCC.  */
	      for (unsigned i = 0; i < scc_entry_len; ++i)
		{
		  scc_entry tem = sccstack[first + i];
		  sccstack[first + i] = sccstack[first + entry_start + i];
		  sccstack[first + entry_start + i] = tem;
		}

	      if (scc_entry_len == 1)
		; /* We already sorted SCC deterministically in hash_scc.  */
	      else
		/* Check that we have only one SCC.
		   Naturally we may have conflicts if hash function is not
		   strong enough.  Lets see how far this gets.  */
		{
#ifdef ENABLE_CHECKING
		  gcc_unreachable ();
#endif
		}
	    }

	  /* Write LTO_tree_scc.  */
	  streamer_write_record_start (ob, LTO_tree_scc);
	  streamer_write_uhwi (ob, size);
	  streamer_write_uhwi (ob, scc_hash);

	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
	     All INTEGER_CSTs need to be handled this way as we need
	     their type to materialize them.  Also builtins are handled
	     this way.
	     ??? We still wrap these in LTO_tree_scc so at the
	     input side we can properly identify the tree we want
	     to ultimatively return.  */
	  if (size == 1)
	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
	  else
	    {
	      /* Write the size of the SCC entry candidates.  */
	      streamer_write_uhwi (ob, scc_entry_len);

	      /* Write all headers and populate the streamer cache.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  hashval_t hash = sccstack[first+i].hash;
		  tree t = sccstack[first+i].t;
		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
							      t, hash, &ix);
		  gcc_assert (!exists_p);

		  if (!lto_is_streamable (t))
		    internal_error ("tree code %qs is not supported "
				    "in LTO streams",
				    get_tree_code_name (TREE_CODE (t)));

		  gcc_checking_assert (!streamer_handle_as_builtin_p (t));

		  /* Write the header, containing everything needed to
		     materialize EXPR on the reading side.  */
		  streamer_write_tree_header (ob, t);
		}

	      /* Write the bitpacks and tree references.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);

		  /* Mark the end of the tree.  */
		  streamer_write_zero (ob);
		}
	    }

	  /* Finally truncate the vector.  */
	  sccstack.truncate (first);

	  if (from_state)
	    from_state->low = MIN (from_state->low, cstate->low);
	  return;
	}

      if (from_state)
	from_state->low = MIN (from_state->low, cstate->low);
    }
  gcc_checking_assert (from_state);
  /* EXPR was already on the stack (a back-edge); update the caller's
     LOW value per the Tarjan algorithm.  */
  if (cstate->dfsnum < from_state->dfsnum)
    from_state->low = MIN (cstate->dfsnum, from_state->low);
}
1491
1492
1493 /* Emit the physical representation of tree node EXPR to output block
1494 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
1495 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1496
void
lto_output_tree (struct output_block *ob, tree expr,
		 bool ref_p, bool this_ref_p)
{
  unsigned ix;
  bool existed_p;

  if (expr == NULL_TREE)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  /* Indexable trees (decls, types in the global stream, ...) are
     emitted as references into the appropriate index.  */
  if (this_ref_p && tree_is_indexable (expr))
    {
      lto_output_tree_ref (ob, expr);
      return;
    }

  existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
  if (existed_p)
    {
      /* If a node has already been streamed out, make sure that
	 we don't write it more than once.  Otherwise, the reader
	 will instantiate two different nodes for the same object.  */
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
  else
    {
      /* This is the first time we see EXPR, write all reachable
	 trees to OB.  */
      static bool in_dfs_walk;

      /* Protect against recursion which means disconnect between
	 what tree edges we walk in the DFS walk and what edges
	 we stream out.  */
      gcc_assert (!in_dfs_walk);

      /* Start the DFS walk.  The DFS constructor streams out every
	 SCC of trees reachable from EXPR as a side effect and enters
	 them into the writer cache.  */
      in_dfs_walk = true;
      DFS (ob, expr, ref_p, this_ref_p, false);
      in_dfs_walk = false;

      /* Finally append a reference to the tree we were writing.
	 ??? If expr ended up as a singleton we could have
	 inlined it here and avoid outputting a reference.  */
      existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
      gcc_assert (existed_p);
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
}
1558
1559
1560 /* Output to OB a list of try/catch handlers starting with FIRST. */
1561
1562 static void
1563 output_eh_try_list (struct output_block *ob, eh_catch first)
1564 {
1565 eh_catch n;
1566
1567 for (n = first; n; n = n->next_catch)
1568 {
1569 streamer_write_record_start (ob, LTO_eh_catch);
1570 stream_write_tree (ob, n->type_list, true);
1571 stream_write_tree (ob, n->filter_list, true);
1572 stream_write_tree (ob, n->label, true);
1573 }
1574
1575 streamer_write_record_start (ob, LTO_null);
1576 }
1577
1578
/* Output EH region R to OB.  R may be NULL, in which case a bare
   LTO_null marker is emitted.  */
1582
1583 static void
1584 output_eh_region (struct output_block *ob, eh_region r)
1585 {
1586 enum LTO_tags tag;
1587
1588 if (r == NULL)
1589 {
1590 streamer_write_record_start (ob, LTO_null);
1591 return;
1592 }
1593
1594 if (r->type == ERT_CLEANUP)
1595 tag = LTO_ert_cleanup;
1596 else if (r->type == ERT_TRY)
1597 tag = LTO_ert_try;
1598 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1599 tag = LTO_ert_allowed_exceptions;
1600 else if (r->type == ERT_MUST_NOT_THROW)
1601 tag = LTO_ert_must_not_throw;
1602 else
1603 gcc_unreachable ();
1604
1605 streamer_write_record_start (ob, tag);
1606 streamer_write_hwi (ob, r->index);
1607
1608 if (r->outer)
1609 streamer_write_hwi (ob, r->outer->index);
1610 else
1611 streamer_write_zero (ob);
1612
1613 if (r->inner)
1614 streamer_write_hwi (ob, r->inner->index);
1615 else
1616 streamer_write_zero (ob);
1617
1618 if (r->next_peer)
1619 streamer_write_hwi (ob, r->next_peer->index);
1620 else
1621 streamer_write_zero (ob);
1622
1623 if (r->type == ERT_TRY)
1624 {
1625 output_eh_try_list (ob, r->u.eh_try.first_catch);
1626 }
1627 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1628 {
1629 stream_write_tree (ob, r->u.allowed.type_list, true);
1630 stream_write_tree (ob, r->u.allowed.label, true);
1631 streamer_write_uhwi (ob, r->u.allowed.filter);
1632 }
1633 else if (r->type == ERT_MUST_NOT_THROW)
1634 {
1635 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1636 bitpack_d bp = bitpack_create (ob->main_stream);
1637 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1638 streamer_write_bitpack (&bp);
1639 }
1640
1641 if (r->landing_pads)
1642 streamer_write_hwi (ob, r->landing_pads->index);
1643 else
1644 streamer_write_zero (ob);
1645 }
1646
1647
1648 /* Output landing pad LP to OB. */
1649
1650 static void
1651 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1652 {
1653 if (lp == NULL)
1654 {
1655 streamer_write_record_start (ob, LTO_null);
1656 return;
1657 }
1658
1659 streamer_write_record_start (ob, LTO_eh_landing_pad);
1660 streamer_write_hwi (ob, lp->index);
1661 if (lp->next_lp)
1662 streamer_write_hwi (ob, lp->next_lp->index);
1663 else
1664 streamer_write_zero (ob);
1665
1666 if (lp->region)
1667 streamer_write_hwi (ob, lp->region->index);
1668 else
1669 streamer_write_zero (ob);
1670
1671 stream_write_tree (ob, lp->post_landing_pad, true);
1672 }
1673
1674
1675 /* Output the existing eh_table to OB. */
1676
static void
output_eh_regions (struct output_block *ob, struct function *fn)
{
  /* Nothing is emitted (except the final LTO_null) when the function
     has no EH region tree at all.  */
  if (fn->eh && fn->eh->region_tree)
    {
      unsigned i;
      eh_region eh;
      eh_landing_pad lp;
      tree ttype;

      streamer_write_record_start (ob, LTO_eh_table);

      /* Emit the index of the root of the EH region tree.  */
      streamer_write_hwi (ob, fn->eh->region_tree->index);

      /* Emit all the EH regions in the region array.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
	output_eh_region (ob, eh);

      /* Emit all landing pads.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
	output_eh_lp (ob, lp);

      /* Emit all the runtime type data.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
	stream_write_tree (ob, ttype, true);

      /* Emit the table of action chains.  The representation differs
	 between the ARM EABI unwinder (trees) and the generic one
	 (raw bytes).  */
      if (targetm.arm_eabi_unwinder)
	{
	  tree t;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
	    stream_write_tree (ob, t, true);
	}
      else
	{
	  uchar c;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
	    streamer_write_char_stream (ob->main_stream, c);
	}
    }

  /* The LTO_null either terminates the record or indicates that there
     are no eh_records at all.  */
  streamer_write_record_start (ob, LTO_null);
}
1728
1729
1730 /* Output all of the active ssa names to the ssa_names stream. */
1731
1732 static void
1733 output_ssa_names (struct output_block *ob, struct function *fn)
1734 {
1735 unsigned int i, len;
1736
1737 len = vec_safe_length (SSANAMES (fn));
1738 streamer_write_uhwi (ob, len);
1739
1740 for (i = 1; i < len; i++)
1741 {
1742 tree ptr = (*SSANAMES (fn))[i];
1743
1744 if (ptr == NULL_TREE
1745 || SSA_NAME_IN_FREE_LIST (ptr)
1746 || virtual_operand_p (ptr))
1747 continue;
1748
1749 streamer_write_uhwi (ob, i);
1750 streamer_write_char_stream (ob->main_stream,
1751 SSA_NAME_IS_DEFAULT_DEF (ptr));
1752 if (SSA_NAME_VAR (ptr))
1753 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1754 else
1755 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1756 stream_write_tree (ob, TREE_TYPE (ptr), true);
1757 }
1758
1759 streamer_write_zero (ob);
1760 }
1761
1762
1763 /* Output a wide-int. */
1764
1765 static void
1766 streamer_write_wi (struct output_block *ob,
1767 const widest_int &w)
1768 {
1769 int len = w.get_len ();
1770
1771 streamer_write_uhwi (ob, w.get_precision ());
1772 streamer_write_uhwi (ob, len);
1773 for (int i = 0; i < len; i++)
1774 streamer_write_hwi (ob, w.elt (i));
1775 }
1776
1777
1778 /* Output the cfg. */
1779
1780 static void
1781 output_cfg (struct output_block *ob, struct function *fn)
1782 {
1783 struct lto_output_stream *tmp_stream = ob->main_stream;
1784 basic_block bb;
1785
1786 ob->main_stream = ob->cfg_stream;
1787
1788 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1789 profile_status_for_fn (fn));
1790
1791 /* Output the number of the highest basic block. */
1792 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1793
1794 FOR_ALL_BB_FN (bb, fn)
1795 {
1796 edge_iterator ei;
1797 edge e;
1798
1799 streamer_write_hwi (ob, bb->index);
1800
1801 /* Output the successors and the edge flags. */
1802 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1803 FOR_EACH_EDGE (e, ei, bb->succs)
1804 {
1805 streamer_write_uhwi (ob, e->dest->index);
1806 streamer_write_hwi (ob, e->probability);
1807 streamer_write_gcov_count (ob, e->count);
1808 streamer_write_uhwi (ob, e->flags);
1809 }
1810 }
1811
1812 streamer_write_hwi (ob, -1);
1813
1814 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1815 while (bb->next_bb)
1816 {
1817 streamer_write_hwi (ob, bb->next_bb->index);
1818 bb = bb->next_bb;
1819 }
1820
1821 streamer_write_hwi (ob, -1);
1822
1823 /* ??? The cfgloop interface is tied to cfun. */
1824 gcc_assert (cfun == fn);
1825
1826 /* Output the number of loops. */
1827 streamer_write_uhwi (ob, number_of_loops (fn));
1828
1829 /* Output each loop, skipping the tree root which has number zero. */
1830 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1831 {
1832 struct loop *loop = get_loop (fn, i);
1833
1834 /* Write the index of the loop header. That's enough to rebuild
1835 the loop tree on the reader side. Stream -1 for an unused
1836 loop entry. */
1837 if (!loop)
1838 {
1839 streamer_write_hwi (ob, -1);
1840 continue;
1841 }
1842 else
1843 streamer_write_hwi (ob, loop->header->index);
1844
1845 /* Write everything copy_loop_info copies. */
1846 streamer_write_enum (ob->main_stream,
1847 loop_estimation, EST_LAST, loop->estimate_state);
1848 streamer_write_hwi (ob, loop->any_upper_bound);
1849 if (loop->any_upper_bound)
1850 streamer_write_wi (ob, loop->nb_iterations_upper_bound);
1851 streamer_write_hwi (ob, loop->any_estimate);
1852 if (loop->any_estimate)
1853 streamer_write_wi (ob, loop->nb_iterations_estimate);
1854
1855 /* Write OMP SIMD related info. */
1856 streamer_write_hwi (ob, loop->safelen);
1857 streamer_write_hwi (ob, loop->dont_vectorize);
1858 streamer_write_hwi (ob, loop->force_vectorize);
1859 stream_write_tree (ob, loop->simduid, true);
1860 }
1861
1862 ob->main_stream = tmp_stream;
1863 }
1864
1865
1866 /* Create the header in the file using OB. If the section type is for
1867 a function, set FN to the decl for that function. */
1868
void
produce_asm (struct output_block *ob, tree fn)
{
  enum lto_section_type section_type = ob->section_type;
  struct lto_function_header header;
  char *section_name;
  struct lto_output_stream *header_stream;

  /* Function-body sections are named after the function's assembler
     name; other sections use the generic per-type name.  */
  if (section_type == LTO_section_function_body)
    {
      const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
      section_name = lto_get_section_name (section_type, name, NULL);
    }
  else
    section_name = lto_get_section_name (section_type, NULL, NULL);

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header is stream computed here.  */
  memset (&header, 0, sizeof (struct lto_function_header));

  /* Write the header.  */
  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  header.compressed_size = 0;

  /* Record the sizes of the individual streams; the reader uses these
     to slice the section back into cfg/main/string parts.  */
  if (section_type == LTO_section_function_body)
    header.cfg_size = ob->cfg_stream->total_size;
  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  header_stream = XCNEW (struct lto_output_stream);
  lto_output_data_stream (header_stream, &header, sizeof header);
  lto_write_stream (header_stream);
  free (header_stream);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  The order must match the sizes recorded in the
     header above.  */
  if (section_type == LTO_section_function_body)
    lto_write_stream (ob->cfg_stream);
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();
}
1916
1917
1918 /* Output the base body of struct function FN using output block OB. */
1919
static void
output_struct_function_base (struct output_block *ob, struct function *fn)
{
  struct bitpack_d bp;
  unsigned i;
  tree t;

  /* Output the static chain and non-local goto save area.  */
  stream_write_tree (ob, fn->static_chain_decl, true);
  stream_write_tree (ob, fn->nonlocal_goto_save_area, true);

  /* Output all the local variables in the function.  */
  streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
  FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
    stream_write_tree (ob, t, true);

  /* Output current IL state of the function.  */
  streamer_write_uhwi (ob, fn->curr_properties);

  /* Write all the attributes for FN.  The bit order here must be
     mirrored exactly by the reading side.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, fn->is_thunk, 1);
  bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
  bp_pack_value (&bp, fn->returns_pcc_struct, 1);
  bp_pack_value (&bp, fn->returns_struct, 1);
  bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
  bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
  bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
  bp_pack_value (&bp, fn->after_inlining, 1);
  bp_pack_value (&bp, fn->stdarg, 1);
  bp_pack_value (&bp, fn->has_nonlocal_label, 1);
  bp_pack_value (&bp, fn->calls_alloca, 1);
  bp_pack_value (&bp, fn->calls_setjmp, 1);
  bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
  bp_pack_value (&bp, fn->has_simduid_loops, 1);
  bp_pack_value (&bp, fn->va_list_fpr_size, 8);
  bp_pack_value (&bp, fn->va_list_gpr_size, 8);

  /* Output the function start and end loci.  */
  stream_output_location (ob, &bp, fn->function_start_locus);
  stream_output_location (ob, &bp, fn->function_end_locus);

  streamer_write_bitpack (&bp);
}
1964
1965
1966 /* Output the body of function NODE->DECL. */
1967
static void
output_function (struct cgraph_node *node)
{
  tree function;
  struct function *fn;
  basic_block bb;
  struct output_block *ob;

  function = node->decl;
  fn = DECL_STRUCT_FUNCTION (function);
  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->symbol = node;

  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  /* Set current_function_decl and cfun.  */
  push_cfun (fn);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  streamer_write_record_start (ob, LTO_function);

  /* Output decls for parameters and args.  */
  stream_write_tree (ob, DECL_RESULT (function), true);
  streamer_write_chain (ob, DECL_ARGUMENTS (function), true);

  /* Output DECL_INITIAL for the function, which contains the tree of
     lexical scopes.  */
  stream_write_tree (ob, DECL_INITIAL (function), true);

  /* We also stream abstract functions where we stream only stuff needed for
     debug info.  */
  if (gimple_has_body_p (function))
    {
      /* Flag for the reader: a full body follows.  */
      streamer_write_uhwi (ob, 1);
      output_struct_function_base (ob, fn);

      /* Output all the SSA names used in the function.  */
      output_ssa_names (ob, fn);

      /* Output any exception handling regions.  */
      output_eh_regions (ob, fn);


      /* We will renumber the statements.  The code that does this uses
	 the same ordering that we use for serializing them so we can use
	 the same code on the other end and not have to write out the
	 statement numbers.  We do not assign UIDs to PHIs here because
	 virtual PHIs get re-computed on-the-fly which would make numbers
	 inconsistent.  */
      set_gimple_stmt_max_uid (cfun, 0);
      FOR_ALL_BB_FN (bb, cfun)
	{
	  gimple_stmt_iterator gsi;
	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);

	      /* Virtual PHIs are not going to be streamed.  */
	      if (!virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}
      /* To avoid keeping duplicate gimple IDs in the statements, renumber
	 virtual phis now.  */
      FOR_ALL_BB_FN (bb, cfun)
	{
	  gimple_stmt_iterator gsi;
	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      if (virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}

      /* Output the code for the function.  */
      FOR_ALL_BB_FN (bb, fn)
	output_bb (ob, bb, fn);

      /* The terminator for this function.  */
      streamer_write_record_start (ob, LTO_null);

      output_cfg (ob, fn);

      pop_cfun ();
   }
  else
    /* Flag for the reader: no body, only the declarations above.  */
    streamer_write_uhwi (ob, 0);

  /* Create a section to hold the pickled output of this function.  */
  produce_asm (ob, function);

  destroy_output_block (ob);
}
2071
/* Output the initializer (DECL_INITIAL) of variable NODE->DECL.  */
2073
2074 static void
2075 output_constructor (struct varpool_node *node)
2076 {
2077 tree var = node->decl;
2078 struct output_block *ob;
2079
2080 ob = create_output_block (LTO_section_function_body);
2081
2082 clear_line_info (ob);
2083 ob->symbol = node;
2084
2085 /* Make string 0 be a NULL string. */
2086 streamer_write_char_stream (ob->string_stream, 0);
2087
2088 /* Output DECL_INITIAL for the function, which contains the tree of
2089 lexical scopes. */
2090 stream_write_tree (ob, DECL_INITIAL (var), true);
2091
2092 /* Create a section to hold the pickled output of this function. */
2093 produce_asm (ob, var);
2094
2095 destroy_output_block (ob);
2096 }
2097
2098
2099 /* Emit toplevel asms. */
2100
2101 void
2102 lto_output_toplevel_asms (void)
2103 {
2104 struct output_block *ob;
2105 struct asm_node *can;
2106 char *section_name;
2107 struct lto_output_stream *header_stream;
2108 struct lto_asm_header header;
2109
2110 if (! asm_nodes)
2111 return;
2112
2113 ob = create_output_block (LTO_section_asm);
2114
2115 /* Make string 0 be a NULL string. */
2116 streamer_write_char_stream (ob->string_stream, 0);
2117
2118 for (can = asm_nodes; can; can = can->next)
2119 {
2120 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2121 streamer_write_hwi (ob, can->order);
2122 }
2123
2124 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2125
2126 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
2127 lto_begin_section (section_name, !flag_wpa);
2128 free (section_name);
2129
2130 /* The entire header stream is computed here. */
2131 memset (&header, 0, sizeof (header));
2132
2133 /* Write the header. */
2134 header.lto_header.major_version = LTO_major_version;
2135 header.lto_header.minor_version = LTO_minor_version;
2136
2137 header.main_size = ob->main_stream->total_size;
2138 header.string_size = ob->string_stream->total_size;
2139
2140 header_stream = XCNEW (struct lto_output_stream);
2141 lto_output_data_stream (header_stream, &header, sizeof (header));
2142 lto_write_stream (header_stream);
2143 free (header_stream);
2144
2145 /* Put all of the gimple and the string table out the asm file as a
2146 block of text. */
2147 lto_write_stream (ob->main_stream);
2148 lto_write_stream (ob->string_stream);
2149
2150 lto_end_section ();
2151
2152 destroy_output_block (ob);
2153 }
2154
2155
2156 /* Copy the function body or variable constructor of NODE without deserializing. */
2157
static void
copy_function_or_variable (struct symtab_node *node)
{
  tree function = node->decl;
  struct lto_file_decl_data *file_data = node->lto_file_data;
  struct lto_output_stream *output_stream = XCNEW (struct lto_output_stream);
  const char *data;
  size_t len;
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
  char *section_name =
    lto_get_section_name (LTO_section_function_body, name, NULL);
  size_t i, j;
  struct lto_in_decl_state *in_state;
  struct lto_out_decl_state *out_state = lto_get_out_decl_state ();

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* We may have renamed the declaration, e.g., a static function. */
  name = lto_get_decl_name_mapping (file_data, name);

  /* Fetch the previously-read section bytes for this body from the
     input file; they are copied verbatim, without deserializing.  */
  data = lto_get_section_data (file_data, LTO_section_function_body,
			       name, &len);
  gcc_assert (data);

  /* Do a bit copy of the function body. */
  lto_output_data_stream (output_stream, data, len);
  lto_write_stream (output_stream);

  /* Copy decls.  The raw body bytes refer to decls by their stream
     indices, so the out state must reproduce the in state's streams
     exactly.  */
  in_state =
    lto_get_function_in_decl_state (node->lto_file_data, function);
  gcc_assert (in_state);

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      size_t n = in_state->streams[i].size;
      tree *trees = in_state->streams[i].trees;
      struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);

      /* The out state must have the same indices as the in state.
	 So just copy the vector.  All the encoders in the out state
	 must be empty where we reach here. */
      gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
      encoder->trees.reserve_exact (n);
      for (j = 0; j < n; j++)
	encoder->trees.safe_push (trees[j]);
    }

  /* Release the borrowed input section and the temporary stream.  */
  lto_free_section_data (file_data, LTO_section_function_body, name,
			 data, len);
  free (output_stream);
  lto_end_section ();
}
2212
2213 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2214
2215 static tree
2216 wrap_refs (tree *tp, int *ws, void *)
2217 {
2218 tree t = *tp;
2219 if (handled_component_p (t)
2220 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL)
2221 {
2222 tree decl = TREE_OPERAND (t, 0);
2223 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2224 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2225 build1 (ADDR_EXPR, ptrtype, decl),
2226 build_int_cst (ptrtype, 0));
2227 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2228 *ws = 0;
2229 }
2230 else if (TREE_CODE (t) == CONSTRUCTOR)
2231 ;
2232 else if (!EXPR_P (t))
2233 *ws = 0;
2234 return NULL_TREE;
2235 }
2236
2237 /* Main entry point from the pass manager. */
2238
void
lto_output (void)
{
  struct lto_out_decl_state *decl_state;
#ifdef ENABLE_CHECKING
  /* In checking builds, track which DECL_UIDs were already emitted so
     a double emission trips an assert.  */
  bitmap output = lto_bitmap_alloc ();
#endif
  int i, n_nodes;
  lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* Initialize the streamer. */
  lto_streamer_init ();

  n_nodes = lto_symtab_encoder_size (encoder);
  /* Process only the functions with bodies. */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
	{
	  if (lto_symtab_encoder_encode_body_p (encoder, node)
	      && !node->alias)
	    {
#ifdef ENABLE_CHECKING
	      gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
	      bitmap_set_bit (output, DECL_UID (node->decl));
#endif
	      /* Each body is streamed under its own decl state, pushed
		 here and recorded below so the decl references can be
		 emitted later by produce_asm_for_decls.  */
	      decl_state = lto_new_out_decl_state ();
	      lto_push_out_decl_state (decl_state);
	      /* Either serialize the body from GIMPLE, or — at WPA
		 time with no body materialized — bit-copy the section
		 from the input file.  */
	      if (gimple_has_body_p (node->decl) || !flag_wpa)
		output_function (node);
	      else
		copy_function_or_variable (node);
	      gcc_assert (lto_get_out_decl_state () == decl_state);
	      lto_pop_out_decl_state ();
	      lto_record_function_out_decl_state (node->decl, decl_state);
	    }
	}
      else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
	{
	  /* Wrap symbol references inside the ctor in a type
	     preserving MEM_REF. */
	  tree ctor = DECL_INITIAL (node->decl);
	  if (ctor && !in_lto_p)
	    walk_tree (&ctor, wrap_refs, NULL, NULL);
	  if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
	      && lto_symtab_encoder_encode_initializer_p (encoder, node)
	      && !node->alias)
	    {
	      timevar_push (TV_IPA_LTO_CTORS_OUT);
#ifdef ENABLE_CHECKING
	      gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
	      bitmap_set_bit (output, DECL_UID (node->decl));
#endif
	      /* Same decl-state discipline as for function bodies.  */
	      decl_state = lto_new_out_decl_state ();
	      lto_push_out_decl_state (decl_state);
	      if (DECL_INITIAL (node->decl) != error_mark_node
		  || !flag_wpa)
		output_constructor (node);
	      else
		copy_function_or_variable (node);
	      gcc_assert (lto_get_out_decl_state () == decl_state);
	      lto_pop_out_decl_state ();
	      lto_record_function_out_decl_state (node->decl, decl_state);
	      timevar_pop (TV_IPA_LTO_CTORS_OUT);
	    }
	}
    }

  /* Emit the callgraph after emitting function bodies.  This needs to
     be done now to make sure that all the statements in every function
     have been renumbered so that edges can be associated with call
     statements using the statement UIDs. */
  output_symtab ();

#ifdef ENABLE_CHECKING
  lto_bitmap_free (output);
#endif
}
2318
2319 /* Write each node in encoded by ENCODER to OB, as well as those reachable
2320 from it and required for correct representation of its semantics.
2321 Each node in ENCODER must be a global declaration or a type. A node
2322 is written only once, even if it appears multiple times in the
2323 vector. Certain transitively-reachable nodes, such as those
2324 representing expressions, may be duplicated, but such nodes
2325 must not appear in ENCODER itself. */
2326
2327 static void
2328 write_global_stream (struct output_block *ob,
2329 struct lto_tree_ref_encoder *encoder)
2330 {
2331 tree t;
2332 size_t index;
2333 const size_t size = lto_tree_ref_encoder_size (encoder);
2334
2335 for (index = 0; index < size; index++)
2336 {
2337 t = lto_tree_ref_encoder_get_tree (encoder, index);
2338 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2339 stream_write_tree (ob, t, false);
2340 }
2341 }
2342
2343
2344 /* Write a sequence of indices into the globals vector corresponding
2345 to the trees in ENCODER. These are used by the reader to map the
2346 indices used to refer to global entities within function bodies to
2347 their referents. */
2348
2349 static void
2350 write_global_references (struct output_block *ob,
2351 struct lto_output_stream *ref_stream,
2352 struct lto_tree_ref_encoder *encoder)
2353 {
2354 tree t;
2355 uint32_t index;
2356 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2357
2358 /* Write size as 32-bit unsigned. */
2359 lto_output_data_stream (ref_stream, &size, sizeof (int32_t));
2360
2361 for (index = 0; index < size; index++)
2362 {
2363 uint32_t slot_num;
2364
2365 t = lto_tree_ref_encoder_get_tree (encoder, index);
2366 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2367 gcc_assert (slot_num != (unsigned)-1);
2368 lto_output_data_stream (ref_stream, &slot_num, sizeof slot_num);
2369 }
2370 }
2371
2372
2373 /* Write all the streams in an lto_out_decl_state STATE using
2374 output block OB and output stream OUT_STREAM. */
2375
2376 void
2377 lto_output_decl_state_streams (struct output_block *ob,
2378 struct lto_out_decl_state *state)
2379 {
2380 int i;
2381
2382 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2383 write_global_stream (ob, &state->streams[i]);
2384 }
2385
2386
2387 /* Write all the references in an lto_out_decl_state STATE using
2388 output block OB and output stream OUT_STREAM. */
2389
2390 void
2391 lto_output_decl_state_refs (struct output_block *ob,
2392 struct lto_output_stream *out_stream,
2393 struct lto_out_decl_state *state)
2394 {
2395 unsigned i;
2396 uint32_t ref;
2397 tree decl;
2398
2399 /* Write reference to FUNCTION_DECL. If there is not function,
2400 write reference to void_type_node. */
2401 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2402 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2403 gcc_assert (ref != (unsigned)-1);
2404 lto_output_data_stream (out_stream, &ref, sizeof (uint32_t));
2405
2406 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2407 write_global_references (ob, out_stream, &state->streams[i]);
2408 }
2409
2410
2411 /* Return the written size of STATE. */
2412
2413 static size_t
2414 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2415 {
2416 int i;
2417 size_t size;
2418
2419 size = sizeof (int32_t); /* fn_ref. */
2420 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2421 {
2422 size += sizeof (int32_t); /* vector size. */
2423 size += (lto_tree_ref_encoder_size (&state->streams[i])
2424 * sizeof (int32_t));
2425 }
2426 return size;
2427 }
2428
2429
2430 /* Write symbol T into STREAM in CACHE. SEEN specifies symbols we wrote
2431 so far. */
2432
static void
write_symbol (struct streamer_tree_cache_d *cache,
	      struct lto_output_stream *stream,
	      tree t, struct pointer_set_t *seen, bool alias)
{
  const char *name;
  enum gcc_plugin_symbol_kind kind;
  enum gcc_plugin_symbol_visibility visibility;
  unsigned slot_num;
  uint64_t size;
  const char *comdat;
  unsigned char c;

  /* None of the following kinds of symbols are needed in the
     symbol table. */
  if (!TREE_PUBLIC (t)
      || is_builtin_fn (t)
      || DECL_ABSTRACT (t)
      || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
    return;
  gcc_assert (TREE_CODE (t) != RESULT_DECL);

  /* Only variables and functions can appear in the symbol table.  */
  gcc_assert (TREE_CODE (t) == VAR_DECL
	      || TREE_CODE (t) == FUNCTION_DECL);

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));

  /* This behaves like assemble_name_raw in varasm.c, performing the
     same name manipulations that ASM_OUTPUT_LABELREF does. */
  name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));

  /* Emit each assembler name at most once.  */
  if (pointer_set_contains (seen, name))
    return;
  pointer_set_insert (seen, name);

  streamer_tree_cache_lookup (cache, t, &slot_num);
  gcc_assert (slot_num != (unsigned)-1);

  /* Classify the symbol for the plugin: undefined references first,
     then the definition kinds.  */
  if (DECL_EXTERNAL (t))
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKUNDEF;
      else
	kind = GCCPK_UNDEF;
    }
  else
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKDEF;
      else if (DECL_COMMON (t))
	kind = GCCPK_COMMON;
      else
	kind = GCCPK_DEF;

      /* When something is defined, it should have node attached. */
      gcc_assert (alias || TREE_CODE (t) != VAR_DECL
		  || varpool_node::get (t)->definition);
      gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
		  || (cgraph_node::get (t)
		      && cgraph_node::get (t)->definition));
    }

  /* Imitate what default_elf_asm_output_external do.
     When symbol is external, we need to output it with DEFAULT visibility
     when compiling with -fvisibility=default, while with HIDDEN visibility
     when symbol has attribute (visibility("hidden")) specified.
     targetm.binds_local_p check DECL_VISIBILITY_SPECIFIED and gets this
     right. */

  if (DECL_EXTERNAL (t)
      && !targetm.binds_local_p (t))
    visibility = GCCPV_DEFAULT;
  else
    switch (DECL_VISIBILITY (t))
      {
      case VISIBILITY_DEFAULT:
	visibility = GCCPV_DEFAULT;
	break;
      case VISIBILITY_PROTECTED:
	visibility = GCCPV_PROTECTED;
	break;
      case VISIBILITY_HIDDEN:
	visibility = GCCPV_HIDDEN;
	break;
      case VISIBILITY_INTERNAL:
	visibility = GCCPV_INTERNAL;
	break;
      }

  /* Common symbols carry their size so the linker can pick the largest
     instance; everyone else gets 0.  */
  if (kind == GCCPK_COMMON
      && DECL_SIZE_UNIT (t)
      && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
    size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
  else
    size = 0;

  if (DECL_ONE_ONLY (t))
    comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
  else
    comdat = "";

  /* Emit the record: NUL-terminated name and comdat group, one byte
     each for kind and visibility, 8 bytes of size, then the 4-byte
     writer-cache slot number.  */
  lto_output_data_stream (stream, name, strlen (name) + 1);
  lto_output_data_stream (stream, comdat, strlen (comdat) + 1);
  c = (unsigned char) kind;
  lto_output_data_stream (stream, &c, 1);
  c = (unsigned char) visibility;
  lto_output_data_stream (stream, &c, 1);
  lto_output_data_stream (stream, &size, 8);
  lto_output_data_stream (stream, &slot_num, 4);
}
2543
2544 /* Return true if NODE should appear in the plugin symbol table. */
2545
2546 bool
2547 output_symbol_p (symtab_node *node)
2548 {
2549 struct cgraph_node *cnode;
2550 if (!node->real_symbol_p ())
2551 return false;
2552 /* We keep external functions in symtab for sake of inlining
2553 and devirtualization. We do not want to see them in symbol table as
2554 references unless they are really used. */
2555 cnode = dyn_cast <cgraph_node *> (node);
2556 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2557 && cnode->callers)
2558 return true;
2559
2560 /* Ignore all references from external vars initializers - they are not really
2561 part of the compilation unit until they are used by folding. Some symbols,
2562 like references to external construction vtables can not be referred to at all.
2563 We decide this at can_refer_decl_in_current_unit_p. */
2564 if (!node->definition || DECL_EXTERNAL (node->decl))
2565 {
2566 int i;
2567 struct ipa_ref *ref;
2568 for (i = 0; node->iterate_referring (i, ref); i++)
2569 {
2570 if (ref->use == IPA_REF_ALIAS)
2571 continue;
2572 if (is_a <cgraph_node *> (ref->referring))
2573 return true;
2574 if (!DECL_EXTERNAL (ref->referring->decl))
2575 return true;
2576 }
2577 return false;
2578 }
2579 return true;
2580 }
2581
2582
/* Write an IL symbol table to a new LTO section, covering the symbols
   encoded in OB's decl state.  */
2585
2586 static void
2587 produce_symtab (struct output_block *ob)
2588 {
2589 struct streamer_tree_cache_d *cache = ob->writer_cache;
2590 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2591 struct pointer_set_t *seen;
2592 struct lto_output_stream stream;
2593 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2594 lto_symtab_encoder_iterator lsei;
2595
2596 lto_begin_section (section_name, false);
2597 free (section_name);
2598
2599 seen = pointer_set_create ();
2600 memset (&stream, 0, sizeof (stream));
2601
2602 /* Write the symbol table.
2603 First write everything defined and then all declarations.
2604 This is necessary to handle cases where we have duplicated symbols. */
2605 for (lsei = lsei_start (encoder);
2606 !lsei_end_p (lsei); lsei_next (&lsei))
2607 {
2608 symtab_node *node = lsei_node (lsei);
2609
2610 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2611 continue;
2612 write_symbol (cache, &stream, node->decl, seen, false);
2613 }
2614 for (lsei = lsei_start (encoder);
2615 !lsei_end_p (lsei); lsei_next (&lsei))
2616 {
2617 symtab_node *node = lsei_node (lsei);
2618
2619 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2620 continue;
2621 write_symbol (cache, &stream, node->decl, seen, false);
2622 }
2623
2624 lto_write_stream (&stream);
2625 pointer_set_destroy (seen);
2626
2627 lto_end_section ();
2628 }
2629
2630
2631 /* This pass is run after all of the functions are serialized and all
2632 of the IPA passes have written their serialized forms. This pass
2633 causes the vector of all of the global decls and types used from
2634 this file to be written in to a section that can then be read in to
2635 recover these on other side. */
2636
void
produce_asm_for_decls (void)
{
  struct lto_out_decl_state *out_state;
  struct lto_out_decl_state *fn_out_state;
  struct lto_decl_header header;
  char *section_name;
  struct output_block *ob;
  struct lto_output_stream *header_stream, *decl_state_stream;
  unsigned idx, num_fns;
  size_t decl_state_size;
  int32_t num_decl_states;

  ob = create_output_block (LTO_section_decls);
  ob->global = true;

  memset (&header, 0, sizeof (struct lto_decl_header));

  section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* Make string 0 be a NULL string. */
  streamer_write_char_stream (ob->string_stream, 0);

  gcc_assert (!alias_pairs);

  /* Get rid of the global decl state hash tables to save some memory. */
  out_state = lto_get_out_decl_state ();
  for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
    if (out_state->streams[i].tree_hash_table)
      {
	delete out_state->streams[i].tree_hash_table;
	out_state->streams[i].tree_hash_table = NULL;
      }

  /* Write the global symbols: first the global decl state, then the
     per-function decl states recorded while streaming bodies.  */
  lto_output_decl_state_streams (ob, out_state);
  num_fns = lto_function_decl_states.length ();
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_streams (ob, fn_out_state);
    }

  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  /* Currently not used.  This field would allow us to preallocate
     the globals vector, so that it need not be resized as it is extended. */
  header.num_nodes = -1;

  /* Compute the total size of all decl out states. */
  decl_state_size = sizeof (int32_t);
  decl_state_size += lto_out_decl_state_written_size (out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      decl_state_size += lto_out_decl_state_written_size (fn_out_state);
    }
  header.decl_state_size = decl_state_size;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  /* Emit the header first; the reader needs the sizes recorded above
     to locate the streams that follow.  */
  header_stream = XCNEW (struct lto_output_stream);
  lto_output_data_stream (header_stream, &header, sizeof header);
  lto_write_stream (header_stream);
  free (header_stream);

  /* Write the main out-decl state, followed by out-decl states of
     functions. */
  decl_state_stream = XCNEW (struct lto_output_stream);
  num_decl_states = num_fns + 1;
  lto_output_data_stream (decl_state_stream, &num_decl_states,
			  sizeof (num_decl_states));
  lto_output_decl_state_refs (ob, decl_state_stream, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_refs (ob, decl_state_stream, fn_out_state);
    }
  lto_write_stream (decl_state_stream);
  free (decl_state_stream);

  /* Finally, the pickled trees and the string table.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  /* Write the symbol table.  It is used by linker to determine dependencies
     and thus we can skip it for WPA. */
  if (!flag_wpa)
    produce_symtab (ob);

  /* Write command line opts. */
  lto_write_options ();

  /* Deallocate memory and clean up. */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_delete_out_decl_state (fn_out_state);
    }
  lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
  lto_function_decl_states.release ();
  destroy_output_block (ob);
}