lto-streamer.h (struct output_block): Remove global.
[gcc.git] / gcc / lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2014 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "stor-layout.h"
29 #include "stringpool.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "hashtab.h"
35 #include "basic-block.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-expr.h"
39 #include "is-a.h"
40 #include "gimple.h"
41 #include "gimple-iterator.h"
42 #include "gimple-ssa.h"
43 #include "tree-ssanames.h"
44 #include "tree-pass.h"
45 #include "function.h"
46 #include "diagnostic-core.h"
47 #include "inchash.h"
48 #include "except.h"
49 #include "lto-symtab.h"
50 #include "lto-streamer.h"
51 #include "data-streamer.h"
52 #include "gimple-streamer.h"
53 #include "tree-streamer.h"
54 #include "streamer-hooks.h"
55 #include "cfgloop.h"
56 #include "builtins.h"
57
58
59 static void lto_write_tree (struct output_block*, tree, bool);
60
61 /* Clear the current location info stored in output block OB, so the
   next location streamed through lto_output_location is emitted in
   full rather than delta-encoded.  */
62
63 static void
64 clear_line_info (struct output_block *ob)
65 {
66 ob->current_file = NULL;
67 ob->current_line = 0;
68 ob->current_col = 0;
69 }
70
71
72 /* Create the output block and return it. SECTION_TYPE is
73 LTO_section_function_body or LTO_static_initializer. */
74
75 struct output_block *
76 create_output_block (enum lto_section_type section_type)
77 {
78 struct output_block *ob = XCNEW (struct output_block);
79
80 ob->section_type = section_type;
81 ob->decl_state = lto_get_out_decl_state ();
82 ob->main_stream = XCNEW (struct lto_output_stream);
83 ob->string_stream = XCNEW (struct lto_output_stream);
84 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
85
86 if (section_type == LTO_section_function_body)
87 ob->cfg_stream = XCNEW (struct lto_output_stream);
88
89 clear_line_info (ob);
90
91 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
92 gcc_obstack_init (&ob->obstack);
93
94 return ob;
95 }
96
97
98 /* Destroy the output block OB. */
99
100 void
101 destroy_output_block (struct output_block *ob)
102 {
103 enum lto_section_type section_type = ob->section_type;
104
105 delete ob->string_hash_table;
106 ob->string_hash_table = NULL;
107
108 free (ob->main_stream);
109 free (ob->string_stream);
110 if (section_type == LTO_section_function_body)
111 free (ob->cfg_stream);
112
113 streamer_tree_cache_delete (ob->writer_cache);
114 obstack_free (&ob->obstack, NULL);
115
116 free (ob);
117 }
118
119
120 /* Look up NODE in the type table and write the index for it to OB. */
121
122 static void
123 output_type_ref (struct output_block *ob, tree node)
124 {
125 streamer_write_record_start (ob, LTO_type_ref);
126 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
127 }
128
129
130 /* Return true if tree node T is written to various tables. For these
131 nodes, we sometimes want to write their physical representation
132 (via lto_output_tree), and sometimes we need to emit an index
133 reference into a table (via lto_output_tree_ref). */
134
135 static bool
136 tree_is_indexable (tree t)
137 {
138 /* Parameters and return values of functions of variably modified types
139 must go to global stream, because they may be used in the type
140 definition. */
141 if (TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
142 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
143 /* IMPORTED_DECL is put into BLOCK and thus it never can be shared. */
144 else if (TREE_CODE (t) == IMPORTED_DECL)
145 return false;
146 else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
147 || TREE_CODE (t) == TYPE_DECL
148 || TREE_CODE (t) == CONST_DECL
149 || TREE_CODE (t) == NAMELIST_DECL)
150 && decl_function_context (t))
151 return false;
152 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
153 return false;
154 /* Variably modified types need to be streamed alongside function
155 bodies because they can refer to local entities. Together with
156 them we have to localize their members as well.
157 ??? In theory that includes non-FIELD_DECLs as well. */
158 else if (TYPE_P (t)
159 && variably_modified_type_p (t, NULL_TREE))
160 return false;
161 else if (TREE_CODE (t) == FIELD_DECL
162 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
163 return false;
164 else
165 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
166 }
167
168
169 /* Output info about new location into bitpack BP.
170 After outputting bitpack, lto_output_location_data has
171 to be done to output actual data. */
172
173 void
174 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
175 location_t loc)
176 {
177 expanded_location xloc;
178
179 loc = LOCATION_LOCUS (loc);
180 bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
181 if (loc == UNKNOWN_LOCATION)
182 return;
183
184 xloc = expand_location (loc);
185
   /* Delta-encode against the last location streamed from OB: one change
      bit per field first, then only the fields that actually changed.  */
186 bp_pack_value (bp, ob->current_file != xloc.file, 1);
187 bp_pack_value (bp, ob->current_line != xloc.line, 1);
188 bp_pack_value (bp, ob->current_col != xloc.column, 1);
189
190 if (ob->current_file != xloc.file)
191 bp_pack_var_len_unsigned (bp,
192 streamer_string_index (ob, xloc.file,
193 strlen (xloc.file) + 1,
194 true));
195 ob->current_file = xloc.file;
196
197 if (ob->current_line != xloc.line)
198 bp_pack_var_len_unsigned (bp, xloc.line);
199 ob->current_line = xloc.line;
200
201 if (ob->current_col != xloc.column)
202 bp_pack_var_len_unsigned (bp, xloc.column);
203 ob->current_col = xloc.column;
204 }
205
206
207 /* If EXPR is an indexable tree node, output a reference to it to
208 output block OB. Otherwise, output the physical representation of
209 EXPR to OB. */
210
211 static void
212 lto_output_tree_ref (struct output_block *ob, tree expr)
213 {
214 enum tree_code code;
215
216 if (TYPE_P (expr))
217 {
218 output_type_ref (ob, expr);
219 return;
220 }
221
222 code = TREE_CODE (expr);
223 switch (code)
224 {
225 case SSA_NAME:
226 streamer_write_record_start (ob, LTO_ssa_name_ref);
227 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
228 break;
229
230 case FIELD_DECL:
231 streamer_write_record_start (ob, LTO_field_decl_ref);
232 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
233 break;
234
235 case FUNCTION_DECL:
236 streamer_write_record_start (ob, LTO_function_decl_ref);
237 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
238 break;
239
240 case VAR_DECL:
241 case DEBUG_EXPR_DECL:
242 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
      /* Fall through: streamed as a global decl reference, like PARM_DECL.  */
243 case PARM_DECL:
244 streamer_write_record_start (ob, LTO_global_decl_ref);
245 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
246 break;
247
248 case CONST_DECL:
249 streamer_write_record_start (ob, LTO_const_decl_ref);
250 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
251 break;
252
253 case IMPORTED_DECL:
254 gcc_assert (decl_function_context (expr) == NULL);
255 streamer_write_record_start (ob, LTO_imported_decl_ref);
256 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
257 break;
258
259 case TYPE_DECL:
260 streamer_write_record_start (ob, LTO_type_decl_ref);
261 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
262 break;
263
264 case NAMELIST_DECL:
265 streamer_write_record_start (ob, LTO_namelist_decl_ref);
266 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
267 break;
268
269 case NAMESPACE_DECL:
270 streamer_write_record_start (ob, LTO_namespace_decl_ref);
271 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
272 break;
273
274 case LABEL_DECL:
275 streamer_write_record_start (ob, LTO_label_decl_ref);
276 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
277 break;
278
279 case RESULT_DECL:
280 streamer_write_record_start (ob, LTO_result_decl_ref);
281 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
282 break;
283
284 case TRANSLATION_UNIT_DECL:
285 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
286 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
287 break;
288
289 default:
290 /* No other node is indexable, so it should have been handled by
291 lto_output_tree. */
292 gcc_unreachable ();
293 }
294 }
295
296
297 /* Return true if EXPR is a tree node that can be written to disk. */
298
299 static inline bool
300 lto_is_streamable (tree expr)
301 {
302 enum tree_code code = TREE_CODE (expr);
303
304 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
305 name version in lto_output_tree_ref (see output_ssa_names). */
306 return !is_lang_specific (expr)
307 && code != SSA_NAME
308 && code != CALL_EXPR
309 && code != LANG_TYPE
310 && code != MODIFY_EXPR
311 && code != INIT_EXPR
312 && code != TARGET_EXPR
313 && code != BIND_EXPR
314 && code != WITH_CLEANUP_EXPR
315 && code != STATEMENT_LIST
316 && (code == CASE_LABEL_EXPR
317 || code == DECL_EXPR
318 || TREE_CODE_CLASS (code) != tcc_statement);
319 }
320
321
322 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
323
324 static tree
325 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
326 {
327 gcc_checking_assert (DECL_P (expr)
328 && TREE_CODE (expr) != FUNCTION_DECL
329 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
330
331 /* Handle DECL_INITIAL for symbols. */
332 tree initial = DECL_INITIAL (expr);
333 if (TREE_CODE (expr) == VAR_DECL
334 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
335 && !DECL_IN_CONSTANT_POOL (expr)
336 && initial)
337 {
338 varpool_node *vnode;
339 /* Extra section needs about 30 bytes; do not produce it for simple
340 scalar values. */
341 if (TREE_CODE (DECL_INITIAL (expr)) == CONSTRUCTOR
342 || !(vnode = varpool_node::get (expr))
343 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
344 initial = error_mark_node;
345 }
346
347 return initial;
348 }
349
350
351 /* Write a physical representation of tree node EXPR to output block
352 OB. If REF_P is true, the leaves of EXPR are emitted as references
353 via lto_output_tree_ref. IX is the index into the streamer cache
354 where EXPR is stored. */
355
356 static void
357 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
358 {
359 /* Pack all the non-pointer fields in EXPR into a bitpack and write
360 the resulting bitpack. */
361 bitpack_d bp = bitpack_create (ob->main_stream);
362 streamer_pack_tree_bitfields (ob, &bp, expr);
363 streamer_write_bitpack (&bp);
364
365 /* Write all the pointer fields in EXPR. */
366 streamer_write_tree_body (ob, expr, ref_p);
367
368 /* Write any LTO-specific data to OB. */
369 if (DECL_P (expr)
370 && TREE_CODE (expr) != FUNCTION_DECL
371 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
372 {
373 /* Handle DECL_INITIAL for symbols. */
374 tree initial = get_symbol_initial_value
375 (ob->decl_state->symtab_node_encoder, expr);
376 stream_write_tree (ob, initial, ref_p);
377 }
378 }
379
380 /* Write a physical representation of tree node EXPR to output block
381 OB. If REF_P is true, the leaves of EXPR are emitted as references
382 via lto_output_tree_ref. IX is the index into the streamer cache
383 where EXPR is stored. */
384
385 static void
386 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
387 {
388 if (!lto_is_streamable (expr))
389 internal_error ("tree code %qs is not supported in LTO streams",
390 get_tree_code_name (TREE_CODE (expr)));
391
392 /* Write the header, containing everything needed to materialize
393 EXPR on the reading side. */
394 streamer_write_tree_header (ob, expr);
395
396 lto_write_tree_1 (ob, expr, ref_p);
397
398 /* Mark the end of EXPR. */
399 streamer_write_zero (ob);
400 }
401
402 /* Emit the physical representation of tree node EXPR to output block
403 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
404 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR.
   HASH is EXPR's hash value as computed by the DFS walk; it is recorded
   together with EXPR in the writer cache.  EXPR must not already be in
   the cache. */
405
406 static void
407 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
408 bool ref_p, bool this_ref_p)
409 {
410 unsigned ix;
411
412 gcc_checking_assert (expr != NULL_TREE
413 && !(this_ref_p && tree_is_indexable (expr)));
414
415 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
416 expr, hash, &ix);
417 gcc_assert (!exists_p);
418 if (streamer_handle_as_builtin_p (expr))
419 {
420 /* MD and NORMAL builtins do not need to be written out
421 completely as they are always instantiated by the
422 compiler on startup. The only builtins that need to
423 be written out are BUILT_IN_FRONTEND. For all other
424 builtins, we simply write the class and code. */
425 streamer_write_builtin (ob, expr);
426 }
427 else if (TREE_CODE (expr) == INTEGER_CST
428 && !TREE_OVERFLOW (expr))
429 {
430 /* Shared INTEGER_CST nodes are special because they need their
431 original type to be materialized by the reader (to implement
432 TYPE_CACHED_VALUES). */
433 streamer_write_integer_cst (ob, expr, ref_p);
434 }
435 else
436 {
437 /* This is the first time we see EXPR, write its fields
438 to OB. */
439 lto_write_tree (ob, expr, ref_p);
440 }
441 }
442
/* Depth-first walker over the tree graph that collects strongly
   connected components of trees onto SCCSTACK, for SCC-at-a-time
   streaming.  The whole walk happens in the constructor.  */
443 class DFS
444 {
445 public:
446 DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
447 bool single_p);
448 ~DFS ();
449
   /* One tree together with its streaming hash, as pushed on SCCSTACK.  */
450 struct scc_entry
451 {
452 tree t;
453 hashval_t hash;
454 };
455 vec<scc_entry> sccstack;
456
457 private:
   /* Per-tree Tarjan state: DFS number and low-link.  */
458 struct sccs
459 {
460 unsigned int dfsnum;
461 unsigned int low;
462 };
463
464 static int scc_entry_compare (const void *, const void *);
465
466 void DFS_write_tree_body (struct output_block *ob,
467 tree expr, sccs *expr_state, bool ref_p,
468 bool single_p);
469
470 void DFS_write_tree (struct output_block *ob, sccs *from_state,
471 tree expr, bool ref_p, bool this_ref_p,
472 bool single_p);
473 hashval_t
474 hash_scc (struct output_block *ob, unsigned first, unsigned size);
475
476 unsigned int next_dfs_num;   /* Next DFS number to hand out.  */
477 struct pointer_map_t *sccstate;   /* Maps trees to their sccs state.  */
478 struct obstack sccstate_obstack;   /* Backing storage for sccs nodes.  */
479 };
480
481 DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
482 bool single_p)
483 {
484 sccstack.create (0);
485 sccstate = pointer_map_create ();
486 gcc_obstack_init (&sccstate_obstack);
487 next_dfs_num = 1;
488 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p, single_p);
489 }
490
491 DFS::~DFS ()
492 {
493 sccstack.release ();
494 pointer_map_destroy (sccstate);
495 obstack_free (&sccstate_obstack, NULL);
496 }
497
498 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
499 DFS recurse for all tree edges originating from it. */
500
501 void
502 DFS::DFS_write_tree_body (struct output_block *ob,
503 tree expr, sccs *expr_state, bool ref_p,
504 bool single_p)
505 {
506 #define DFS_follow_tree_edge(DEST) \
507 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p, single_p)
508
509 enum tree_code code;
510
511 code = TREE_CODE (expr);
512
513 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
514 {
515 if (TREE_CODE (expr) != IDENTIFIER_NODE)
516 DFS_follow_tree_edge (TREE_TYPE (expr));
517 }
518
519 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
520 {
521 for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
522 DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
523 }
524
525 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
526 {
527 DFS_follow_tree_edge (TREE_REALPART (expr));
528 DFS_follow_tree_edge (TREE_IMAGPART (expr));
529 }
530
531 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
532 {
533 /* Drop names that were created for anonymous entities. */
534 if (DECL_NAME (expr)
535 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
536 && ANON_AGGRNAME_P (DECL_NAME (expr)))
537 ;
538 else
539 DFS_follow_tree_edge (DECL_NAME (expr));
540 DFS_follow_tree_edge (DECL_CONTEXT (expr));
541 }
542
543 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
544 {
545 DFS_follow_tree_edge (DECL_SIZE (expr));
546 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
547
548 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
549 special handling in LTO, it must be handled by streamer hooks. */
550
551 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
552
553 /* Do not follow DECL_ABSTRACT_ORIGIN. We cannot handle debug information
554 for early inlining so drop it on the floor instead of ICEing in
555 dwarf2out.c. */
556
557 if ((TREE_CODE (expr) == VAR_DECL
558 || TREE_CODE (expr) == PARM_DECL)
559 && DECL_HAS_VALUE_EXPR_P (expr))
560 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
561 if (TREE_CODE (expr) == VAR_DECL)
562 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
563 }
564
565 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
566 {
567 if (TREE_CODE (expr) == TYPE_DECL)
568 DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
569 }
570
571 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
572 {
573 /* Make sure we don't inadvertently set the assembler name. */
574 if (DECL_ASSEMBLER_NAME_SET_P (expr))
575 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
576 }
577
578 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
579 {
580 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
581 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
582 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
583 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
584 DFS_follow_tree_edge (DECL_FCONTEXT (expr));
585 }
586
587 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
588 {
589 DFS_follow_tree_edge (DECL_VINDEX (expr));
590 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
591 /* Do not DECL_FUNCTION_SPECIFIC_TARGET. They will be regenerated. */
592 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
593 }
594
595 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
596 {
597 DFS_follow_tree_edge (TYPE_SIZE (expr));
598 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
599 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
600 DFS_follow_tree_edge (TYPE_NAME (expr));
601 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
602 reconstructed during fixup. */
603 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
604 during fixup. */
605 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
606 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
607 /* TYPE_CANONICAL is re-computed during type merging, so no need
608 to follow it here. */
609 DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
610 }
611
612 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
613 {
614 if (TREE_CODE (expr) == ENUMERAL_TYPE)
615 DFS_follow_tree_edge (TYPE_VALUES (expr));
616 else if (TREE_CODE (expr) == ARRAY_TYPE)
617 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
618 else if (RECORD_OR_UNION_TYPE_P (expr))
619 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
620 DFS_follow_tree_edge (t);
621 else if (TREE_CODE (expr) == FUNCTION_TYPE
622 || TREE_CODE (expr) == METHOD_TYPE)
623 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
624
625 if (!POINTER_TYPE_P (expr))
626 DFS_follow_tree_edge (TYPE_MINVAL (expr));
627 DFS_follow_tree_edge (TYPE_MAXVAL (expr));
628 if (RECORD_OR_UNION_TYPE_P (expr))
629 DFS_follow_tree_edge (TYPE_BINFO (expr));
630 }
631
632 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
633 {
634 DFS_follow_tree_edge (TREE_PURPOSE (expr));
635 DFS_follow_tree_edge (TREE_VALUE (expr));
636 DFS_follow_tree_edge (TREE_CHAIN (expr));
637 }
638
639 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
640 {
641 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
642 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
643 }
644
645 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
646 {
647 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
648 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
649 DFS_follow_tree_edge (TREE_BLOCK (expr));
650 }
651
652 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
653 {
654 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
655 /* ??? FIXME. See also streamer_write_chain. */
656 if (!(VAR_OR_FUNCTION_DECL_P (t)
657 && DECL_EXTERNAL (t)))
658 DFS_follow_tree_edge (t);
659
660 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
661
662 /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
663 handle - those that represent inlined function scopes.
664 For the rest, drop them on the floor instead of ICEing
665 in dwarf2out.c. */
666 if (inlined_function_outer_scope_p (expr))
667 {
668 tree ultimate_origin = block_ultimate_origin (expr);
669 DFS_follow_tree_edge (ultimate_origin);
670 }
671 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
672 information for early inlined BLOCKs so drop it on the floor instead
673 of ICEing in dwarf2out.c. */
674
675 /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
676 streaming time. */
677
678 /* Do not output BLOCK_SUBBLOCKS. Instead on streaming-in this
679 list is re-constructed from BLOCK_SUPERCONTEXT. */
680 }
681
682 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
683 {
684 unsigned i;
685 tree t;
686
687 /* Note that the number of BINFO slots has already been emitted in
688 EXPR's header (see streamer_write_tree_header) because this length
689 is needed to build the empty BINFO node on the reader side. */
690 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
691 DFS_follow_tree_edge (t);
692 DFS_follow_tree_edge (BINFO_OFFSET (expr));
693 DFS_follow_tree_edge (BINFO_VTABLE (expr));
694 DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));
695
696 /* The number of BINFO_BASE_ACCESSES has already been emitted in
697 EXPR's bitfield section. */
698 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
699 DFS_follow_tree_edge (t);
700
701 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
702 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
703 }
704
705 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
706 {
707 unsigned i;
708 tree index, value;
709
710 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
711 {
712 DFS_follow_tree_edge (index);
713 DFS_follow_tree_edge (value);
714 }
715 }
716
717 if (code == OMP_CLAUSE)
718 {
719 int i;
720 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
721 DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
722 DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
723 }
724
725 #undef DFS_follow_tree_edge
726 }
727
728 /* Return a hash value for the tree T.
729 CACHE holds hash values of trees outside current SCC. MAP, if non-NULL,
730 may hold hash values of trees inside current SCC. */
731
732 static hashval_t
733 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
734 {
735 inchash hstate;
736
737 #define visit(SIBLING) \
738 do { \
739 unsigned ix; \
740 if (!SIBLING) \
741 hstate.add_int (0); \
742 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
743 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
744 else if (map) \
745 hstate.add_int (*map->get (SIBLING)); \
746 else \
747 hstate.add_int (1); \
748 } while (0)
749
750 /* Hash TS_BASE. */
751 enum tree_code code = TREE_CODE (t);
752 hstate.add_int (code);
753 if (!TYPE_P (t))
754 {
755 hstate.add_flag (TREE_SIDE_EFFECTS (t));
756 hstate.add_flag (TREE_CONSTANT (t));
757 hstate.add_flag (TREE_READONLY (t));
758 hstate.add_flag (TREE_PUBLIC (t));
759 }
760 hstate.add_flag (TREE_ADDRESSABLE (t));
761 hstate.add_flag (TREE_THIS_VOLATILE (t));
762 if (DECL_P (t))
763 hstate.add_flag (DECL_UNSIGNED (t));
764 else if (TYPE_P (t))
765 hstate.add_flag (TYPE_UNSIGNED (t));
766 if (TYPE_P (t))
767 hstate.add_flag (TYPE_ARTIFICIAL (t));
768 else
769 hstate.add_flag (TREE_NO_WARNING (t));
770 hstate.add_flag (TREE_NOTHROW (t));
771 hstate.add_flag (TREE_STATIC (t));
772 hstate.add_flag (TREE_PROTECTED (t));
773 hstate.add_flag (TREE_DEPRECATED (t));
774 if (code != TREE_BINFO)
775 hstate.add_flag (TREE_PRIVATE (t));
776 if (TYPE_P (t))
777 {
778 hstate.add_flag (TYPE_SATURATING (t));
779 hstate.add_flag (TYPE_ADDR_SPACE (t));
780 }
781 else if (code == SSA_NAME)
782 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
783 hstate.commit_flag ();
784
785 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
786 {
787 int i;
788 hstate.add_wide_int (TREE_INT_CST_NUNITS (t));
789 hstate.add_wide_int (TREE_INT_CST_EXT_NUNITS (t));
790 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
791 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
792 }
793
794 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
795 {
796 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
797 hstate.add_flag (r.cl);
798 hstate.add_flag (r.sign);
799 hstate.add_flag (r.signalling);
800 hstate.add_flag (r.canonical);
801 hstate.commit_flag ();
802 hstate.add_int (r.uexp);
803 hstate.add (r.sig, sizeof (r.sig));
804 }
805
806 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
807 {
808 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
809 hstate.add_int (f.mode);
810 hstate.add_int (f.data.low);
811 hstate.add_int (f.data.high);
812 }
813
814 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
815 {
816 hstate.add_wide_int (DECL_MODE (t));
817 hstate.add_flag (DECL_NONLOCAL (t));
818 hstate.add_flag (DECL_VIRTUAL_P (t));
819 hstate.add_flag (DECL_IGNORED_P (t));
820 hstate.add_flag (DECL_ABSTRACT (t));
821 hstate.add_flag (DECL_ARTIFICIAL (t));
822 hstate.add_flag (DECL_USER_ALIGN (t));
823 hstate.add_flag (DECL_PRESERVE_P (t));
824 hstate.add_flag (DECL_EXTERNAL (t));
825 hstate.add_flag (DECL_GIMPLE_REG_P (t));
826 hstate.commit_flag ();
827 hstate.add_int (DECL_ALIGN (t));
828 if (code == LABEL_DECL)
829 {
830 hstate.add_int (EH_LANDING_PAD_NR (t));
831 hstate.add_int (LABEL_DECL_UID (t));
832 }
833 else if (code == FIELD_DECL)
834 {
835 hstate.add_flag (DECL_PACKED (t));
836 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
837 hstate.add_int (DECL_OFFSET_ALIGN (t));
838 }
839 else if (code == VAR_DECL)
840 {
841 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
842 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
843 }
844 if (code == RESULT_DECL
845 || code == PARM_DECL
846 || code == VAR_DECL)
847 {
848 hstate.add_flag (DECL_BY_REFERENCE (t));
849 if (code == VAR_DECL
850 || code == PARM_DECL)
851 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
852 }
853 hstate.commit_flag ();
854 }
855
856 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
857 hstate.add_int (DECL_REGISTER (t));
858
859 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
860 {
861 hstate.add_flag (DECL_COMMON (t));
862 hstate.add_flag (DECL_DLLIMPORT_P (t));
863 hstate.add_flag (DECL_WEAK (t));
864 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
865 hstate.add_flag (DECL_COMDAT (t));
866 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
867 hstate.add_int (DECL_VISIBILITY (t));
868 if (code == VAR_DECL)
869 {
870 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
871 hstate.add_flag (DECL_HARD_REGISTER (t));
872 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
873 }
874 if (TREE_CODE (t) == FUNCTION_DECL)
875 {
876 hstate.add_flag (DECL_FINAL_P (t));
877 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
878 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
879 }
880 hstate.commit_flag ();
881 }
882
883 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
884 {
885 hstate.add_int (DECL_BUILT_IN_CLASS (t));
886 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
887 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
888 hstate.add_flag (DECL_UNINLINABLE (t));
889 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
890 hstate.add_flag (DECL_IS_NOVOPS (t));
891 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
892 hstate.add_flag (DECL_IS_MALLOC (t));
893 hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
894 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
895 hstate.add_flag (DECL_STATIC_CHAIN (t));
896 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
897 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
898 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
899 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
900 hstate.add_flag (DECL_PURE_P (t));
901 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
902 hstate.commit_flag ();
903 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
904 hstate.add_int (DECL_FUNCTION_CODE (t));
905 }
906
907 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
908 {
909 hstate.add_wide_int (TYPE_MODE (t));
910 hstate.add_flag (TYPE_STRING_FLAG (t));
911 hstate.add_flag (TYPE_NO_FORCE_BLK (t));
912 hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
913 hstate.add_flag (TYPE_PACKED (t));
914 hstate.add_flag (TYPE_RESTRICT (t));
915 hstate.add_flag (TYPE_USER_ALIGN (t));
916 hstate.add_flag (TYPE_READONLY (t));
917 if (RECORD_OR_UNION_TYPE_P (t))
918 {
919 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
920 hstate.add_flag (TYPE_FINAL_P (t));
921 }
922 else if (code == ARRAY_TYPE)
923 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
924 hstate.commit_flag ();
925 hstate.add_int (TYPE_PRECISION (t));
926 hstate.add_int (TYPE_ALIGN (t));
927 hstate.add_int ((TYPE_ALIAS_SET (t) == 0
928 || (!in_lto_p
929 && get_alias_set (t) == 0))
930 ? 0 : -1);
931 }
932
933 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
934 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
935 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
936
937 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
938 gcc_unreachable ();
939
940 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
941 hstate.add (t, sizeof (struct cl_optimization));
942
943 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
944 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
945
946 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
947 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
948
949 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
950 {
951 if (code != IDENTIFIER_NODE)
952 visit (TREE_TYPE (t));
953 }
954
955 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
956 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
957 visit (VECTOR_CST_ELT (t, i));
958
959 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
960 {
961 visit (TREE_REALPART (t));
962 visit (TREE_IMAGPART (t));
963 }
964
965 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
966 {
967 /* Drop names that were created for anonymous entities. */
968 if (DECL_NAME (t)
969 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
970 && ANON_AGGRNAME_P (DECL_NAME (t)))
971 ;
972 else
973 visit (DECL_NAME (t));
974 if (DECL_FILE_SCOPE_P (t))
975 ;
976 else
977 visit (DECL_CONTEXT (t));
978 }
979
980 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
981 {
982 visit (DECL_SIZE (t));
983 visit (DECL_SIZE_UNIT (t));
984 visit (DECL_ATTRIBUTES (t));
985 if ((code == VAR_DECL
986 || code == PARM_DECL)
987 && DECL_HAS_VALUE_EXPR_P (t))
988 visit (DECL_VALUE_EXPR (t));
989 if (code == VAR_DECL
990 && DECL_HAS_DEBUG_EXPR_P (t))
991 visit (DECL_DEBUG_EXPR (t));
992 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
993 be able to call get_symbol_initial_value. */
994 }
995
996 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
997 {
998 if (code == TYPE_DECL)
999 visit (DECL_ORIGINAL_TYPE (t));
1000 }
1001
1002 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1003 {
1004 if (DECL_ASSEMBLER_NAME_SET_P (t))
1005 visit (DECL_ASSEMBLER_NAME (t));
1006 }
1007
1008 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1009 {
1010 visit (DECL_FIELD_OFFSET (t));
1011 visit (DECL_BIT_FIELD_TYPE (t));
1012 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1013 visit (DECL_FIELD_BIT_OFFSET (t));
1014 visit (DECL_FCONTEXT (t));
1015 }
1016
1017 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1018 {
1019 visit (DECL_VINDEX (t));
1020 visit (DECL_FUNCTION_PERSONALITY (t));
1021 /* Do not follow DECL_FUNCTION_SPECIFIC_TARGET. */
1022 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1023 }
1024
1025 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1026 {
1027 visit (TYPE_SIZE (t));
1028 visit (TYPE_SIZE_UNIT (t));
1029 visit (TYPE_ATTRIBUTES (t));
1030 visit (TYPE_NAME (t));
1031 visit (TYPE_MAIN_VARIANT (t));
1032 if (TYPE_FILE_SCOPE_P (t))
1033 ;
1034 else
1035 visit (TYPE_CONTEXT (t));
1036 visit (TYPE_STUB_DECL (t));
1037 }
1038
1039 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1040 {
1041 if (code == ENUMERAL_TYPE)
1042 visit (TYPE_VALUES (t));
1043 else if (code == ARRAY_TYPE)
1044 visit (TYPE_DOMAIN (t));
1045 else if (RECORD_OR_UNION_TYPE_P (t))
1046 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1047 visit (f);
1048 else if (code == FUNCTION_TYPE
1049 || code == METHOD_TYPE)
1050 visit (TYPE_ARG_TYPES (t));
1051 if (!POINTER_TYPE_P (t))
1052 visit (TYPE_MINVAL (t));
1053 visit (TYPE_MAXVAL (t));
1054 if (RECORD_OR_UNION_TYPE_P (t))
1055 visit (TYPE_BINFO (t));
1056 }
1057
1058 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1059 {
1060 visit (TREE_PURPOSE (t));
1061 visit (TREE_VALUE (t));
1062 visit (TREE_CHAIN (t));
1063 }
1064
1065 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1066 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1067 visit (TREE_VEC_ELT (t, i));
1068
1069 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1070 {
1071 hstate.add_wide_int (TREE_OPERAND_LENGTH (t));
1072 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1073 visit (TREE_OPERAND (t, i));
1074 }
1075
1076 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1077 {
1078 unsigned i;
1079 tree b;
1080 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1081 visit (b);
1082 visit (BINFO_OFFSET (t));
1083 visit (BINFO_VTABLE (t));
1084 visit (BINFO_VPTR_FIELD (t));
1085 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1086 visit (b);
1087 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1088 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1089 }
1090
1091 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1092 {
1093 unsigned i;
1094 tree index, value;
1095 hstate.add_wide_int (CONSTRUCTOR_NELTS (t));
1096 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1097 {
1098 visit (index);
1099 visit (value);
1100 }
1101 }
1102
1103 if (code == OMP_CLAUSE)
1104 {
1105 int i;
1106 HOST_WIDE_INT val;
1107
1108 hstate.add_wide_int (OMP_CLAUSE_CODE (t));
1109 switch (OMP_CLAUSE_CODE (t))
1110 {
1111 case OMP_CLAUSE_DEFAULT:
1112 val = OMP_CLAUSE_DEFAULT_KIND (t);
1113 break;
1114 case OMP_CLAUSE_SCHEDULE:
1115 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1116 break;
1117 case OMP_CLAUSE_DEPEND:
1118 val = OMP_CLAUSE_DEPEND_KIND (t);
1119 break;
1120 case OMP_CLAUSE_MAP:
1121 val = OMP_CLAUSE_MAP_KIND (t);
1122 break;
1123 case OMP_CLAUSE_PROC_BIND:
1124 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1125 break;
1126 case OMP_CLAUSE_REDUCTION:
1127 val = OMP_CLAUSE_REDUCTION_CODE (t);
1128 break;
1129 default:
1130 val = 0;
1131 break;
1132 }
1133 hstate.add_wide_int (val);
1134 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1135 visit (OMP_CLAUSE_OPERAND (t, i));
1136 visit (OMP_CLAUSE_CHAIN (t));
1137 }
1138
1139 return hstate.end ();
1140
1141 #undef visit
1142 }
1143
1144 /* Compare two SCC entries by their hash value for qsorting them. */
1145
1146 int
1147 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1148 {
1149 const scc_entry *p1 = (const scc_entry *) p1_;
1150 const scc_entry *p2 = (const scc_entry *) p2_;
1151 if (p1->hash < p2->hash)
1152 return -1;
1153 else if (p1->hash > p2->hash)
1154 return 1;
1155 return 0;
1156 }
1157
/* Return a hash value for the SCC on the SCC stack from FIRST with
   size SIZE.  The hash is designed to be independent of the entry
   point of the DFS walk, so that identical SCCs streamed from
   different translation units hash equal and can be merged at WPA
   time.  */

hashval_t
DFS::hash_scc (struct output_block *ob,
	       unsigned first, unsigned size)
{
  unsigned int last_classes = 0, iterations = 0;

  /* Compute hash values for the SCC members.  */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash = hash_tree (ob->writer_cache, NULL,
					sccstack[first+i].t);

  /* A singleton SCC needs no stabilization.  */
  if (size == 1)
    return sccstack[first].hash;

  /* We aim to get a unique hash for every tree within the SCC and compute
     the hash value of the whole SCC by combining all values together in a
     stable (entry-point independent) order.  This guarantees that the same
     SCC regions within different translation units will get the same hash
     values and therefore will be merged at WPA time.

     Often the hashes are already unique.  In that case we compute the SCC
     hash by combining individual hash values in an increasing order.

     If there are duplicates, we seek at least one tree with a unique hash
     (and pick the one with minimal hash and this property).  Then we obtain
     a stable order by a DFS walk starting from this unique tree and then use
     the index within this order to make individual hash values unique.

     If there is no tree with a unique hash, we iteratively propagate the
     hash values across the internal edges of the SCC.  This usually quickly
     leads to unique hashes.  Consider, for example, an SCC containing two
     pointers that are identical except for the type they point to and assume
     that these types are also part of the SCC.  The propagation will add the
     points-to type information into their hash values.  */
  do
    {
      /* Sort the SCC so we can easily check hash values for uniqueness.  */
      qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);

      unsigned int classes = 1;
      int firstunique = -1;

      /* Find the tree with the lowest unique hash (if it exists) and
	 compute the number of equivalence classes.  */
      if (sccstack[first].hash != sccstack[first+1].hash)
	firstunique = 0;
      for (unsigned i = 1; i < size; ++i)
	if (sccstack[first+i-1].hash != sccstack[first+i].hash)
	  {
	    classes++;
	    if (firstunique == -1
		&& (i == size - 1
		    || sccstack[first+i+1].hash != sccstack[first+i].hash))
	      firstunique = i;
	  }

      /* If we found a tree with a unique hash, stop the iteration.  */
      if (firstunique != -1
	  /* Also terminate if we run out of iterations or if the number of
	     equivalence classes is no longer increasing.
	     For example a cyclic list of trees that are all equivalent will
	     never have a unique entry point; we however do not build such
	     SCCs in our IL.  */
	  || classes <= last_classes || iterations > 16)
	{
          hashval_t scc_hash;

	  /* If some hashes are not unique (CLASSES != SIZE), use the DFS
	     walk starting from FIRSTUNIQUE to obtain a stable order.  */
	  if (classes != size && firstunique != -1)
	    {
	      hash_map <tree, hashval_t> map(size*2);

	      /* Store hash values into a map, so we can associate them with
		 the reordered SCC.  */
	      for (unsigned i = 0; i < size; ++i)
		map.put (sccstack[first+i].t, sccstack[first+i].hash);

	      /* Re-walk the SCC starting from the unique entry; the walk
		 pushes the members in a canonical order.  */
	      DFS again (ob, sccstack[first+firstunique].t, false, false, true);
	      gcc_assert (again.sccstack.length () == size);

	      memcpy (sccstack.address () + first,
		      again.sccstack.address (),
		      sizeof (scc_entry) * size);

	      /* Update hash values of individual members by hashing in the
		 index within the stable order.  This ensures uniqueness.
		 Also compute the scc_hash by mixing in all hash values in
		 the stable order we obtained.  */
	      sccstack[first].hash = *map.get (sccstack[first].t);
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		{
		  sccstack[first+i].hash
		    = iterative_hash_hashval_t (i,
						*map.get (sccstack[first+i].t));
		  scc_hash = iterative_hash_hashval_t (scc_hash,
						       sccstack[first+i].hash);
		}
	    }
	  /* If we got unique hash values for each tree, then the sort
	     already ensured an entry-point independent order.  Only compute
	     the final SCC hash.

	     If we failed to find the unique entry point, we go by the same
	     route.  We will eventually introduce unwanted hash conflicts.  */
	  else
	    {
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		scc_hash = iterative_hash_hashval_t (scc_hash,
						     sccstack[first+i].hash);
	      /* We can not 100% guarantee that the hash will not conflict
		 in a way so the unique hash is not found.  This however
		 should be an extremely rare situation.  ICE for now so
		 possible issues are found and evaluated.  */
	      gcc_checking_assert (classes == size);
	    }

	  /* To avoid conflicts across SCCs, iteratively hash the whole SCC
	     hash into the hash of each of the elements.  */
	  for (unsigned i = 0; i < size; ++i)
	    sccstack[first+i].hash
	      = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
	  return scc_hash;
	}

      last_classes = classes;
      iterations++;

      /* We failed to identify the entry point; propagate hash values across
	 the edges.  */
      {
	hash_map <tree, hashval_t> map(size*2);
	for (unsigned i = 0; i < size; ++i)
	  map.put (sccstack[first+i].t, sccstack[first+i].hash);

	for (unsigned i = 0; i < size; i++)
	  sccstack[first+i].hash = hash_tree (ob->writer_cache, &map,
					      sccstack[first+i].t);
      }
    }
  while (true);
}
1306
/* DFS walk EXPR and stream SCCs of tree bodies if they are not
   already in the streamer cache.  Main routine called for
   each visit of EXPR.  FROM_STATE is the DFS state of the parent
   node (NULL at the walk root).  REF_P and THIS_REF_P control
   whether siblings/leaves are streamed by reference; SINGLE_P
   indicates we are only re-walking a single leaf SCC to obtain
   a canonical order (see hash_scc).  */

void
DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
		     tree expr, bool ref_p, bool this_ref_p, bool single_p)
{
  unsigned ix;
  sccs **slot;

  /* Handle special cases.  */
  if (expr == NULL_TREE)
    return;

  /* Do not DFS walk into indexable trees.  */
  if (this_ref_p && tree_is_indexable (expr))
    return;

  /* Check if we already streamed EXPR.  */
  if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
    return;

  slot = (sccs **)pointer_map_insert (sccstate, expr);
  sccs *cstate = *slot;
  if (!cstate)
    {
      scc_entry e = { expr, 0 };
      /* Not yet visited.  DFS recurse and push it onto the stack.  */
      *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
      sccstack.safe_push (e);
      cstate->dfsnum = next_dfs_num++;
      cstate->low = cstate->dfsnum;

      /* Builtins need no body walk; non-overflowed INTEGER_CSTs only
	 need their type walked; everything else gets a full body
	 walk plus the LTO-specific edges below.  */
      if (streamer_handle_as_builtin_p (expr))
	;
      else if (TREE_CODE (expr) == INTEGER_CST
	       && !TREE_OVERFLOW (expr))
	DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p, single_p);
      else
	{
	  DFS_write_tree_body (ob, expr, cstate, ref_p, single_p);

	  /* Walk any LTO-specific edges.  */
	  if (DECL_P (expr)
	      && TREE_CODE (expr) != FUNCTION_DECL
	      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
	    {
	      /* Handle DECL_INITIAL for symbols.  */
	      tree initial = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
						       expr);
	      DFS_write_tree (ob, cstate, initial, ref_p, ref_p, single_p);
	    }
	}

      /* See if we found an SCC (Tarjan: EXPR is the SCC root when its
	 low-link equals its DFS number).  */
      if (cstate->low == cstate->dfsnum)
	{
	  unsigned first, size;
	  tree x;

	  /* If we are re-walking a single leaf-SCC just return and
	     let the caller access the sccstack.  */
	  if (single_p)
	    return;

	  /* Pop the SCC and compute its size.  */
	  first = sccstack.length ();
	  do
	    {
	      x = sccstack[--first].t;
	    }
	  while (x != expr);
	  size = sccstack.length () - first;

	  /* No need to compute hashes for LTRANS units, we don't perform
	     any merging there.  */
	  hashval_t scc_hash = 0;
	  unsigned scc_entry_len = 0;
	  if (!flag_wpa)
	    {
	      scc_hash = hash_scc (ob, first, size);

	      /* Put the entries with the least number of collisions first.  */
	      unsigned entry_start = 0;
	      scc_entry_len = size + 1;
	      for (unsigned i = 0; i < size;)
		{
		  unsigned from = i;
		  for (i = i + 1; i < size
		       && (sccstack[first + i].hash
			   == sccstack[first + from].hash); ++i)
		    ;
		  if (i - from < scc_entry_len)
		    {
		      scc_entry_len = i - from;
		      entry_start = from;
		    }
		}
	      for (unsigned i = 0; i < scc_entry_len; ++i)
		{
		  scc_entry tem = sccstack[first + i];
		  sccstack[first + i] = sccstack[first + entry_start + i];
		  sccstack[first + entry_start + i] = tem;
		}

	      if (scc_entry_len == 1)
		; /* We already sorted SCC deterministically in hash_scc.  */
	      else
		/* Check that we have only one SCC.
		   Naturally we may have conflicts if hash function is not
		   strong enough.  Lets see how far this gets.  */
		{
#ifdef ENABLE_CHECKING
		  gcc_unreachable ();
#endif
		}
	    }

	  /* Write LTO_tree_scc.  */
	  streamer_write_record_start (ob, LTO_tree_scc);
	  streamer_write_uhwi (ob, size);
	  streamer_write_uhwi (ob, scc_hash);

	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
	     All INTEGER_CSTs need to be handled this way as we need
	     their type to materialize them.  Also builtins are handled
	     this way.
	     ??? We still wrap these in LTO_tree_scc so at the
	     input side we can properly identify the tree we want
	     to ultimately return.  */
	  if (size == 1)
	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
	  else
	    {
	      /* Write the size of the SCC entry candidates.  */
	      streamer_write_uhwi (ob, scc_entry_len);

	      /* Write all headers and populate the streamer cache.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  hashval_t hash = sccstack[first+i].hash;
		  tree t = sccstack[first+i].t;
		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
							      t, hash, &ix);
		  gcc_assert (!exists_p);

		  if (!lto_is_streamable (t))
		    internal_error ("tree code %qs is not supported "
				    "in LTO streams",
				    get_tree_code_name (TREE_CODE (t)));

		  gcc_checking_assert (!streamer_handle_as_builtin_p (t));

		  /* Write the header, containing everything needed to
		     materialize EXPR on the reading side.  */
		  streamer_write_tree_header (ob, t);
		}

	      /* Write the bitpacks and tree references.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);

		  /* Mark the end of the tree.  */
		  streamer_write_zero (ob);
		}
	    }

	  /* Finally truncate the vector.  */
	  sccstack.truncate (first);

	  if (from_state)
	    from_state->low = MIN (from_state->low, cstate->low);
	  return;
	}

      if (from_state)
	from_state->low = MIN (from_state->low, cstate->low);
    }
  /* EXPR was already on the stack; propagate its DFS number into the
     parent's low-link (standard Tarjan back-edge handling).  */
  gcc_checking_assert (from_state);
  if (cstate->dfsnum < from_state->dfsnum)
    from_state->low = MIN (cstate->dfsnum, from_state->low);
}
1491
1492
/* Emit the physical representation of tree node EXPR to output block
   OB.  If THIS_REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.  */

void
lto_output_tree (struct output_block *ob, tree expr,
		 bool ref_p, bool this_ref_p)
{
  unsigned ix;
  bool existed_p;

  /* A NULL tree is streamed as a single LTO_null record.  */
  if (expr == NULL_TREE)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  /* Indexable trees are emitted as references rather than inline.  */
  if (this_ref_p && tree_is_indexable (expr))
    {
      lto_output_tree_ref (ob, expr);
      return;
    }

  existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
  if (existed_p)
    {
      /* If a node has already been streamed out, make sure that
	 we don't write it more than once.  Otherwise, the reader
	 will instantiate two different nodes for the same object.  */
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
  else
    {
      /* This is the first time we see EXPR, write all reachable
	 trees to OB.  */
      static bool in_dfs_walk;

      /* Protect against recursion which means disconnect between
	 what tree edges we walk in the DFS walk and what edges
	 we stream out.  */
      gcc_assert (!in_dfs_walk);

      /* Start the DFS walk; it streams out every SCC reachable from
	 EXPR that is not yet present in the writer cache.  */
      in_dfs_walk = true;
      DFS (ob, expr, ref_p, this_ref_p, false);
      in_dfs_walk = false;

      /* Finally append a reference to the tree we were writing.
	 ??? If expr ended up as a singleton we could have
	 inlined it here and avoid outputting a reference.  */
      existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
      gcc_assert (existed_p);
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
}
1558
1559
1560 /* Output to OB a list of try/catch handlers starting with FIRST. */
1561
1562 static void
1563 output_eh_try_list (struct output_block *ob, eh_catch first)
1564 {
1565 eh_catch n;
1566
1567 for (n = first; n; n = n->next_catch)
1568 {
1569 streamer_write_record_start (ob, LTO_eh_catch);
1570 stream_write_tree (ob, n->type_list, true);
1571 stream_write_tree (ob, n->filter_list, true);
1572 stream_write_tree (ob, n->label, true);
1573 }
1574
1575 streamer_write_record_start (ob, LTO_null);
1576 }
1577
1578
1579 /* Output EH region R in function FN to OB. CURR_RN is the slot index
1580 that is being emitted in FN->EH->REGION_ARRAY. This is used to
1581 detect EH region sharing. */
1582
1583 static void
1584 output_eh_region (struct output_block *ob, eh_region r)
1585 {
1586 enum LTO_tags tag;
1587
1588 if (r == NULL)
1589 {
1590 streamer_write_record_start (ob, LTO_null);
1591 return;
1592 }
1593
1594 if (r->type == ERT_CLEANUP)
1595 tag = LTO_ert_cleanup;
1596 else if (r->type == ERT_TRY)
1597 tag = LTO_ert_try;
1598 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1599 tag = LTO_ert_allowed_exceptions;
1600 else if (r->type == ERT_MUST_NOT_THROW)
1601 tag = LTO_ert_must_not_throw;
1602 else
1603 gcc_unreachable ();
1604
1605 streamer_write_record_start (ob, tag);
1606 streamer_write_hwi (ob, r->index);
1607
1608 if (r->outer)
1609 streamer_write_hwi (ob, r->outer->index);
1610 else
1611 streamer_write_zero (ob);
1612
1613 if (r->inner)
1614 streamer_write_hwi (ob, r->inner->index);
1615 else
1616 streamer_write_zero (ob);
1617
1618 if (r->next_peer)
1619 streamer_write_hwi (ob, r->next_peer->index);
1620 else
1621 streamer_write_zero (ob);
1622
1623 if (r->type == ERT_TRY)
1624 {
1625 output_eh_try_list (ob, r->u.eh_try.first_catch);
1626 }
1627 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1628 {
1629 stream_write_tree (ob, r->u.allowed.type_list, true);
1630 stream_write_tree (ob, r->u.allowed.label, true);
1631 streamer_write_uhwi (ob, r->u.allowed.filter);
1632 }
1633 else if (r->type == ERT_MUST_NOT_THROW)
1634 {
1635 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1636 bitpack_d bp = bitpack_create (ob->main_stream);
1637 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1638 streamer_write_bitpack (&bp);
1639 }
1640
1641 if (r->landing_pads)
1642 streamer_write_hwi (ob, r->landing_pads->index);
1643 else
1644 streamer_write_zero (ob);
1645 }
1646
1647
1648 /* Output landing pad LP to OB. */
1649
1650 static void
1651 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1652 {
1653 if (lp == NULL)
1654 {
1655 streamer_write_record_start (ob, LTO_null);
1656 return;
1657 }
1658
1659 streamer_write_record_start (ob, LTO_eh_landing_pad);
1660 streamer_write_hwi (ob, lp->index);
1661 if (lp->next_lp)
1662 streamer_write_hwi (ob, lp->next_lp->index);
1663 else
1664 streamer_write_zero (ob);
1665
1666 if (lp->region)
1667 streamer_write_hwi (ob, lp->region->index);
1668 else
1669 streamer_write_zero (ob);
1670
1671 stream_write_tree (ob, lp->post_landing_pad, true);
1672 }
1673
1674
1675 /* Output the existing eh_table to OB. */
1676
1677 static void
1678 output_eh_regions (struct output_block *ob, struct function *fn)
1679 {
1680 if (fn->eh && fn->eh->region_tree)
1681 {
1682 unsigned i;
1683 eh_region eh;
1684 eh_landing_pad lp;
1685 tree ttype;
1686
1687 streamer_write_record_start (ob, LTO_eh_table);
1688
1689 /* Emit the index of the root of the EH region tree. */
1690 streamer_write_hwi (ob, fn->eh->region_tree->index);
1691
1692 /* Emit all the EH regions in the region array. */
1693 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1694 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1695 output_eh_region (ob, eh);
1696
1697 /* Emit all landing pads. */
1698 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1699 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1700 output_eh_lp (ob, lp);
1701
1702 /* Emit all the runtime type data. */
1703 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1704 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1705 stream_write_tree (ob, ttype, true);
1706
1707 /* Emit the table of action chains. */
1708 if (targetm.arm_eabi_unwinder)
1709 {
1710 tree t;
1711 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1712 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1713 stream_write_tree (ob, t, true);
1714 }
1715 else
1716 {
1717 uchar c;
1718 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1719 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1720 streamer_write_char_stream (ob->main_stream, c);
1721 }
1722 }
1723
1724 /* The LTO_null either terminates the record or indicates that there
1725 are no eh_records at all. */
1726 streamer_write_record_start (ob, LTO_null);
1727 }
1728
1729
1730 /* Output all of the active ssa names to the ssa_names stream. */
1731
1732 static void
1733 output_ssa_names (struct output_block *ob, struct function *fn)
1734 {
1735 unsigned int i, len;
1736
1737 len = vec_safe_length (SSANAMES (fn));
1738 streamer_write_uhwi (ob, len);
1739
1740 for (i = 1; i < len; i++)
1741 {
1742 tree ptr = (*SSANAMES (fn))[i];
1743
1744 if (ptr == NULL_TREE
1745 || SSA_NAME_IN_FREE_LIST (ptr)
1746 || virtual_operand_p (ptr))
1747 continue;
1748
1749 streamer_write_uhwi (ob, i);
1750 streamer_write_char_stream (ob->main_stream,
1751 SSA_NAME_IS_DEFAULT_DEF (ptr));
1752 if (SSA_NAME_VAR (ptr))
1753 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1754 else
1755 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1756 stream_write_tree (ob, TREE_TYPE (ptr), true);
1757 }
1758
1759 streamer_write_zero (ob);
1760 }
1761
1762
1763 /* Output a wide-int. */
1764
1765 static void
1766 streamer_write_wi (struct output_block *ob,
1767 const widest_int &w)
1768 {
1769 int len = w.get_len ();
1770
1771 streamer_write_uhwi (ob, w.get_precision ());
1772 streamer_write_uhwi (ob, len);
1773 for (int i = 0; i < len; i++)
1774 streamer_write_hwi (ob, w.elt (i));
1775 }
1776
1777
1778 /* Output the cfg. */
1779
1780 static void
1781 output_cfg (struct output_block *ob, struct function *fn)
1782 {
1783 struct lto_output_stream *tmp_stream = ob->main_stream;
1784 basic_block bb;
1785
1786 ob->main_stream = ob->cfg_stream;
1787
1788 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1789 profile_status_for_fn (fn));
1790
1791 /* Output the number of the highest basic block. */
1792 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1793
1794 FOR_ALL_BB_FN (bb, fn)
1795 {
1796 edge_iterator ei;
1797 edge e;
1798
1799 streamer_write_hwi (ob, bb->index);
1800
1801 /* Output the successors and the edge flags. */
1802 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1803 FOR_EACH_EDGE (e, ei, bb->succs)
1804 {
1805 streamer_write_uhwi (ob, e->dest->index);
1806 streamer_write_hwi (ob, e->probability);
1807 streamer_write_gcov_count (ob, e->count);
1808 streamer_write_uhwi (ob, e->flags);
1809 }
1810 }
1811
1812 streamer_write_hwi (ob, -1);
1813
1814 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1815 while (bb->next_bb)
1816 {
1817 streamer_write_hwi (ob, bb->next_bb->index);
1818 bb = bb->next_bb;
1819 }
1820
1821 streamer_write_hwi (ob, -1);
1822
1823 /* ??? The cfgloop interface is tied to cfun. */
1824 gcc_assert (cfun == fn);
1825
1826 /* Output the number of loops. */
1827 streamer_write_uhwi (ob, number_of_loops (fn));
1828
1829 /* Output each loop, skipping the tree root which has number zero. */
1830 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1831 {
1832 struct loop *loop = get_loop (fn, i);
1833
1834 /* Write the index of the loop header. That's enough to rebuild
1835 the loop tree on the reader side. Stream -1 for an unused
1836 loop entry. */
1837 if (!loop)
1838 {
1839 streamer_write_hwi (ob, -1);
1840 continue;
1841 }
1842 else
1843 streamer_write_hwi (ob, loop->header->index);
1844
1845 /* Write everything copy_loop_info copies. */
1846 streamer_write_enum (ob->main_stream,
1847 loop_estimation, EST_LAST, loop->estimate_state);
1848 streamer_write_hwi (ob, loop->any_upper_bound);
1849 if (loop->any_upper_bound)
1850 streamer_write_wi (ob, loop->nb_iterations_upper_bound);
1851 streamer_write_hwi (ob, loop->any_estimate);
1852 if (loop->any_estimate)
1853 streamer_write_wi (ob, loop->nb_iterations_estimate);
1854
1855 /* Write OMP SIMD related info. */
1856 streamer_write_hwi (ob, loop->safelen);
1857 streamer_write_hwi (ob, loop->dont_vectorize);
1858 streamer_write_hwi (ob, loop->force_vectorize);
1859 stream_write_tree (ob, loop->simduid, true);
1860 }
1861
1862 ob->main_stream = tmp_stream;
1863 }
1864
1865
1866 /* Create the header in the file using OB. If the section type is for
1867 a function, set FN to the decl for that function. */
1868
1869 void
1870 produce_asm (struct output_block *ob, tree fn)
1871 {
1872 enum lto_section_type section_type = ob->section_type;
1873 struct lto_function_header header;
1874 char *section_name;
1875
1876 if (section_type == LTO_section_function_body)
1877 {
1878 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1879 section_name = lto_get_section_name (section_type, name, NULL);
1880 }
1881 else
1882 section_name = lto_get_section_name (section_type, NULL, NULL);
1883
1884 lto_begin_section (section_name, !flag_wpa);
1885 free (section_name);
1886
1887 /* The entire header is stream computed here. */
1888 memset (&header, 0, sizeof (struct lto_function_header));
1889
1890 /* Write the header. */
1891 header.lto_header.major_version = LTO_major_version;
1892 header.lto_header.minor_version = LTO_minor_version;
1893
1894 header.compressed_size = 0;
1895
1896 if (section_type == LTO_section_function_body)
1897 header.cfg_size = ob->cfg_stream->total_size;
1898 header.main_size = ob->main_stream->total_size;
1899 header.string_size = ob->string_stream->total_size;
1900 lto_write_data (&header, sizeof header);
1901
1902 /* Put all of the gimple and the string table out the asm file as a
1903 block of text. */
1904 if (section_type == LTO_section_function_body)
1905 lto_write_stream (ob->cfg_stream);
1906 lto_write_stream (ob->main_stream);
1907 lto_write_stream (ob->string_stream);
1908
1909 lto_end_section ();
1910 }
1911
1912
1913 /* Output the base body of struct function FN using output block OB. */
1914
1915 static void
1916 output_struct_function_base (struct output_block *ob, struct function *fn)
1917 {
1918 struct bitpack_d bp;
1919 unsigned i;
1920 tree t;
1921
1922 /* Output the static chain and non-local goto save area. */
1923 stream_write_tree (ob, fn->static_chain_decl, true);
1924 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
1925
1926 /* Output all the local variables in the function. */
1927 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
1928 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
1929 stream_write_tree (ob, t, true);
1930
1931 /* Output current IL state of the function. */
1932 streamer_write_uhwi (ob, fn->curr_properties);
1933
1934 /* Write all the attributes for FN. */
1935 bp = bitpack_create (ob->main_stream);
1936 bp_pack_value (&bp, fn->is_thunk, 1);
1937 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
1938 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
1939 bp_pack_value (&bp, fn->returns_struct, 1);
1940 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
1941 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
1942 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
1943 bp_pack_value (&bp, fn->after_inlining, 1);
1944 bp_pack_value (&bp, fn->stdarg, 1);
1945 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
1946 bp_pack_value (&bp, fn->calls_alloca, 1);
1947 bp_pack_value (&bp, fn->calls_setjmp, 1);
1948 bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
1949 bp_pack_value (&bp, fn->has_simduid_loops, 1);
1950 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
1951 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
1952
1953 /* Output the function start and end loci. */
1954 stream_output_location (ob, &bp, fn->function_start_locus);
1955 stream_output_location (ob, &bp, fn->function_end_locus);
1956
1957 streamer_write_bitpack (&bp);
1958 }
1959
1960
/* Output the body of function NODE->DECL to its own LTO section.
   Functions without a gimple body (abstract functions kept for debug
   info) are streamed with only their decls and lexical scopes.  */

static void
output_function (struct cgraph_node *node)
{
  tree function;
  struct function *fn;
  basic_block bb;
  struct output_block *ob;

  function = node->decl;
  fn = DECL_STRUCT_FUNCTION (function);
  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->symbol = node;

  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  /* Set current_function_decl and cfun.  */
  push_cfun (fn);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  streamer_write_record_start (ob, LTO_function);

  /* Output decls for parameters and args.  */
  stream_write_tree (ob, DECL_RESULT (function), true);
  streamer_write_chain (ob, DECL_ARGUMENTS (function), true);

  /* Output DECL_INITIAL for the function, which contains the tree of
     lexical scopes.  */
  stream_write_tree (ob, DECL_INITIAL (function), true);

  /* We also stream abstract functions where we stream only stuff needed for
     debug info.  A nonzero marker tells the reader a body follows.  */
  if (gimple_has_body_p (function))
    {
      streamer_write_uhwi (ob, 1);
      output_struct_function_base (ob, fn);

      /* Output all the SSA names used in the function.  */
      output_ssa_names (ob, fn);

      /* Output any exception handling regions.  */
      output_eh_regions (ob, fn);


      /* We will renumber the statements.  The code that does this uses
	 the same ordering that we use for serializing them so we can use
	 the same code on the other end and not have to write out the
	 statement numbers.  We do not assign UIDs to PHIs here because
	 virtual PHIs get re-computed on-the-fly which would make numbers
	 inconsistent.  */
      set_gimple_stmt_max_uid (cfun, 0);
      FOR_ALL_BB_FN (bb, cfun)
	{
	  gimple_stmt_iterator gsi;
	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);

	      /* Virtual PHIs are not going to be streamed.  */
	      if (!virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}
      /* To avoid keeping duplicate gimple IDs in the statements, renumber
	 virtual phis now.  */
      FOR_ALL_BB_FN (bb, cfun)
	{
	  gimple_stmt_iterator gsi;
	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      if (virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}

      /* Output the code for the function.  */
      FOR_ALL_BB_FN (bb, fn)
	output_bb (ob, bb, fn);

      /* The terminator for this function.  */
      streamer_write_record_start (ob, LTO_null);

      output_cfg (ob, fn);

      pop_cfun ();
   }
  else
    streamer_write_uhwi (ob, 0);

  /* Create a section to hold the pickled output of this function.  */
  produce_asm (ob, function);

  destroy_output_block (ob);
}
2066
2067 /* Output the body of function NODE->DECL. */
2068
2069 static void
2070 output_constructor (struct varpool_node *node)
2071 {
2072 tree var = node->decl;
2073 struct output_block *ob;
2074
2075 ob = create_output_block (LTO_section_function_body);
2076
2077 clear_line_info (ob);
2078 ob->symbol = node;
2079
2080 /* Make string 0 be a NULL string. */
2081 streamer_write_char_stream (ob->string_stream, 0);
2082
2083 /* Output DECL_INITIAL for the function, which contains the tree of
2084 lexical scopes. */
2085 stream_write_tree (ob, DECL_INITIAL (var), true);
2086
2087 /* Create a section to hold the pickled output of this function. */
2088 produce_asm (ob, var);
2089
2090 destroy_output_block (ob);
2091 }
2092
2093
2094 /* Emit toplevel asms. */
2095
2096 void
2097 lto_output_toplevel_asms (void)
2098 {
2099 struct output_block *ob;
2100 struct asm_node *can;
2101 char *section_name;
2102 struct lto_asm_header header;
2103
2104 if (! asm_nodes)
2105 return;
2106
2107 ob = create_output_block (LTO_section_asm);
2108
2109 /* Make string 0 be a NULL string. */
2110 streamer_write_char_stream (ob->string_stream, 0);
2111
2112 for (can = asm_nodes; can; can = can->next)
2113 {
2114 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2115 streamer_write_hwi (ob, can->order);
2116 }
2117
2118 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2119
2120 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
2121 lto_begin_section (section_name, !flag_wpa);
2122 free (section_name);
2123
2124 /* The entire header stream is computed here. */
2125 memset (&header, 0, sizeof (header));
2126
2127 /* Write the header. */
2128 header.lto_header.major_version = LTO_major_version;
2129 header.lto_header.minor_version = LTO_minor_version;
2130
2131 header.main_size = ob->main_stream->total_size;
2132 header.string_size = ob->string_stream->total_size;
2133 lto_write_data (&header, sizeof header);
2134
2135 /* Put all of the gimple and the string table out the asm file as a
2136 block of text. */
2137 lto_write_stream (ob->main_stream);
2138 lto_write_stream (ob->string_stream);
2139
2140 lto_end_section ();
2141
2142 destroy_output_block (ob);
2143 }
2144
2145
/* Copy the function body or variable constructor of NODE without
   deserializing: the raw section bytes from the input object file are
   written back out verbatim, and the input decl state is copied into
   the output decl state so references resolve identically.  */

static void
copy_function_or_variable (struct symtab_node *node)
{
  tree function = node->decl;
  struct lto_file_decl_data *file_data = node->lto_file_data;
  const char *data;
  size_t len;
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
  char *section_name =
    lto_get_section_name (LTO_section_function_body, name, NULL);
  size_t i, j;
  struct lto_in_decl_state *in_state;
  struct lto_out_decl_state *out_state = lto_get_out_decl_state ();

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* We may have renamed the declaration, e.g., a static function.  */
  name = lto_get_decl_name_mapping (file_data, name);

  /* Fetch the raw bytes of the body section from the input file.  */
  data = lto_get_section_data (file_data, LTO_section_function_body,
                               name, &len);
  gcc_assert (data);

  /* Do a bit copy of the function body.  */
  lto_write_data (data, len);

  /* Copy decls from the input decl state into the output state.  */
  in_state =
    lto_get_function_in_decl_state (node->lto_file_data, function);
  gcc_assert (in_state);

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      size_t n = in_state->streams[i].size;
      tree *trees = in_state->streams[i].trees;
      struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);

      /* The out state must have the same indices as the in state,
	 since the bit-copied body refers to trees by index.  So just
	 copy the vector.  All the encoders in the out state must be
	 empty when we reach here (checked by the assert below).  */
      gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
      encoder->trees.reserve_exact (n);
      for (j = 0; j < n; j++)
	encoder->trees.safe_push (trees[j]);
    }

  lto_free_section_data (file_data, LTO_section_function_body, name,
			 data, len);
  lto_end_section ();
}
2199
2200 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2201
2202 static tree
2203 wrap_refs (tree *tp, int *ws, void *)
2204 {
2205 tree t = *tp;
2206 if (handled_component_p (t)
2207 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL)
2208 {
2209 tree decl = TREE_OPERAND (t, 0);
2210 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2211 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2212 build1 (ADDR_EXPR, ptrtype, decl),
2213 build_int_cst (ptrtype, 0));
2214 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2215 *ws = 0;
2216 }
2217 else if (TREE_CODE (t) == CONSTRUCTOR)
2218 ;
2219 else if (!EXPR_P (t))
2220 *ws = 0;
2221 return NULL_TREE;
2222 }
2223
/* Main entry point from the pass manager.  Streams out the bodies of
   all encoded functions and the initializers of all encoded
   variables, then the symbol table itself.  */

void
lto_output (void)
{
  struct lto_out_decl_state *decl_state;
#ifdef ENABLE_CHECKING
  /* DECL_UIDs already emitted; used only to assert that no symbol is
     output twice.  */
  bitmap output = lto_bitmap_alloc ();
#endif
  int i, n_nodes;
  lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* Initialize the streamer.  */
  lto_streamer_init ();

  n_nodes = lto_symtab_encoder_size (encoder);
  /* Process only the functions with bodies.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
	{
	  if (lto_symtab_encoder_encode_body_p (encoder, node)
	      && !node->alias)
	    {
#ifdef ENABLE_CHECKING
	      gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
	      bitmap_set_bit (output, DECL_UID (node->decl));
#endif
	      /* Each body is streamed under its own out-decl-state,
		 which is recorded once the body is done.  */
	      decl_state = lto_new_out_decl_state ();
	      lto_push_out_decl_state (decl_state);
	      /* A function without a GIMPLE body at WPA time is
		 bit-copied from the input file instead.  */
	      if (gimple_has_body_p (node->decl) || !flag_wpa)
		output_function (node);
	      else
		copy_function_or_variable (node);
	      gcc_assert (lto_get_out_decl_state () == decl_state);
	      lto_pop_out_decl_state ();
	      lto_record_function_out_decl_state (node->decl, decl_state);
	    }
	}
      else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
	{
	  /* Wrap symbol references inside the ctor in a type
	     preserving MEM_REF.  */
	  tree ctor = DECL_INITIAL (node->decl);
	  if (ctor && !in_lto_p)
	    walk_tree (&ctor, wrap_refs, NULL, NULL);
	  if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
	      && lto_symtab_encoder_encode_initializer_p (encoder, node)
	      && !node->alias)
	    {
	      timevar_push (TV_IPA_LTO_CTORS_OUT);
#ifdef ENABLE_CHECKING
	      gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
	      bitmap_set_bit (output, DECL_UID (node->decl));
#endif
	      decl_state = lto_new_out_decl_state ();
	      lto_push_out_decl_state (decl_state);
	      /* As for functions: stream the initializer, or bit-copy
		 it when it is not available here at WPA time.  */
	      if (DECL_INITIAL (node->decl) != error_mark_node
		  || !flag_wpa)
		output_constructor (node);
	      else
		copy_function_or_variable (node);
	      gcc_assert (lto_get_out_decl_state () == decl_state);
	      lto_pop_out_decl_state ();
	      lto_record_function_out_decl_state (node->decl, decl_state);
	      timevar_pop (TV_IPA_LTO_CTORS_OUT);
	    }
	}
    }

  /* Emit the callgraph after emitting function bodies.  This needs to
     be done now to make sure that all the statements in every function
     have been renumbered so that edges can be associated with call
     statements using the statement UIDs.  */
  output_symtab ();

#ifdef ENABLE_CHECKING
  lto_bitmap_free (output);
#endif
}
2305
2306 /* Write each node in encoded by ENCODER to OB, as well as those reachable
2307 from it and required for correct representation of its semantics.
2308 Each node in ENCODER must be a global declaration or a type. A node
2309 is written only once, even if it appears multiple times in the
2310 vector. Certain transitively-reachable nodes, such as those
2311 representing expressions, may be duplicated, but such nodes
2312 must not appear in ENCODER itself. */
2313
2314 static void
2315 write_global_stream (struct output_block *ob,
2316 struct lto_tree_ref_encoder *encoder)
2317 {
2318 tree t;
2319 size_t index;
2320 const size_t size = lto_tree_ref_encoder_size (encoder);
2321
2322 for (index = 0; index < size; index++)
2323 {
2324 t = lto_tree_ref_encoder_get_tree (encoder, index);
2325 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2326 stream_write_tree (ob, t, false);
2327 }
2328 }
2329
2330
2331 /* Write a sequence of indices into the globals vector corresponding
2332 to the trees in ENCODER. These are used by the reader to map the
2333 indices used to refer to global entities within function bodies to
2334 their referents. */
2335
2336 static void
2337 write_global_references (struct output_block *ob,
2338 struct lto_tree_ref_encoder *encoder)
2339 {
2340 tree t;
2341 uint32_t index;
2342 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2343
2344 /* Write size and slot indexes as 32-bit unsigned numbers. */
2345 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2346 data[0] = size;
2347
2348 for (index = 0; index < size; index++)
2349 {
2350 uint32_t slot_num;
2351
2352 t = lto_tree_ref_encoder_get_tree (encoder, index);
2353 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2354 gcc_assert (slot_num != (unsigned)-1);
2355 data[index + 1] = slot_num;
2356 }
2357
2358 lto_write_data (data, sizeof (int32_t) * (size + 1));
2359 free (data);
2360 }
2361
2362
2363 /* Write all the streams in an lto_out_decl_state STATE using
2364 output block OB and output stream OUT_STREAM. */
2365
2366 void
2367 lto_output_decl_state_streams (struct output_block *ob,
2368 struct lto_out_decl_state *state)
2369 {
2370 int i;
2371
2372 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2373 write_global_stream (ob, &state->streams[i]);
2374 }
2375
2376
2377 /* Write all the references in an lto_out_decl_state STATE using
2378 output block OB and output stream OUT_STREAM. */
2379
2380 void
2381 lto_output_decl_state_refs (struct output_block *ob,
2382 struct lto_out_decl_state *state)
2383 {
2384 unsigned i;
2385 uint32_t ref;
2386 tree decl;
2387
2388 /* Write reference to FUNCTION_DECL. If there is not function,
2389 write reference to void_type_node. */
2390 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2391 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2392 gcc_assert (ref != (unsigned)-1);
2393 lto_write_data (&ref, sizeof (uint32_t));
2394
2395 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2396 write_global_references (ob, &state->streams[i]);
2397 }
2398
2399
2400 /* Return the written size of STATE. */
2401
2402 static size_t
2403 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2404 {
2405 int i;
2406 size_t size;
2407
2408 size = sizeof (int32_t); /* fn_ref. */
2409 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2410 {
2411 size += sizeof (int32_t); /* vector size. */
2412 size += (lto_tree_ref_encoder_size (&state->streams[i])
2413 * sizeof (int32_t));
2414 }
2415 return size;
2416 }
2417
2418
/* Write the plugin symbol-table entry for symbol T into the currently
   open section, using slot numbers from CACHE.  SEEN is the set of
   assembler names written so far; duplicates are skipped.  ALIAS is
   true when T is written on behalf of an alias, which relaxes the
   definition sanity checks below.  */

static void
write_symbol (struct streamer_tree_cache_d *cache,
	      tree t, struct pointer_set_t *seen, bool alias)
{
  const char *name;
  enum gcc_plugin_symbol_kind kind;
  enum gcc_plugin_symbol_visibility visibility;
  unsigned slot_num;
  uint64_t size;
  const char *comdat;
  unsigned char c;

  /* None of the following kinds of symbols are needed in the
     symbol table.  */
  if (!TREE_PUBLIC (t)
      || is_builtin_fn (t)
      || DECL_ABSTRACT (t)
      || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
    return;
  gcc_assert (TREE_CODE (t) != RESULT_DECL);

  gcc_assert (TREE_CODE (t) == VAR_DECL
	      || TREE_CODE (t) == FUNCTION_DECL);

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));

  /* This behaves like assemble_name_raw in varasm.c, performing the
     same name manipulations that ASM_OUTPUT_LABELREF does.  */
  name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));

  /* Emit each mangled assembler name at most once.  */
  if (pointer_set_contains (seen, name))
    return;
  pointer_set_insert (seen, name);

  streamer_tree_cache_lookup (cache, t, &slot_num);
  gcc_assert (slot_num != (unsigned)-1);

  /* Classify the symbol for the linker plugin.  */
  if (DECL_EXTERNAL (t))
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKUNDEF;
      else
	kind = GCCPK_UNDEF;
    }
  else
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKDEF;
      else if (DECL_COMMON (t))
	kind = GCCPK_COMMON;
      else
	kind = GCCPK_DEF;

      /* When something is defined, it should have node attached.  */
      gcc_assert (alias || TREE_CODE (t) != VAR_DECL
		  || varpool_node::get (t)->definition);
      gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
		  || (cgraph_node::get (t)
		      && cgraph_node::get (t)->definition));
    }

  /* Imitate what default_elf_asm_output_external do.
     When symbol is external, we need to output it with DEFAULT visibility
     when compiling with -fvisibility=default, while with HIDDEN visibility
     when symbol has attribute (visibility("hidden")) specified.
     targetm.binds_local_p check DECL_VISIBILITY_SPECIFIED and gets this
     right.  */

  if (DECL_EXTERNAL (t)
      && !targetm.binds_local_p (t))
    visibility = GCCPV_DEFAULT;
  else
    switch (DECL_VISIBILITY (t))
      {
      case VISIBILITY_DEFAULT:
	visibility = GCCPV_DEFAULT;
	break;
      case VISIBILITY_PROTECTED:
	visibility = GCCPV_PROTECTED;
	break;
      case VISIBILITY_HIDDEN:
	visibility = GCCPV_HIDDEN;
	break;
      case VISIBILITY_INTERNAL:
	visibility = GCCPV_INTERNAL;
	break;
      }

  /* Only common symbols carry a size (so the link can keep the
     largest instance); everything else writes 0.  */
  if (kind == GCCPK_COMMON
      && DECL_SIZE_UNIT (t)
      && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
    size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
  else
    size = 0;

  if (DECL_ONE_ONLY (t))
    comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
  else
    comdat = "";

  /* Entry layout: NUL-terminated name and comdat group, one byte each
     for kind and visibility, 8 bytes of size, 4 bytes of slot number.
     NOTE(review): sizes are written literally (8 and 4), presumably
     to pin the on-disk plugin ABI — do not change to sizeof.  */
  lto_write_data (name, strlen (name) + 1);
  lto_write_data (comdat, strlen (comdat) + 1);
  c = (unsigned char) kind;
  lto_write_data (&c, 1);
  c = (unsigned char) visibility;
  lto_write_data (&c, 1);
  lto_write_data (&size, 8);
  lto_write_data (&slot_num, 4);
}
2531
2532 /* Return true if NODE should appear in the plugin symbol table. */
2533
2534 bool
2535 output_symbol_p (symtab_node *node)
2536 {
2537 struct cgraph_node *cnode;
2538 if (!node->real_symbol_p ())
2539 return false;
2540 /* We keep external functions in symtab for sake of inlining
2541 and devirtualization. We do not want to see them in symbol table as
2542 references unless they are really used. */
2543 cnode = dyn_cast <cgraph_node *> (node);
2544 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2545 && cnode->callers)
2546 return true;
2547
2548 /* Ignore all references from external vars initializers - they are not really
2549 part of the compilation unit until they are used by folding. Some symbols,
2550 like references to external construction vtables can not be referred to at all.
2551 We decide this at can_refer_decl_in_current_unit_p. */
2552 if (!node->definition || DECL_EXTERNAL (node->decl))
2553 {
2554 int i;
2555 struct ipa_ref *ref;
2556 for (i = 0; node->iterate_referring (i, ref); i++)
2557 {
2558 if (ref->use == IPA_REF_ALIAS)
2559 continue;
2560 if (is_a <cgraph_node *> (ref->referring))
2561 return true;
2562 if (!DECL_EXTERNAL (ref->referring->decl))
2563 return true;
2564 }
2565 return false;
2566 }
2567 return true;
2568 }
2569
2570
2571 /* Write an IL symbol table to OB.
2572 SET and VSET are cgraph/varpool node sets we are outputting. */
2573
2574 static void
2575 produce_symtab (struct output_block *ob)
2576 {
2577 struct streamer_tree_cache_d *cache = ob->writer_cache;
2578 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2579 struct pointer_set_t *seen;
2580 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2581 lto_symtab_encoder_iterator lsei;
2582
2583 lto_begin_section (section_name, false);
2584 free (section_name);
2585
2586 seen = pointer_set_create ();
2587
2588 /* Write the symbol table.
2589 First write everything defined and then all declarations.
2590 This is necessary to handle cases where we have duplicated symbols. */
2591 for (lsei = lsei_start (encoder);
2592 !lsei_end_p (lsei); lsei_next (&lsei))
2593 {
2594 symtab_node *node = lsei_node (lsei);
2595
2596 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2597 continue;
2598 write_symbol (cache, node->decl, seen, false);
2599 }
2600 for (lsei = lsei_start (encoder);
2601 !lsei_end_p (lsei); lsei_next (&lsei))
2602 {
2603 symtab_node *node = lsei_node (lsei);
2604
2605 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2606 continue;
2607 write_symbol (cache, node->decl, seen, false);
2608 }
2609
2610 pointer_set_destroy (seen);
2611
2612 lto_end_section ();
2613 }
2614
2615
/* This pass is run after all of the functions are serialized and all
   of the IPA passes have written their serialized forms.  This pass
   causes the vector of all of the global decls and types used from
   this file to be written in to a section that can then be read in to
   recover these on other side.  */

void
produce_asm_for_decls (void)
{
  struct lto_out_decl_state *out_state;
  struct lto_out_decl_state *fn_out_state;
  struct lto_decl_header header;
  char *section_name;
  struct output_block *ob;
  unsigned idx, num_fns;
  size_t decl_state_size;
  int32_t num_decl_states;

  ob = create_output_block (LTO_section_decls);

  memset (&header, 0, sizeof (struct lto_decl_header));

  section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  gcc_assert (!alias_pairs);

  /* Get rid of the global decl state hash tables to save some memory.  */
  out_state = lto_get_out_decl_state ();
  for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
    if (out_state->streams[i].tree_hash_table)
      {
	delete out_state->streams[i].tree_hash_table;
	out_state->streams[i].tree_hash_table = NULL;
      }

  /* Write the global symbols: first the trees from the global state,
     then those of each per-function state.  */
  lto_output_decl_state_streams (ob, out_state);
  num_fns = lto_function_decl_states.length ();
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_streams (ob, fn_out_state);
    }

  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  /* Currently not used.  This field would allow us to preallocate
     the globals vector, so that it need not be resized as it is extended.  */
  header.num_nodes = -1;

  /* Compute the total size of all decl out states.  */
  decl_state_size = sizeof (int32_t);
  decl_state_size += lto_out_decl_state_written_size (out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      decl_state_size += lto_out_decl_state_written_size (fn_out_state);
    }
  header.decl_state_size = decl_state_size;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  /* The header must precede the decl-state refs and the streams.  */
  lto_write_data (&header, sizeof header);

  /* Write the main out-decl state, followed by out-decl states of
     functions.  */
  num_decl_states = num_fns + 1;
  lto_write_data (&num_decl_states, sizeof (num_decl_states));
  lto_output_decl_state_refs (ob, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state = lto_function_decl_states[idx];
      lto_output_decl_state_refs (ob, fn_out_state);
    }

  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  /* Write the symbol table.  It is used by linker to determine dependencies
     and thus we can skip it for WPA.  */
  if (!flag_wpa)
    produce_symtab (ob);

  /* Write command line opts.  */
  lto_write_options ();

  /* Deallocate memory and clean up.  */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_delete_out_decl_state (fn_out_state);
    }
  lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
  lto_function_decl_states.release ();
  destroy_output_block (ob);
}