convert the rest of the users of pointer_map to hash_map
[gcc.git] / gcc / lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2014 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "stor-layout.h"
29 #include "stringpool.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "hashtab.h"
35 #include "hash-set.h"
36 #include "basic-block.h"
37 #include "tree-ssa-alias.h"
38 #include "internal-fn.h"
39 #include "gimple-expr.h"
40 #include "is-a.h"
41 #include "gimple.h"
42 #include "gimple-iterator.h"
43 #include "gimple-ssa.h"
44 #include "tree-ssanames.h"
45 #include "tree-pass.h"
46 #include "function.h"
47 #include "diagnostic-core.h"
48 #include "inchash.h"
49 #include "except.h"
50 #include "lto-symtab.h"
51 #include "lto-streamer.h"
52 #include "data-streamer.h"
53 #include "gimple-streamer.h"
54 #include "tree-streamer.h"
55 #include "streamer-hooks.h"
56 #include "cfgloop.h"
57 #include "builtins.h"
58
59
60 static void lto_write_tree (struct output_block*, tree, bool);
61
/* Reset the current location info (file/line/column) tracked in the
   output block OB.  Locations are streamed as deltas against these
   fields (see lto_output_location), so they must start from a known
   state.  */

static void
clear_line_info (struct output_block *ob)
{
  ob->current_file = NULL;
  ob->current_line = 0;
  ob->current_col = 0;
}
71
72
73 /* Create the output block and return it. SECTION_TYPE is
74 LTO_section_function_body or LTO_static_initializer. */
75
76 struct output_block *
77 create_output_block (enum lto_section_type section_type)
78 {
79 struct output_block *ob = XCNEW (struct output_block);
80
81 ob->section_type = section_type;
82 ob->decl_state = lto_get_out_decl_state ();
83 ob->main_stream = XCNEW (struct lto_output_stream);
84 ob->string_stream = XCNEW (struct lto_output_stream);
85 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
86
87 if (section_type == LTO_section_function_body)
88 ob->cfg_stream = XCNEW (struct lto_output_stream);
89
90 clear_line_info (ob);
91
92 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
93 gcc_obstack_init (&ob->obstack);
94
95 return ob;
96 }
97
98
99 /* Destroy the output block OB. */
100
101 void
102 destroy_output_block (struct output_block *ob)
103 {
104 enum lto_section_type section_type = ob->section_type;
105
106 delete ob->string_hash_table;
107 ob->string_hash_table = NULL;
108
109 free (ob->main_stream);
110 free (ob->string_stream);
111 if (section_type == LTO_section_function_body)
112 free (ob->cfg_stream);
113
114 streamer_tree_cache_delete (ob->writer_cache);
115 obstack_free (&ob->obstack, NULL);
116
117 free (ob);
118 }
119
120
/* Look up NODE in the type table and write the index for it to OB.  */

static void
output_type_ref (struct output_block *ob, tree node)
{
  /* Emit the LTO_type_ref record tag followed by NODE's position in
     the per-file type index.  */
  streamer_write_record_start (ob, LTO_type_ref);
  lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
}
129
130
/* Return true if tree node T is written to various tables.  For these
   nodes, we sometimes want to write their physical representation
   (via lto_output_tree), and sometimes we need to emit an index
   reference into a table (via lto_output_tree_ref).  */

static bool
tree_is_indexable (tree t)
{
  /* Parameters and return values of functions of variably modified types
     must go to global stream, because they may be used in the type
     definition.  */
  if (TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
    return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
  /* IMPORTED_DECL is put into BLOCK and thus it never can be shared.  */
  else if (TREE_CODE (t) == IMPORTED_DECL)
    return false;
  /* Function-local declarations are streamed with the body they belong
     to rather than indexed in the global tables.  */
  else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
	    || TREE_CODE (t) == TYPE_DECL
	    || TREE_CODE (t) == CONST_DECL
	    || TREE_CODE (t) == NAMELIST_DECL)
	   && decl_function_context (t))
    return false;
  /* Debug expression decls are always local to a body.  */
  else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
    return false;
  /* Variably modified types need to be streamed alongside function
     bodies because they can refer to local entities.  Together with
     them we have to localize their members as well.
     ??? In theory that includes non-FIELD_DECLs as well.  */
  else if (TYPE_P (t)
	   && variably_modified_type_p (t, NULL_TREE))
    return false;
  else if (TREE_CODE (t) == FIELD_DECL
	   && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
    return false;
  else
    /* All remaining types, declarations and SSA names are indexable;
       every other node is streamed by value.  */
    return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
}
168
169
/* Output info about new location into bitpack BP.
   After outputting bitpack, lto_output_location_data has
   to be done to output actual data.  */

void
lto_output_location (struct output_block *ob, struct bitpack_d *bp,
		     location_t loc)
{
  expanded_location xloc;

  /* Strip any ad-hoc/block information; only the locus is streamed.  */
  loc = LOCATION_LOCUS (loc);
  bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
  if (loc == UNKNOWN_LOCATION)
    return;

  xloc = expand_location (loc);

  /* Locations are delta-encoded against the last streamed location
     tracked in OB: one "changed" bit per component, and a value only
     for the components that changed.  The reader must mirror this.  */
  bp_pack_value (bp, ob->current_file != xloc.file, 1);
  bp_pack_value (bp, ob->current_line != xloc.line, 1);
  bp_pack_value (bp, ob->current_col != xloc.column, 1);

  if (ob->current_file != xloc.file)
    bp_pack_var_len_unsigned (bp,
			      streamer_string_index (ob, xloc.file,
						     strlen (xloc.file) + 1,
						     true));
  ob->current_file = xloc.file;

  if (ob->current_line != xloc.line)
    bp_pack_var_len_unsigned (bp, xloc.line);
  ob->current_line = xloc.line;

  if (ob->current_col != xloc.column)
    bp_pack_var_len_unsigned (bp, xloc.column);
  ob->current_col = xloc.column;
}
206
207
/* If EXPR is an indexable tree node, output a reference to it to
   output block OB.  Otherwise, output the physical representation of
   EXPR to OB.  */

static void
lto_output_tree_ref (struct output_block *ob, tree expr)
{
  enum tree_code code;

  /* Types are always referenced through the type table.  */
  if (TYPE_P (expr))
    {
      output_type_ref (ob, expr);
      return;
    }

  /* For every other indexable node, emit a record tag naming the
     table to look it up in, followed by its index in that table.  */
  code = TREE_CODE (expr);
  switch (code)
    {
    case SSA_NAME:
      streamer_write_record_start (ob, LTO_ssa_name_ref);
      streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
      break;

    case FIELD_DECL:
      streamer_write_record_start (ob, LTO_field_decl_ref);
      lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case FUNCTION_DECL:
      streamer_write_record_start (ob, LTO_function_decl_ref);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case VAR_DECL:
    case DEBUG_EXPR_DECL:
      /* Only globals may be referenced here; locals are streamed with
	 their function body (see tree_is_indexable).  */
      gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
      /* Fall through.  */
    case PARM_DECL:
      streamer_write_record_start (ob, LTO_global_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case CONST_DECL:
      streamer_write_record_start (ob, LTO_const_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case IMPORTED_DECL:
      gcc_assert (decl_function_context (expr) == NULL);
      streamer_write_record_start (ob, LTO_imported_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TYPE_DECL:
      streamer_write_record_start (ob, LTO_type_decl_ref);
      lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMELIST_DECL:
      streamer_write_record_start (ob, LTO_namelist_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMESPACE_DECL:
      streamer_write_record_start (ob, LTO_namespace_decl_ref);
      lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case LABEL_DECL:
      streamer_write_record_start (ob, LTO_label_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case RESULT_DECL:
      streamer_write_record_start (ob, LTO_result_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TRANSLATION_UNIT_DECL:
      streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    default:
      /* No other node is indexable, so it should have been handled by
	 lto_output_tree.  */
      gcc_unreachable ();
    }
}
296
297
298 /* Return true if EXPR is a tree node that can be written to disk. */
299
300 static inline bool
301 lto_is_streamable (tree expr)
302 {
303 enum tree_code code = TREE_CODE (expr);
304
305 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
306 name version in lto_output_tree_ref (see output_ssa_names). */
307 return !is_lang_specific (expr)
308 && code != SSA_NAME
309 && code != CALL_EXPR
310 && code != LANG_TYPE
311 && code != MODIFY_EXPR
312 && code != INIT_EXPR
313 && code != TARGET_EXPR
314 && code != BIND_EXPR
315 && code != WITH_CLEANUP_EXPR
316 && code != STATEMENT_LIST
317 && (code == CASE_LABEL_EXPR
318 || code == DECL_EXPR
319 || TREE_CODE_CLASS (code) != tcc_statement);
320 }
321
322
323 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
324
325 static tree
326 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
327 {
328 gcc_checking_assert (DECL_P (expr)
329 && TREE_CODE (expr) != FUNCTION_DECL
330 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
331
332 /* Handle DECL_INITIAL for symbols. */
333 tree initial = DECL_INITIAL (expr);
334 if (TREE_CODE (expr) == VAR_DECL
335 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
336 && !DECL_IN_CONSTANT_POOL (expr)
337 && initial)
338 {
339 varpool_node *vnode;
340 /* Extra section needs about 30 bytes; do not produce it for simple
341 scalar values. */
342 if (TREE_CODE (DECL_INITIAL (expr)) == CONSTRUCTOR
343 || !(vnode = varpool_node::get (expr))
344 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
345 initial = error_mark_node;
346 }
347
348 return initial;
349 }
350
351
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  */

static void
lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
{
  /* Pack all the non-pointer fields in EXPR into a bitpack and write
     the resulting bitpack.  */
  bitpack_d bp = bitpack_create (ob->main_stream);
  streamer_pack_tree_bitfields (ob, &bp, expr);
  streamer_write_bitpack (&bp);

  /* Write all the pointer fields in EXPR.  */
  streamer_write_tree_body (ob, expr, ref_p);

  /* Write any LTO-specific data to OB.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    {
      /* Handle DECL_INITIAL for symbols.  Large initializers may be
	 replaced by error_mark_node here; see get_symbol_initial_value.  */
      tree initial = get_symbol_initial_value
			 (ob->decl_state->symtab_node_encoder, expr);
      stream_write_tree (ob, initial, ref_p);
    }
}
380
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  */

static void
lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
{
  if (!lto_is_streamable (expr))
    internal_error ("tree code %qs is not supported in LTO streams",
		    get_tree_code_name (TREE_CODE (expr)));

  /* Write the header, containing everything needed to materialize
     EXPR on the reading side.  */
  streamer_write_tree_header (ob, expr);

  lto_write_tree_1 (ob, expr, ref_p);

  /* Mark the end of EXPR.  */
  streamer_write_zero (ob);
}
402
/* Emit the physical representation of tree node EXPR to output block
   OB.  If THIS_REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.
   HASH is the streamer-cache hash value for EXPR.  */

static void
lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
		   bool ref_p, bool this_ref_p)
{
  unsigned ix;

  /* An indexable EXPR must be emitted as a reference, never by value.  */
  gcc_checking_assert (expr != NULL_TREE
		       && !(this_ref_p && tree_is_indexable (expr)));

  /* Record EXPR in the writer cache; it must not already be there,
     otherwise we would stream it twice.  */
  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
					      expr, hash, &ix);
  gcc_assert (!exists_p);
  if (streamer_handle_as_builtin_p (expr))
    {
      /* MD and NORMAL builtins do not need to be written out
	 completely as they are always instantiated by the
	 compiler on startup.  The only builtins that need to
	 be written out are BUILT_IN_FRONTEND.  For all other
	 builtins, we simply write the class and code.  */
      streamer_write_builtin (ob, expr);
    }
  else if (TREE_CODE (expr) == INTEGER_CST
	   && !TREE_OVERFLOW (expr))
    {
      /* Shared INTEGER_CST nodes are special because they need their
	 original type to be materialized by the reader (to implement
	 TYPE_CACHED_VALUES).  */
      streamer_write_integer_cst (ob, expr, ref_p);
    }
  else
    {
      /* This is the first time we see EXPR, write its fields
	 to OB.  */
      lto_write_tree (ob, expr, ref_p);
    }
}
443
/* Depth-first walk over the tree graph reachable from a given node,
   grouping trees into strongly connected components.  Constructing a
   DFS object performs the whole walk.  The dfsnum/low fields below are
   the classic SCC discovery-number/low-link state — presumably a
   Tarjan-style algorithm; confirm against DFS_write_tree's body.  */

class DFS
{
public:
  DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
       bool single_p);
  ~DFS ();

  /* A tree paired with its hash, as pushed on the SCC stack.  */
  struct scc_entry
  {
    tree t;
    hashval_t hash;
  };
  /* Stack of entries belonging to SCCs still being discovered.  */
  vec<scc_entry> sccstack;

private:
  /* Per-node walk state: DFS discovery number and the smallest
     discovery number reachable from the node (low-link).  */
  struct sccs
  {
    unsigned int dfsnum;
    unsigned int low;
  };

  /* qsort comparator for scc_entry, used to canonicalize SCC order.  */
  static int scc_entry_compare (const void *, const void *);

  /* Recurse over all tree edges originating from EXPR.  */
  void DFS_write_tree_body (struct output_block *ob,
			    tree expr, sccs *expr_state, bool ref_p,
			    bool single_p);

  /* Visit one edge from FROM_STATE to EXPR.  */
  void DFS_write_tree (struct output_block *ob, sccs *from_state,
		       tree expr, bool ref_p, bool this_ref_p,
		       bool single_p);
  /* Hash the SCC occupying sccstack[FIRST .. FIRST+SIZE-1].  */
  hashval_t
  hash_scc (struct output_block *ob, unsigned first, unsigned size);

  /* Next DFS discovery number to assign.  */
  unsigned int next_dfs_num;
  /* Map from tree to its walk state; the state objects live on
     sccstate_obstack, released wholesale in the destructor.  */
  hash_map<tree, sccs *> sccstate;
  struct obstack sccstate_obstack;
};
481
482 DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
483 bool single_p)
484 {
485 sccstack.create (0);
486 gcc_obstack_init (&sccstate_obstack);
487 next_dfs_num = 1;
488 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p, single_p);
489 }
490
491 DFS::~DFS ()
492 {
493 sccstack.release ();
494 obstack_free (&sccstate_obstack, NULL);
495 }
496
/* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
   DFS recurse for all tree edges originating from it.  The edge order
   below defines the on-disk layout and must match the reader.  */

void
DFS::DFS_write_tree_body (struct output_block *ob,
			  tree expr, sccs *expr_state, bool ref_p,
			  bool single_p)
{
#define DFS_follow_tree_edge(DEST) \
  DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p, single_p)

  enum tree_code code;

  code = TREE_CODE (expr);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (TREE_CODE (expr) != IDENTIFIER_NODE)
	DFS_follow_tree_edge (TREE_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
	DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      DFS_follow_tree_edge (TREE_REALPART (expr));
      DFS_follow_tree_edge (TREE_IMAGPART (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (expr)
	  && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
	  && ANON_AGGRNAME_P (DECL_NAME (expr)))
	;
      else
	DFS_follow_tree_edge (DECL_NAME (expr));
      DFS_follow_tree_edge (DECL_CONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      DFS_follow_tree_edge (DECL_SIZE (expr));
      DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));

      /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
	 special handling in LTO, it must be handled by streamer hooks.  */

      DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));

      /* Do not follow DECL_ABSTRACT_ORIGIN.  We cannot handle debug
	 information for early inlining so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      if ((TREE_CODE (expr) == VAR_DECL
	   || TREE_CODE (expr) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (expr))
	DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
      if (TREE_CODE (expr) == VAR_DECL)
	DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (TREE_CODE (expr) == TYPE_DECL)
	DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* Make sure we don't inadvertently set the assembler name.  */
      if (DECL_ASSEMBLER_NAME_SET_P (expr))
	DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
      DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
      DFS_follow_tree_edge (DECL_FCONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      DFS_follow_tree_edge (DECL_VINDEX (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
      /* Do not DECL_FUNCTION_SPECIFIC_TARGET.  They will be regenerated.  */
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      DFS_follow_tree_edge (TYPE_SIZE (expr));
      DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
      DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
      DFS_follow_tree_edge (TYPE_NAME (expr));
      /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
	 reconstructed during fixup.  */
      /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
	 during fixup.  */
      DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
      DFS_follow_tree_edge (TYPE_CONTEXT (expr));
      /* TYPE_CANONICAL is re-computed during type merging, so no need
	 to follow it here.  */
      DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	DFS_follow_tree_edge (TYPE_VALUES (expr));
      else if (TREE_CODE (expr) == ARRAY_TYPE)
	DFS_follow_tree_edge (TYPE_DOMAIN (expr));
      else if (RECORD_OR_UNION_TYPE_P (expr))
	for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
	  DFS_follow_tree_edge (t);
      else if (TREE_CODE (expr) == FUNCTION_TYPE
	       || TREE_CODE (expr) == METHOD_TYPE)
	DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));

      if (!POINTER_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_MINVAL (expr));
      DFS_follow_tree_edge (TYPE_MAXVAL (expr));
      if (RECORD_OR_UNION_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_BINFO (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      DFS_follow_tree_edge (TREE_PURPOSE (expr));
      DFS_follow_tree_edge (TREE_VALUE (expr));
      DFS_follow_tree_edge (TREE_CHAIN (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    {
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_OPERAND (expr, i));
      DFS_follow_tree_edge (TREE_BLOCK (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    {
      for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
	/* ??? FIXME.  See also streamer_write_chain.  */
	if (!(VAR_OR_FUNCTION_DECL_P (t)
	      && DECL_EXTERNAL (t)))
	  DFS_follow_tree_edge (t);

      DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));

      /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
	 handle - those that represent inlined function scopes.
	 For the rest, drop them on the floor instead of ICEing
	 in dwarf2out.c.  */
      if (inlined_function_outer_scope_p (expr))
	{
	  tree ultimate_origin = block_ultimate_origin (expr);
	  DFS_follow_tree_edge (ultimate_origin);
	}
      /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
	 information for early inlined BLOCKs so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
	 streaming time.  */

      /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
	 list is re-constructed from BLOCK_SUPERCONTEXT.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree t;

      /* Note that the number of BINFO slots has already been emitted in
	 EXPR's header (see streamer_write_tree_header) because this length
	 is needed to build the empty BINFO node on the reader side.  */
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
	DFS_follow_tree_edge (t);
      DFS_follow_tree_edge (BINFO_OFFSET (expr));
      DFS_follow_tree_edge (BINFO_VTABLE (expr));
      DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));

      /* The number of BINFO_BASE_ACCESSES has already been emitted in
	 EXPR's bitfield section.  */
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
	DFS_follow_tree_edge (t);

      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
	{
	  DFS_follow_tree_edge (index);
	  DFS_follow_tree_edge (value);
	}
    }

  if (code == OMP_CLAUSE)
    {
      int i;
      for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
	DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
      DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
    }

#undef DFS_follow_tree_edge
}
726
727 /* Return a hash value for the tree T.
728 CACHE holds hash values of trees outside current SCC. MAP, if non-NULL,
   may hold hash values of trees inside current SCC.  */
730
731 static hashval_t
732 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
733 {
734 inchash::hash hstate;
735
736 #define visit(SIBLING) \
737 do { \
738 unsigned ix; \
739 if (!SIBLING) \
740 hstate.add_int (0); \
741 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
742 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
743 else if (map) \
744 hstate.add_int (*map->get (SIBLING)); \
745 else \
746 hstate.add_int (1); \
747 } while (0)
748
749 /* Hash TS_BASE. */
750 enum tree_code code = TREE_CODE (t);
751 hstate.add_int (code);
752 if (!TYPE_P (t))
753 {
754 hstate.add_flag (TREE_SIDE_EFFECTS (t));
755 hstate.add_flag (TREE_CONSTANT (t));
756 hstate.add_flag (TREE_READONLY (t));
757 hstate.add_flag (TREE_PUBLIC (t));
758 }
759 hstate.add_flag (TREE_ADDRESSABLE (t));
760 hstate.add_flag (TREE_THIS_VOLATILE (t));
761 if (DECL_P (t))
762 hstate.add_flag (DECL_UNSIGNED (t));
763 else if (TYPE_P (t))
764 hstate.add_flag (TYPE_UNSIGNED (t));
765 if (TYPE_P (t))
766 hstate.add_flag (TYPE_ARTIFICIAL (t));
767 else
768 hstate.add_flag (TREE_NO_WARNING (t));
769 hstate.add_flag (TREE_NOTHROW (t));
770 hstate.add_flag (TREE_STATIC (t));
771 hstate.add_flag (TREE_PROTECTED (t));
772 hstate.add_flag (TREE_DEPRECATED (t));
773 if (code != TREE_BINFO)
774 hstate.add_flag (TREE_PRIVATE (t));
775 if (TYPE_P (t))
776 {
777 hstate.add_flag (TYPE_SATURATING (t));
778 hstate.add_flag (TYPE_ADDR_SPACE (t));
779 }
780 else if (code == SSA_NAME)
781 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
782 hstate.commit_flag ();
783
784 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
785 {
786 int i;
787 hstate.add_wide_int (TREE_INT_CST_NUNITS (t));
788 hstate.add_wide_int (TREE_INT_CST_EXT_NUNITS (t));
789 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
790 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
791 }
792
793 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
794 {
795 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
796 hstate.add_flag (r.cl);
797 hstate.add_flag (r.sign);
798 hstate.add_flag (r.signalling);
799 hstate.add_flag (r.canonical);
800 hstate.commit_flag ();
801 hstate.add_int (r.uexp);
802 hstate.add (r.sig, sizeof (r.sig));
803 }
804
805 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
806 {
807 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
808 hstate.add_int (f.mode);
809 hstate.add_int (f.data.low);
810 hstate.add_int (f.data.high);
811 }
812
813 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
814 {
815 hstate.add_wide_int (DECL_MODE (t));
816 hstate.add_flag (DECL_NONLOCAL (t));
817 hstate.add_flag (DECL_VIRTUAL_P (t));
818 hstate.add_flag (DECL_IGNORED_P (t));
819 hstate.add_flag (DECL_ABSTRACT (t));
820 hstate.add_flag (DECL_ARTIFICIAL (t));
821 hstate.add_flag (DECL_USER_ALIGN (t));
822 hstate.add_flag (DECL_PRESERVE_P (t));
823 hstate.add_flag (DECL_EXTERNAL (t));
824 hstate.add_flag (DECL_GIMPLE_REG_P (t));
825 hstate.commit_flag ();
826 hstate.add_int (DECL_ALIGN (t));
827 if (code == LABEL_DECL)
828 {
829 hstate.add_int (EH_LANDING_PAD_NR (t));
830 hstate.add_int (LABEL_DECL_UID (t));
831 }
832 else if (code == FIELD_DECL)
833 {
834 hstate.add_flag (DECL_PACKED (t));
835 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
836 hstate.add_int (DECL_OFFSET_ALIGN (t));
837 }
838 else if (code == VAR_DECL)
839 {
840 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
841 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
842 }
843 if (code == RESULT_DECL
844 || code == PARM_DECL
845 || code == VAR_DECL)
846 {
847 hstate.add_flag (DECL_BY_REFERENCE (t));
848 if (code == VAR_DECL
849 || code == PARM_DECL)
850 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
851 }
852 hstate.commit_flag ();
853 }
854
855 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
856 hstate.add_int (DECL_REGISTER (t));
857
858 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
859 {
860 hstate.add_flag (DECL_COMMON (t));
861 hstate.add_flag (DECL_DLLIMPORT_P (t));
862 hstate.add_flag (DECL_WEAK (t));
863 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
864 hstate.add_flag (DECL_COMDAT (t));
865 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
866 hstate.add_int (DECL_VISIBILITY (t));
867 if (code == VAR_DECL)
868 {
869 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
870 hstate.add_flag (DECL_HARD_REGISTER (t));
871 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
872 }
873 if (TREE_CODE (t) == FUNCTION_DECL)
874 {
875 hstate.add_flag (DECL_FINAL_P (t));
876 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
877 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
878 }
879 hstate.commit_flag ();
880 }
881
882 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
883 {
884 hstate.add_int (DECL_BUILT_IN_CLASS (t));
885 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
886 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
887 hstate.add_flag (DECL_UNINLINABLE (t));
888 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
889 hstate.add_flag (DECL_IS_NOVOPS (t));
890 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
891 hstate.add_flag (DECL_IS_MALLOC (t));
892 hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
893 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
894 hstate.add_flag (DECL_STATIC_CHAIN (t));
895 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
896 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
897 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
898 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
899 hstate.add_flag (DECL_PURE_P (t));
900 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
901 hstate.commit_flag ();
902 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
903 hstate.add_int (DECL_FUNCTION_CODE (t));
904 }
905
906 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
907 {
908 hstate.add_wide_int (TYPE_MODE (t));
909 hstate.add_flag (TYPE_STRING_FLAG (t));
910 hstate.add_flag (TYPE_NO_FORCE_BLK (t));
911 hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
912 hstate.add_flag (TYPE_PACKED (t));
913 hstate.add_flag (TYPE_RESTRICT (t));
914 hstate.add_flag (TYPE_USER_ALIGN (t));
915 hstate.add_flag (TYPE_READONLY (t));
916 if (RECORD_OR_UNION_TYPE_P (t))
917 {
918 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
919 hstate.add_flag (TYPE_FINAL_P (t));
920 }
921 else if (code == ARRAY_TYPE)
922 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
923 hstate.commit_flag ();
924 hstate.add_int (TYPE_PRECISION (t));
925 hstate.add_int (TYPE_ALIGN (t));
926 hstate.add_int ((TYPE_ALIAS_SET (t) == 0
927 || (!in_lto_p
928 && get_alias_set (t) == 0))
929 ? 0 : -1);
930 }
931
932 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
933 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
934 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
935
936 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
937 gcc_unreachable ();
938
939 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
940 hstate.add (t, sizeof (struct cl_optimization));
941
942 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
943 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
944
945 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
946 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
947
948 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
949 {
950 if (code != IDENTIFIER_NODE)
951 visit (TREE_TYPE (t));
952 }
953
954 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
955 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
956 visit (VECTOR_CST_ELT (t, i));
957
958 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
959 {
960 visit (TREE_REALPART (t));
961 visit (TREE_IMAGPART (t));
962 }
963
964 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
965 {
966 /* Drop names that were created for anonymous entities. */
967 if (DECL_NAME (t)
968 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
969 && ANON_AGGRNAME_P (DECL_NAME (t)))
970 ;
971 else
972 visit (DECL_NAME (t));
973 if (DECL_FILE_SCOPE_P (t))
974 ;
975 else
976 visit (DECL_CONTEXT (t));
977 }
978
979 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
980 {
981 visit (DECL_SIZE (t));
982 visit (DECL_SIZE_UNIT (t));
983 visit (DECL_ATTRIBUTES (t));
984 if ((code == VAR_DECL
985 || code == PARM_DECL)
986 && DECL_HAS_VALUE_EXPR_P (t))
987 visit (DECL_VALUE_EXPR (t));
988 if (code == VAR_DECL
989 && DECL_HAS_DEBUG_EXPR_P (t))
990 visit (DECL_DEBUG_EXPR (t));
991 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
992 be able to call get_symbol_initial_value. */
993 }
994
995 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
996 {
997 if (code == TYPE_DECL)
998 visit (DECL_ORIGINAL_TYPE (t));
999 }
1000
1001 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1002 {
1003 if (DECL_ASSEMBLER_NAME_SET_P (t))
1004 visit (DECL_ASSEMBLER_NAME (t));
1005 }
1006
1007 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1008 {
1009 visit (DECL_FIELD_OFFSET (t));
1010 visit (DECL_BIT_FIELD_TYPE (t));
1011 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1012 visit (DECL_FIELD_BIT_OFFSET (t));
1013 visit (DECL_FCONTEXT (t));
1014 }
1015
1016 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1017 {
1018 visit (DECL_VINDEX (t));
1019 visit (DECL_FUNCTION_PERSONALITY (t));
1020 /* Do not follow DECL_FUNCTION_SPECIFIC_TARGET. */
1021 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1022 }
1023
1024 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1025 {
1026 visit (TYPE_SIZE (t));
1027 visit (TYPE_SIZE_UNIT (t));
1028 visit (TYPE_ATTRIBUTES (t));
1029 visit (TYPE_NAME (t));
1030 visit (TYPE_MAIN_VARIANT (t));
1031 if (TYPE_FILE_SCOPE_P (t))
1032 ;
1033 else
1034 visit (TYPE_CONTEXT (t));
1035 visit (TYPE_STUB_DECL (t));
1036 }
1037
1038 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1039 {
1040 if (code == ENUMERAL_TYPE)
1041 visit (TYPE_VALUES (t));
1042 else if (code == ARRAY_TYPE)
1043 visit (TYPE_DOMAIN (t));
1044 else if (RECORD_OR_UNION_TYPE_P (t))
1045 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1046 visit (f);
1047 else if (code == FUNCTION_TYPE
1048 || code == METHOD_TYPE)
1049 visit (TYPE_ARG_TYPES (t));
1050 if (!POINTER_TYPE_P (t))
1051 visit (TYPE_MINVAL (t));
1052 visit (TYPE_MAXVAL (t));
1053 if (RECORD_OR_UNION_TYPE_P (t))
1054 visit (TYPE_BINFO (t));
1055 }
1056
1057 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1058 {
1059 visit (TREE_PURPOSE (t));
1060 visit (TREE_VALUE (t));
1061 visit (TREE_CHAIN (t));
1062 }
1063
1064 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1065 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1066 visit (TREE_VEC_ELT (t, i));
1067
1068 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1069 {
1070 hstate.add_wide_int (TREE_OPERAND_LENGTH (t));
1071 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1072 visit (TREE_OPERAND (t, i));
1073 }
1074
1075 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1076 {
1077 unsigned i;
1078 tree b;
1079 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1080 visit (b);
1081 visit (BINFO_OFFSET (t));
1082 visit (BINFO_VTABLE (t));
1083 visit (BINFO_VPTR_FIELD (t));
1084 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1085 visit (b);
1086 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1087 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1088 }
1089
1090 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1091 {
1092 unsigned i;
1093 tree index, value;
1094 hstate.add_wide_int (CONSTRUCTOR_NELTS (t));
1095 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1096 {
1097 visit (index);
1098 visit (value);
1099 }
1100 }
1101
1102 if (code == OMP_CLAUSE)
1103 {
1104 int i;
1105 HOST_WIDE_INT val;
1106
1107 hstate.add_wide_int (OMP_CLAUSE_CODE (t));
1108 switch (OMP_CLAUSE_CODE (t))
1109 {
1110 case OMP_CLAUSE_DEFAULT:
1111 val = OMP_CLAUSE_DEFAULT_KIND (t);
1112 break;
1113 case OMP_CLAUSE_SCHEDULE:
1114 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1115 break;
1116 case OMP_CLAUSE_DEPEND:
1117 val = OMP_CLAUSE_DEPEND_KIND (t);
1118 break;
1119 case OMP_CLAUSE_MAP:
1120 val = OMP_CLAUSE_MAP_KIND (t);
1121 break;
1122 case OMP_CLAUSE_PROC_BIND:
1123 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1124 break;
1125 case OMP_CLAUSE_REDUCTION:
1126 val = OMP_CLAUSE_REDUCTION_CODE (t);
1127 break;
1128 default:
1129 val = 0;
1130 break;
1131 }
1132 hstate.add_wide_int (val);
1133 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1134 visit (OMP_CLAUSE_OPERAND (t, i));
1135 visit (OMP_CLAUSE_CHAIN (t));
1136 }
1137
1138 return hstate.end ();
1139
1140 #undef visit
1141 }
1142
1143 /* Compare two SCC entries by their hash value for qsorting them. */
1144
1145 int
1146 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1147 {
1148 const scc_entry *p1 = (const scc_entry *) p1_;
1149 const scc_entry *p2 = (const scc_entry *) p2_;
1150 if (p1->hash < p2->hash)
1151 return -1;
1152 else if (p1->hash > p2->hash)
1153 return 1;
1154 return 0;
1155 }
1156
1157 /* Return a hash value for the SCC on the SCC stack from FIRST with
1158 size SIZE. */
1159
hashval_t
DFS::hash_scc (struct output_block *ob,
	       unsigned first, unsigned size)
{
  unsigned int last_classes = 0, iterations = 0;

  /* Compute hash values for the SCC members.  */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash = hash_tree (ob->writer_cache, NULL,
					sccstack[first+i].t);

  /* A singleton SCC's hash is just its member's hash.  */
  if (size == 1)
    return sccstack[first].hash;

  /* We aim to get a unique hash for every tree within the SCC and compute
     the hash value of the whole SCC by combining all values together in a
     stable (entry-point independent) order.  This guarantees that the same
     SCC regions within different translation units will get the same hash
     values and therefore will be merged at WPA time.

     Often the hashes are already unique.  In that case we compute the SCC
     hash by combining individual hash values in an increasing order.

     If there are duplicates we seek at least one tree with a unique hash
     (and pick the one with minimal hash and this property).  Then we obtain
     a stable order by a DFS walk starting from this unique tree and then
     use the index within this order to make individual hash values unique.

     If there is no tree with a unique hash, we iteratively propagate the
     hash values across the internal edges of the SCC.  This usually quickly
     leads to unique hashes.  Consider, for example, an SCC containing two
     pointers that are identical except for the type they point to and
     assume that these types are also part of the SCC.  The propagation
     will add the points-to type information into their hash values.  */
  do
    {
      /* Sort the SCC so we can easily check for uniqueness.  */
      qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);

      unsigned int classes = 1;
      int firstunique = -1;

      /* Find the tree with lowest unique hash (if it exists) and compute
	 the number of equivalence classes.  */
      if (sccstack[first].hash != sccstack[first+1].hash)
	firstunique = 0;
      for (unsigned i = 1; i < size; ++i)
	if (sccstack[first+i-1].hash != sccstack[first+i].hash)
	  {
	    classes++;
	    if (firstunique == -1
		&& (i == size - 1
		    || sccstack[first+i+1].hash != sccstack[first+i].hash))
	      firstunique = i;
	  }

      /* If we found a tree with a unique hash, stop the iteration.  */
      if (firstunique != -1
	  /* Also terminate if we run out of iterations or if the number of
	     equivalence classes is no longer increasing.
	     For example a cyclic list of trees that are all equivalent will
	     never have a unique entry point; we however do not build such
	     SCCs in our IL.  */
	  || classes <= last_classes || iterations > 16)
	{
          hashval_t scc_hash;

	  /* If some hashes are not unique (CLASSES != SIZE), use the DFS
	     walk starting from FIRSTUNIQUE to obtain a stable order.  */
	  if (classes != size && firstunique != -1)
	    {
	      hash_map <tree, hashval_t> map(size*2);

	      /* Store hash values into a map, so we can associate them with
		 the reordered SCC.  */
	      for (unsigned i = 0; i < size; ++i)
		map.put (sccstack[first+i].t, sccstack[first+i].hash);

	      /* Re-walk the SCC rooted at the unique entry point to obtain
		 an entry-point independent member order.  */
	      DFS again (ob, sccstack[first+firstunique].t, false, false, true);
	      gcc_assert (again.sccstack.length () == size);

	      memcpy (sccstack.address () + first,
		      again.sccstack.address (),
		      sizeof (scc_entry) * size);

	      /* Update hash values of individual members by hashing in the
		 index within the stable order.  This ensures uniqueness.
		 Also compute the scc_hash by mixing in all hash values in
		 the stable order we obtained.  */
	      sccstack[first].hash = *map.get (sccstack[first].t);
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		{
		  sccstack[first+i].hash
		    = iterative_hash_hashval_t (i,
						*map.get (sccstack[first+i].t));
		  scc_hash = iterative_hash_hashval_t (scc_hash,
						       sccstack[first+i].hash);
		}
	    }
	  /* If we got unique hash values for each tree, then sort already
	     ensured entry-point independent order.  Only compute the final
	     SCC hash.

	     If we failed to find the unique entry point, we go by the same
	     route.  We will eventually introduce unwanted hash conflicts.  */
	  else
	    {
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		scc_hash = iterative_hash_hashval_t (scc_hash,
						     sccstack[first+i].hash);
	      /* We can not 100% guarantee that the hash will not conflict
		 in a way so the unique hash is not found.  This however
		 should be an extremely rare situation.  ICE for now so
		 possible issues are found and evaluated.  */
	      gcc_checking_assert (classes == size);
	    }

	  /* To avoid conflicts across SCCs, iteratively hash the whole SCC
	     hash into the hash of each of the elements.  */
	  for (unsigned i = 0; i < size; ++i)
	    sccstack[first+i].hash
	      = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
	  return scc_hash;
	}

      last_classes = classes;
      iterations++;

      /* We failed to identify the entry point; propagate hash values across
	 the edges.  */
      {
	hash_map <tree, hashval_t> map(size*2);
	for (unsigned i = 0; i < size; ++i)
	  map.put (sccstack[first+i].t, sccstack[first+i].hash);

	for (unsigned i = 0; i < size; i++)
	  sccstack[first+i].hash = hash_tree (ob->writer_cache, &map,
					      sccstack[first+i].t);
      }
    }
  while (true);
}
1305
1306 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1307 already in the streamer cache. Main routine called for
1308 each visit of EXPR. */
1309
void
DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
		     tree expr, bool ref_p, bool this_ref_p, bool single_p)
{
  unsigned ix;

  /* Handle special cases.  */
  if (expr == NULL_TREE)
    return;

  /* Do not DFS walk into indexable trees.  */
  if (this_ref_p && tree_is_indexable (expr))
    return;

  /* Check if we already streamed EXPR.  */
  if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
    return;

  sccs **slot = &sccstate.get_or_insert (expr);
  sccs *cstate = *slot;
  if (!cstate)
    {
      scc_entry e = { expr, 0 };
      /* Not yet visited.  DFS recurse and push it onto the stack.
	 dfsnum/low implement the usual SCC low-link bookkeeping.  */
      *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
      sccstack.safe_push (e);
      cstate->dfsnum = next_dfs_num++;
      cstate->low = cstate->dfsnum;

      if (streamer_handle_as_builtin_p (expr))
	;
      else if (TREE_CODE (expr) == INTEGER_CST
	       && !TREE_OVERFLOW (expr))
	/* For INTEGER_CSTs only their type needs walking; the constant
	   itself is streamed inline with its header.  */
	DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p, single_p);
      else
	{
	  DFS_write_tree_body (ob, expr, cstate, ref_p, single_p);

	  /* Walk any LTO-specific edges.  */
	  if (DECL_P (expr)
	      && TREE_CODE (expr) != FUNCTION_DECL
	      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
	    {
	      /* Handle DECL_INITIAL for symbols.  */
	      tree initial = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
						       expr);
	      DFS_write_tree (ob, cstate, initial, ref_p, ref_p, single_p);
	    }
	}

      /* See if we found an SCC.  */
      if (cstate->low == cstate->dfsnum)
	{
	  unsigned first, size;
	  tree x;

	  /* If we are re-walking a single leaf-SCC just return and
	     let the caller access the sccstack.  */
	  if (single_p)
	    return;

	  /* Pop the SCC and compute its size.  */
	  first = sccstack.length ();
	  do
	    {
	      x = sccstack[--first].t;
	    }
	  while (x != expr);
	  size = sccstack.length () - first;

	  /* No need to compute hashes for LTRANS units, we don't perform
	     any merging there.  */
	  hashval_t scc_hash = 0;
	  unsigned scc_entry_len = 0;
	  if (!flag_wpa)
	    {
	      scc_hash = hash_scc (ob, first, size);

	      /* Put the entries with the least number of collisions first.  */
	      unsigned entry_start = 0;
	      scc_entry_len = size + 1;
	      for (unsigned i = 0; i < size;)
		{
		  unsigned from = i;
		  for (i = i + 1; i < size
		       && (sccstack[first + i].hash
			   == sccstack[first + from].hash); ++i)
		    ;
		  if (i - from < scc_entry_len)
		    {
		      scc_entry_len = i - from;
		      entry_start = from;
		    }
		}
	      /* Rotate the chosen run of entries to the front of the SCC.  */
	      for (unsigned i = 0; i < scc_entry_len; ++i)
		{
		  scc_entry tem = sccstack[first + i];
		  sccstack[first + i] = sccstack[first + entry_start + i];
		  sccstack[first + entry_start + i] = tem;
		}

	      if (scc_entry_len == 1)
		; /* We already sorted SCC deterministically in hash_scc.  */
	      else
		/* Check that we have only one SCC.
		   Naturally we may have conflicts if hash function is not
		   strong enough.  Lets see how far this gets.  */
		{
#ifdef ENABLE_CHECKING
		  gcc_unreachable ();
#endif
		}
	    }

	  /* Write LTO_tree_scc.  */
	  streamer_write_record_start (ob, LTO_tree_scc);
	  streamer_write_uhwi (ob, size);
	  streamer_write_uhwi (ob, scc_hash);

	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
	     All INTEGER_CSTs need to be handled this way as we need
	     their type to materialize them.  Also builtins are handled
	     this way.
	     ??? We still wrap these in LTO_tree_scc so at the
	     input side we can properly identify the tree we want
	     to ultimately return.  */
	  if (size == 1)
	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
	  else
	    {
	      /* Write the size of the SCC entry candidates.  */
	      streamer_write_uhwi (ob, scc_entry_len);

	      /* Write all headers and populate the streamer cache.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  hashval_t hash = sccstack[first+i].hash;
		  tree t = sccstack[first+i].t;
		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
							      t, hash, &ix);
		  gcc_assert (!exists_p);

		  if (!lto_is_streamable (t))
		    internal_error ("tree code %qs is not supported "
				    "in LTO streams",
				    get_tree_code_name (TREE_CODE (t)));

		  gcc_checking_assert (!streamer_handle_as_builtin_p (t));

		  /* Write the header, containing everything needed to
		     materialize EXPR on the reading side.  */
		  streamer_write_tree_header (ob, t);
		}

	      /* Write the bitpacks and tree references.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);

		  /* Mark the end of the tree.  */
		  streamer_write_zero (ob);
		}
	    }

	  /* Finally truncate the vector.  */
	  sccstack.truncate (first);

	  if (from_state)
	    from_state->low = MIN (from_state->low, cstate->low);
	  return;
	}

      /* Propagate our low-link to the parent state.  */
      if (from_state)
	from_state->low = MIN (from_state->low, cstate->low);
    }
  /* EXPR was already on the stack (a back edge); update the caller's
     low-link with our DFS number.  */
  gcc_checking_assert (from_state);
  if (cstate->dfsnum < from_state->dfsnum)
    from_state->low = MIN (cstate->dfsnum, from_state->low);
}
1489
1490
1491 /* Emit the physical representation of tree node EXPR to output block
1492 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
1493 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1494
void
lto_output_tree (struct output_block *ob, tree expr,
		 bool ref_p, bool this_ref_p)
{
  unsigned ix;
  bool existed_p;

  /* A NULL tree is streamed as a bare LTO_null record.  */
  if (expr == NULL_TREE)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  /* Indexable trees are streamed as references rather than by value.  */
  if (this_ref_p && tree_is_indexable (expr))
    {
      lto_output_tree_ref (ob, expr);
      return;
    }

  existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
  if (existed_p)
    {
      /* If a node has already been streamed out, make sure that
	 we don't write it more than once.  Otherwise, the reader
	 will instantiate two different nodes for the same object.  */
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
  else
    {
      /* This is the first time we see EXPR, write all reachable
	 trees to OB.  */
      static bool in_dfs_walk;

      /* Protect against recursion which means disconnect between
	 what tree edges we walk in the DFS walk and what edges
	 we stream out.  */
      gcc_assert (!in_dfs_walk);

      /* Start the DFS walk; it streams all SCCs of trees reachable
	 from EXPR and enters them into the writer cache.  */
      in_dfs_walk = true;
      DFS (ob, expr, ref_p, this_ref_p, false);
      in_dfs_walk = false;

      /* Finally append a reference to the tree we were writing.
	 ??? If expr ended up as a singleton we could have
	 inlined it here and avoid outputting a reference.  */
      existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
      gcc_assert (existed_p);
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
}
1556
1557
1558 /* Output to OB a list of try/catch handlers starting with FIRST. */
1559
1560 static void
1561 output_eh_try_list (struct output_block *ob, eh_catch first)
1562 {
1563 eh_catch n;
1564
1565 for (n = first; n; n = n->next_catch)
1566 {
1567 streamer_write_record_start (ob, LTO_eh_catch);
1568 stream_write_tree (ob, n->type_list, true);
1569 stream_write_tree (ob, n->filter_list, true);
1570 stream_write_tree (ob, n->label, true);
1571 }
1572
1573 streamer_write_record_start (ob, LTO_null);
1574 }
1575
1576
/* Output EH region R to OB.  A NULL region is emitted as a bare
   LTO_null record.  */
1580
1581 static void
1582 output_eh_region (struct output_block *ob, eh_region r)
1583 {
1584 enum LTO_tags tag;
1585
1586 if (r == NULL)
1587 {
1588 streamer_write_record_start (ob, LTO_null);
1589 return;
1590 }
1591
1592 if (r->type == ERT_CLEANUP)
1593 tag = LTO_ert_cleanup;
1594 else if (r->type == ERT_TRY)
1595 tag = LTO_ert_try;
1596 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1597 tag = LTO_ert_allowed_exceptions;
1598 else if (r->type == ERT_MUST_NOT_THROW)
1599 tag = LTO_ert_must_not_throw;
1600 else
1601 gcc_unreachable ();
1602
1603 streamer_write_record_start (ob, tag);
1604 streamer_write_hwi (ob, r->index);
1605
1606 if (r->outer)
1607 streamer_write_hwi (ob, r->outer->index);
1608 else
1609 streamer_write_zero (ob);
1610
1611 if (r->inner)
1612 streamer_write_hwi (ob, r->inner->index);
1613 else
1614 streamer_write_zero (ob);
1615
1616 if (r->next_peer)
1617 streamer_write_hwi (ob, r->next_peer->index);
1618 else
1619 streamer_write_zero (ob);
1620
1621 if (r->type == ERT_TRY)
1622 {
1623 output_eh_try_list (ob, r->u.eh_try.first_catch);
1624 }
1625 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1626 {
1627 stream_write_tree (ob, r->u.allowed.type_list, true);
1628 stream_write_tree (ob, r->u.allowed.label, true);
1629 streamer_write_uhwi (ob, r->u.allowed.filter);
1630 }
1631 else if (r->type == ERT_MUST_NOT_THROW)
1632 {
1633 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1634 bitpack_d bp = bitpack_create (ob->main_stream);
1635 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1636 streamer_write_bitpack (&bp);
1637 }
1638
1639 if (r->landing_pads)
1640 streamer_write_hwi (ob, r->landing_pads->index);
1641 else
1642 streamer_write_zero (ob);
1643 }
1644
1645
1646 /* Output landing pad LP to OB. */
1647
static void
output_eh_lp (struct output_block *ob, eh_landing_pad lp)
{
  /* A missing landing pad is represented by a bare LTO_null record.  */
  if (lp == NULL)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  streamer_write_record_start (ob, LTO_eh_landing_pad);
  streamer_write_hwi (ob, lp->index);
  /* Stream the index of the next landing pad, or zero if none.  */
  if (lp->next_lp)
    streamer_write_hwi (ob, lp->next_lp->index);
  else
    streamer_write_zero (ob);

  /* Stream the index of the owning EH region, or zero if none.  */
  if (lp->region)
    streamer_write_hwi (ob, lp->region->index);
  else
    streamer_write_zero (ob);

  stream_write_tree (ob, lp->post_landing_pad, true);
}
1671
1672
1673 /* Output the existing eh_table to OB. */
1674
static void
output_eh_regions (struct output_block *ob, struct function *fn)
{
  /* Only emit a table when FN actually has EH regions; otherwise the
     terminating LTO_null below is the entire record.  */
  if (fn->eh && fn->eh->region_tree)
    {
      unsigned i;
      eh_region eh;
      eh_landing_pad lp;
      tree ttype;

      streamer_write_record_start (ob, LTO_eh_table);

      /* Emit the index of the root of the EH region tree.  */
      streamer_write_hwi (ob, fn->eh->region_tree->index);

      /* Emit all the EH regions in the region array.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
	output_eh_region (ob, eh);

      /* Emit all landing pads.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
	output_eh_lp (ob, lp);

      /* Emit all the runtime type data.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
	stream_write_tree (ob, ttype, true);

      /* Emit the table of action chains.  The representation differs
	 for the ARM EABI unwinder.  */
      if (targetm.arm_eabi_unwinder)
	{
	  tree t;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
	    stream_write_tree (ob, t, true);
	}
      else
	{
	  uchar c;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
	    streamer_write_char_stream (ob->main_stream, c);
	}
    }

  /* The LTO_null either terminates the record or indicates that there
     are no eh_records at all.  */
  streamer_write_record_start (ob, LTO_null);
}
1726
1727
1728 /* Output all of the active ssa names to the ssa_names stream. */
1729
1730 static void
1731 output_ssa_names (struct output_block *ob, struct function *fn)
1732 {
1733 unsigned int i, len;
1734
1735 len = vec_safe_length (SSANAMES (fn));
1736 streamer_write_uhwi (ob, len);
1737
1738 for (i = 1; i < len; i++)
1739 {
1740 tree ptr = (*SSANAMES (fn))[i];
1741
1742 if (ptr == NULL_TREE
1743 || SSA_NAME_IN_FREE_LIST (ptr)
1744 || virtual_operand_p (ptr))
1745 continue;
1746
1747 streamer_write_uhwi (ob, i);
1748 streamer_write_char_stream (ob->main_stream,
1749 SSA_NAME_IS_DEFAULT_DEF (ptr));
1750 if (SSA_NAME_VAR (ptr))
1751 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1752 else
1753 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1754 stream_write_tree (ob, TREE_TYPE (ptr), true);
1755 }
1756
1757 streamer_write_zero (ob);
1758 }
1759
1760
1761 /* Output a wide-int. */
1762
1763 static void
1764 streamer_write_wi (struct output_block *ob,
1765 const widest_int &w)
1766 {
1767 int len = w.get_len ();
1768
1769 streamer_write_uhwi (ob, w.get_precision ());
1770 streamer_write_uhwi (ob, len);
1771 for (int i = 0; i < len; i++)
1772 streamer_write_hwi (ob, w.elt (i));
1773 }
1774
1775
1776 /* Output the cfg. */
1777
static void
output_cfg (struct output_block *ob, struct function *fn)
{
  struct lto_output_stream *tmp_stream = ob->main_stream;
  basic_block bb;

  /* Temporarily redirect the main stream so everything below goes to
     the dedicated CFG stream; restored at the end.  */
  ob->main_stream = ob->cfg_stream;

  streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
		       profile_status_for_fn (fn));

  /* Output the number of the highest basic block.  */
  streamer_write_uhwi (ob, last_basic_block_for_fn (fn));

  FOR_ALL_BB_FN (bb, fn)
    {
      edge_iterator ei;
      edge e;

      streamer_write_hwi (ob, bb->index);

      /* Output the successors and the edge flags.  */
      streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  streamer_write_uhwi (ob, e->dest->index);
	  streamer_write_hwi (ob, e->probability);
	  streamer_write_gcov_count (ob, e->count);
	  streamer_write_uhwi (ob, e->flags);
	}
    }

  /* -1 terminates the list of blocks with their edges.  */
  streamer_write_hwi (ob, -1);

  /* Output the block chain in next_bb order, also terminated by -1.
     NOTE(review): this walks cfun rather than FN; cfun == fn is
     asserted below, so the two are interchangeable here.  */
  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  while (bb->next_bb)
    {
      streamer_write_hwi (ob, bb->next_bb->index);
      bb = bb->next_bb;
    }

  streamer_write_hwi (ob, -1);

  /* ??? The cfgloop interface is tied to cfun.  */
  gcc_assert (cfun == fn);

  /* Output the number of loops.  */
  streamer_write_uhwi (ob, number_of_loops (fn));

  /* Output each loop, skipping the tree root which has number zero.  */
  for (unsigned i = 1; i < number_of_loops (fn); ++i)
    {
      struct loop *loop = get_loop (fn, i);

      /* Write the index of the loop header.  That's enough to rebuild
	 the loop tree on the reader side.  Stream -1 for an unused
	 loop entry.  */
      if (!loop)
	{
	  streamer_write_hwi (ob, -1);
	  continue;
	}
      else
	streamer_write_hwi (ob, loop->header->index);

      /* Write everything copy_loop_info copies.  */
      streamer_write_enum (ob->main_stream,
			   loop_estimation, EST_LAST, loop->estimate_state);
      streamer_write_hwi (ob, loop->any_upper_bound);
      if (loop->any_upper_bound)
	streamer_write_wi (ob, loop->nb_iterations_upper_bound);
      streamer_write_hwi (ob, loop->any_estimate);
      if (loop->any_estimate)
	streamer_write_wi (ob, loop->nb_iterations_estimate);

      /* Write OMP SIMD related info.  */
      streamer_write_hwi (ob, loop->safelen);
      streamer_write_hwi (ob, loop->dont_vectorize);
      streamer_write_hwi (ob, loop->force_vectorize);
      stream_write_tree (ob, loop->simduid, true);
    }

  /* Restore the main stream.  */
  ob->main_stream = tmp_stream;
}
1862
1863
1864 /* Create the header in the file using OB. If the section type is for
1865 a function, set FN to the decl for that function. */
1866
1867 void
1868 produce_asm (struct output_block *ob, tree fn)
1869 {
1870 enum lto_section_type section_type = ob->section_type;
1871 struct lto_function_header header;
1872 char *section_name;
1873
1874 if (section_type == LTO_section_function_body)
1875 {
1876 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1877 section_name = lto_get_section_name (section_type, name, NULL);
1878 }
1879 else
1880 section_name = lto_get_section_name (section_type, NULL, NULL);
1881
1882 lto_begin_section (section_name, !flag_wpa);
1883 free (section_name);
1884
1885 /* The entire header is stream computed here. */
1886 memset (&header, 0, sizeof (struct lto_function_header));
1887
1888 /* Write the header. */
1889 header.lto_header.major_version = LTO_major_version;
1890 header.lto_header.minor_version = LTO_minor_version;
1891
1892 header.compressed_size = 0;
1893
1894 if (section_type == LTO_section_function_body)
1895 header.cfg_size = ob->cfg_stream->total_size;
1896 header.main_size = ob->main_stream->total_size;
1897 header.string_size = ob->string_stream->total_size;
1898 lto_write_data (&header, sizeof header);
1899
1900 /* Put all of the gimple and the string table out the asm file as a
1901 block of text. */
1902 if (section_type == LTO_section_function_body)
1903 lto_write_stream (ob->cfg_stream);
1904 lto_write_stream (ob->main_stream);
1905 lto_write_stream (ob->string_stream);
1906
1907 lto_end_section ();
1908 }
1909
1910
1911 /* Output the base body of struct function FN using output block OB. */
1912
static void
output_struct_function_base (struct output_block *ob, struct function *fn)
{
  struct bitpack_d bp;
  unsigned i;
  tree t;

  /* Output the static chain and non-local goto save area.  */
  stream_write_tree (ob, fn->static_chain_decl, true);
  stream_write_tree (ob, fn->nonlocal_goto_save_area, true);

  /* Output all the local variables in the function.  */
  streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
  FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
    stream_write_tree (ob, t, true);

  /* Output current IL state of the function.  */
  streamer_write_uhwi (ob, fn->curr_properties);

  /* Write all the attributes for FN.  The reader must unpack these
     flags in exactly this order.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, fn->is_thunk, 1);
  bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
  bp_pack_value (&bp, fn->returns_pcc_struct, 1);
  bp_pack_value (&bp, fn->returns_struct, 1);
  bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
  bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
  bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
  bp_pack_value (&bp, fn->after_inlining, 1);
  bp_pack_value (&bp, fn->stdarg, 1);
  bp_pack_value (&bp, fn->has_nonlocal_label, 1);
  bp_pack_value (&bp, fn->calls_alloca, 1);
  bp_pack_value (&bp, fn->calls_setjmp, 1);
  bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
  bp_pack_value (&bp, fn->has_simduid_loops, 1);
  bp_pack_value (&bp, fn->va_list_fpr_size, 8);
  bp_pack_value (&bp, fn->va_list_gpr_size, 8);

  /* Output the function start and end loci.  */
  stream_output_location (ob, &bp, fn->function_start_locus);
  stream_output_location (ob, &bp, fn->function_end_locus);

  streamer_write_bitpack (&bp);
}
1957
1958
1959 /* Output the body of function NODE->DECL. */
1960
static void
output_function (struct cgraph_node *node)
{
  tree function;
  struct function *fn;
  basic_block bb;
  struct output_block *ob;

  function = node->decl;
  fn = DECL_STRUCT_FUNCTION (function);
  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->symbol = node;

  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  /* Set current_function_decl and cfun.  */
  push_cfun (fn);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  streamer_write_record_start (ob, LTO_function);

  /* Output decls for parameters and args.  */
  stream_write_tree (ob, DECL_RESULT (function), true);
  streamer_write_chain (ob, DECL_ARGUMENTS (function), true);

  /* Output DECL_INITIAL for the function, which contains the tree of
     lexical scopes.  */
  stream_write_tree (ob, DECL_INITIAL (function), true);

  /* We also stream abstract functions where we stream only stuff needed for
     debug info.  */
  if (gimple_has_body_p (function))
    {
      /* Flag that a body follows.  */
      streamer_write_uhwi (ob, 1);
      output_struct_function_base (ob, fn);

      /* Output all the SSA names used in the function.  */
      output_ssa_names (ob, fn);

      /* Output any exception handling regions.  */
      output_eh_regions (ob, fn);


      /* We will renumber the statements.  The code that does this uses
	 the same ordering that we use for serializing them so we can use
	 the same code on the other end and not have to write out the
	 statement numbers.  We do not assign UIDs to PHIs here because
	 virtual PHIs get re-computed on-the-fly which would make numbers
	 inconsistent.  */
      set_gimple_stmt_max_uid (cfun, 0);
      FOR_ALL_BB_FN (bb, cfun)
	{
	  gimple_stmt_iterator gsi;
	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);

	      /* Virtual PHIs are not going to be streamed.  */
	      if (!virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}
      /* To avoid keeping duplicate gimple IDs in the statements, renumber
	 virtual phis now.  */
      FOR_ALL_BB_FN (bb, cfun)
	{
	  gimple_stmt_iterator gsi;
	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      if (virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}

      /* Output the code for the function.  */
      FOR_ALL_BB_FN (bb, fn)
	output_bb (ob, bb, fn);

      /* The terminator for this function.  */
      streamer_write_record_start (ob, LTO_null);

      output_cfg (ob, fn);

      pop_cfun ();
    }
  else
    /* No body: flag an abstract function.  */
    streamer_write_uhwi (ob, 0);

  /* Create a section to hold the pickled output of this function.  */
  produce_asm (ob, function);

  destroy_output_block (ob);
}
2064
/* Output the initializer (constructor) of variable NODE->DECL.
   NOTE(review): the previous comment said "body of function" -- this
   routine streams a variable's DECL_INITIAL, not a function body.  */

static void
output_constructor (struct varpool_node *node)
{
  tree var = node->decl;
  struct output_block *ob;

  /* Variable initializers are streamed into function-body sections,
     one section per symbol.  */
  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->symbol = node;

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* Output DECL_INITIAL for the variable; this is the constructor
     tree itself.  */
  stream_write_tree (ob, DECL_INITIAL (var), true);

  /* Create a section to hold the pickled output of this initializer.  */
  produce_asm (ob, var);

  destroy_output_block (ob);
}
2090
2091
2092 /* Emit toplevel asms. */
2093
2094 void
2095 lto_output_toplevel_asms (void)
2096 {
2097 struct output_block *ob;
2098 struct asm_node *can;
2099 char *section_name;
2100 struct lto_asm_header header;
2101
2102 if (! asm_nodes)
2103 return;
2104
2105 ob = create_output_block (LTO_section_asm);
2106
2107 /* Make string 0 be a NULL string. */
2108 streamer_write_char_stream (ob->string_stream, 0);
2109
2110 for (can = asm_nodes; can; can = can->next)
2111 {
2112 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2113 streamer_write_hwi (ob, can->order);
2114 }
2115
2116 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2117
2118 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
2119 lto_begin_section (section_name, !flag_wpa);
2120 free (section_name);
2121
2122 /* The entire header stream is computed here. */
2123 memset (&header, 0, sizeof (header));
2124
2125 /* Write the header. */
2126 header.lto_header.major_version = LTO_major_version;
2127 header.lto_header.minor_version = LTO_minor_version;
2128
2129 header.main_size = ob->main_stream->total_size;
2130 header.string_size = ob->string_stream->total_size;
2131 lto_write_data (&header, sizeof header);
2132
2133 /* Put all of the gimple and the string table out the asm file as a
2134 block of text. */
2135 lto_write_stream (ob->main_stream);
2136 lto_write_stream (ob->string_stream);
2137
2138 lto_end_section ();
2139
2140 destroy_output_block (ob);
2141 }
2142
2143
2144 /* Copy the function body or variable constructor of NODE without deserializing. */
2145
2146 static void
2147 copy_function_or_variable (struct symtab_node *node)
2148 {
2149 tree function = node->decl;
2150 struct lto_file_decl_data *file_data = node->lto_file_data;
2151 const char *data;
2152 size_t len;
2153 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2154 char *section_name =
2155 lto_get_section_name (LTO_section_function_body, name, NULL);
2156 size_t i, j;
2157 struct lto_in_decl_state *in_state;
2158 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2159
2160 lto_begin_section (section_name, !flag_wpa);
2161 free (section_name);
2162
2163 /* We may have renamed the declaration, e.g., a static function. */
2164 name = lto_get_decl_name_mapping (file_data, name);
2165
2166 data = lto_get_section_data (file_data, LTO_section_function_body,
2167 name, &len);
2168 gcc_assert (data);
2169
2170 /* Do a bit copy of the function body. */
2171 lto_write_data (data, len);
2172
2173 /* Copy decls. */
2174 in_state =
2175 lto_get_function_in_decl_state (node->lto_file_data, function);
2176 gcc_assert (in_state);
2177
2178 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2179 {
2180 size_t n = in_state->streams[i].size;
2181 tree *trees = in_state->streams[i].trees;
2182 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2183
2184 /* The out state must have the same indices and the in state.
2185 So just copy the vector. All the encoders in the in state
2186 must be empty where we reach here. */
2187 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2188 encoder->trees.reserve_exact (n);
2189 for (j = 0; j < n; j++)
2190 encoder->trees.safe_push (trees[j]);
2191 }
2192
2193 lto_free_section_data (file_data, LTO_section_function_body, name,
2194 data, len);
2195 lto_end_section ();
2196 }
2197
2198 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2199
2200 static tree
2201 wrap_refs (tree *tp, int *ws, void *)
2202 {
2203 tree t = *tp;
2204 if (handled_component_p (t)
2205 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL)
2206 {
2207 tree decl = TREE_OPERAND (t, 0);
2208 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2209 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2210 build1 (ADDR_EXPR, ptrtype, decl),
2211 build_int_cst (ptrtype, 0));
2212 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2213 *ws = 0;
2214 }
2215 else if (TREE_CODE (t) == CONSTRUCTOR)
2216 ;
2217 else if (!EXPR_P (t))
2218 *ws = 0;
2219 return NULL_TREE;
2220 }
2221
/* Main entry point from the pass manager.  Streams out the body (or
   initializer) of every symbol the symtab encoder decided to encode,
   then the symbol table itself.  */

void
lto_output (void)
{
  struct lto_out_decl_state *decl_state;
#ifdef ENABLE_CHECKING
  /* Tracks DECL_UIDs already emitted so we can assert that each
     symbol's body is output at most once.  */
  bitmap output = lto_bitmap_alloc ();
#endif
  int i, n_nodes;
  lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* Initialize the streamer.  */
  lto_streamer_init ();

  n_nodes = lto_symtab_encoder_size (encoder);
  /* Process only the functions with bodies.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
	{
	  if (lto_symtab_encoder_encode_body_p (encoder, node)
	      && !node->alias)
	    {
#ifdef ENABLE_CHECKING
	      gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
	      bitmap_set_bit (output, DECL_UID (node->decl));
#endif
	      /* Each body gets its own out-decl state, recorded for
		 produce_asm_for_decls to serialize later.  */
	      decl_state = lto_new_out_decl_state ();
	      lto_push_out_decl_state (decl_state);
	      /* When there is no GIMPLE body (e.g. at WPA time the
		 body was never read in), copy the already-pickled
		 section verbatim instead of re-streaming it.  */
	      if (gimple_has_body_p (node->decl) || !flag_wpa)
		output_function (node);
	      else
		copy_function_or_variable (node);
	      gcc_assert (lto_get_out_decl_state () == decl_state);
	      lto_pop_out_decl_state ();
	      lto_record_function_out_decl_state (node->decl, decl_state);
	    }
	}
      else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
	{
	  /* Wrap symbol references inside the ctor in a type
	     preserving MEM_REF.  */
	  tree ctor = DECL_INITIAL (node->decl);
	  if (ctor && !in_lto_p)
	    walk_tree (&ctor, wrap_refs, NULL, NULL);
	  if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
	      && lto_symtab_encoder_encode_initializer_p (encoder, node)
	      && !node->alias)
	    {
	      timevar_push (TV_IPA_LTO_CTORS_OUT);
#ifdef ENABLE_CHECKING
	      gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
	      bitmap_set_bit (output, DECL_UID (node->decl));
#endif
	      decl_state = lto_new_out_decl_state ();
	      lto_push_out_decl_state (decl_state);
	      /* Stream the initializer, or at WPA time copy the
		 pickled section verbatim when it was never read in.  */
	      if (DECL_INITIAL (node->decl) != error_mark_node
		  || !flag_wpa)
		output_constructor (node);
	      else
		copy_function_or_variable (node);
	      gcc_assert (lto_get_out_decl_state () == decl_state);
	      lto_pop_out_decl_state ();
	      lto_record_function_out_decl_state (node->decl, decl_state);
	      timevar_pop (TV_IPA_LTO_CTORS_OUT);
	    }
	}
    }

  /* Emit the callgraph after emitting function bodies.  This needs to
     be done now to make sure that all the statements in every function
     have been renumbered so that edges can be associated with call
     statements using the statement UIDs.  */
  output_symtab ();

#ifdef ENABLE_CHECKING
  lto_bitmap_free (output);
#endif
}
2303
2304 /* Write each node in encoded by ENCODER to OB, as well as those reachable
2305 from it and required for correct representation of its semantics.
2306 Each node in ENCODER must be a global declaration or a type. A node
2307 is written only once, even if it appears multiple times in the
2308 vector. Certain transitively-reachable nodes, such as those
2309 representing expressions, may be duplicated, but such nodes
2310 must not appear in ENCODER itself. */
2311
2312 static void
2313 write_global_stream (struct output_block *ob,
2314 struct lto_tree_ref_encoder *encoder)
2315 {
2316 tree t;
2317 size_t index;
2318 const size_t size = lto_tree_ref_encoder_size (encoder);
2319
2320 for (index = 0; index < size; index++)
2321 {
2322 t = lto_tree_ref_encoder_get_tree (encoder, index);
2323 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2324 stream_write_tree (ob, t, false);
2325 }
2326 }
2327
2328
2329 /* Write a sequence of indices into the globals vector corresponding
2330 to the trees in ENCODER. These are used by the reader to map the
2331 indices used to refer to global entities within function bodies to
2332 their referents. */
2333
2334 static void
2335 write_global_references (struct output_block *ob,
2336 struct lto_tree_ref_encoder *encoder)
2337 {
2338 tree t;
2339 uint32_t index;
2340 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2341
2342 /* Write size and slot indexes as 32-bit unsigned numbers. */
2343 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2344 data[0] = size;
2345
2346 for (index = 0; index < size; index++)
2347 {
2348 uint32_t slot_num;
2349
2350 t = lto_tree_ref_encoder_get_tree (encoder, index);
2351 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2352 gcc_assert (slot_num != (unsigned)-1);
2353 data[index + 1] = slot_num;
2354 }
2355
2356 lto_write_data (data, sizeof (int32_t) * (size + 1));
2357 free (data);
2358 }
2359
2360
2361 /* Write all the streams in an lto_out_decl_state STATE using
2362 output block OB and output stream OUT_STREAM. */
2363
2364 void
2365 lto_output_decl_state_streams (struct output_block *ob,
2366 struct lto_out_decl_state *state)
2367 {
2368 int i;
2369
2370 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2371 write_global_stream (ob, &state->streams[i]);
2372 }
2373
2374
2375 /* Write all the references in an lto_out_decl_state STATE using
2376 output block OB and output stream OUT_STREAM. */
2377
2378 void
2379 lto_output_decl_state_refs (struct output_block *ob,
2380 struct lto_out_decl_state *state)
2381 {
2382 unsigned i;
2383 uint32_t ref;
2384 tree decl;
2385
2386 /* Write reference to FUNCTION_DECL. If there is not function,
2387 write reference to void_type_node. */
2388 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2389 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2390 gcc_assert (ref != (unsigned)-1);
2391 lto_write_data (&ref, sizeof (uint32_t));
2392
2393 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2394 write_global_references (ob, &state->streams[i]);
2395 }
2396
2397
2398 /* Return the written size of STATE. */
2399
2400 static size_t
2401 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2402 {
2403 int i;
2404 size_t size;
2405
2406 size = sizeof (int32_t); /* fn_ref. */
2407 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2408 {
2409 size += sizeof (int32_t); /* vector size. */
2410 size += (lto_tree_ref_encoder_size (&state->streams[i])
2411 * sizeof (int32_t));
2412 }
2413 return size;
2414 }
2415
2416
/* Write symbol T into STREAM in CACHE.  SEEN specifies symbols we wrote
   so far; ALIAS is true when T is being written on behalf of an alias.
   Each entry is a fixed-layout record consumed by the linker plugin:
   NUL-terminated name, NUL-terminated comdat group, one kind byte, one
   visibility byte, 8-byte size, 4-byte tree-cache slot number.  */

static void
write_symbol (struct streamer_tree_cache_d *cache,
	      tree t, hash_set<const char *> *seen, bool alias)
{
  const char *name;
  enum gcc_plugin_symbol_kind kind;
  enum gcc_plugin_symbol_visibility visibility;
  unsigned slot_num;
  uint64_t size;
  const char *comdat;
  unsigned char c;

  /* None of the following kinds of symbols are needed in the
     symbol table.  */
  if (!TREE_PUBLIC (t)
      || is_builtin_fn (t)
      || DECL_ABSTRACT (t)
      || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
    return;
  gcc_assert (TREE_CODE (t) != RESULT_DECL);

  gcc_assert (TREE_CODE (t) == VAR_DECL
	      || TREE_CODE (t) == FUNCTION_DECL);

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));

  /* This behaves like assemble_name_raw in varasm.c, performing the
     same name manipulations that ASM_OUTPUT_LABELREF does. */
  name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));

  /* hash_set::add returns true when NAME was already present -- skip
     symbols whose mangled name has already been emitted.  */
  if (seen->add (name))
    return;

  streamer_tree_cache_lookup (cache, t, &slot_num);
  gcc_assert (slot_num != (unsigned)-1);

  /* Classify the symbol for the plugin: undefined vs. defined, with
     weak/common refinements.  */
  if (DECL_EXTERNAL (t))
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKUNDEF;
      else
	kind = GCCPK_UNDEF;
    }
  else
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKDEF;
      else if (DECL_COMMON (t))
	kind = GCCPK_COMMON;
      else
	kind = GCCPK_DEF;

      /* When something is defined, it should have node attached.  */
      gcc_assert (alias || TREE_CODE (t) != VAR_DECL
		  || varpool_node::get (t)->definition);
      gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
		  || (cgraph_node::get (t)
		      && cgraph_node::get (t)->definition));
    }

  /* Imitate what default_elf_asm_output_external do.
     When symbol is external, we need to output it with DEFAULT visibility
     when compiling with -fvisibility=default, while with HIDDEN visibility
     when symbol has attribute (visibility("hidden")) specified.
     targetm.binds_local_p check DECL_VISIBILITY_SPECIFIED and gets this
     right. */

  if (DECL_EXTERNAL (t)
      && !targetm.binds_local_p (t))
    visibility = GCCPV_DEFAULT;
  else
    switch (DECL_VISIBILITY (t))
      {
      case VISIBILITY_DEFAULT:
	visibility = GCCPV_DEFAULT;
	break;
      case VISIBILITY_PROTECTED:
	visibility = GCCPV_PROTECTED;
	break;
      case VISIBILITY_HIDDEN:
	visibility = GCCPV_HIDDEN;
	break;
      case VISIBILITY_INTERNAL:
	visibility = GCCPV_INTERNAL;
	break;
      }

  /* The size field is only meaningful for commons (the linker needs it
     to pick the largest common); zero otherwise.  */
  if (kind == GCCPK_COMMON
      && DECL_SIZE_UNIT (t)
      && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
    size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
  else
    size = 0;

  if (DECL_ONE_ONLY (t))
    comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
  else
    comdat = "";

  /* Emit the fixed-layout record described above.  */
  lto_write_data (name, strlen (name) + 1);
  lto_write_data (comdat, strlen (comdat) + 1);
  c = (unsigned char) kind;
  lto_write_data (&c, 1);
  c = (unsigned char) visibility;
  lto_write_data (&c, 1);
  lto_write_data (&size, 8);
  lto_write_data (&slot_num, 4);
}
2528
2529 /* Return true if NODE should appear in the plugin symbol table. */
2530
2531 bool
2532 output_symbol_p (symtab_node *node)
2533 {
2534 struct cgraph_node *cnode;
2535 if (!node->real_symbol_p ())
2536 return false;
2537 /* We keep external functions in symtab for sake of inlining
2538 and devirtualization. We do not want to see them in symbol table as
2539 references unless they are really used. */
2540 cnode = dyn_cast <cgraph_node *> (node);
2541 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2542 && cnode->callers)
2543 return true;
2544
2545 /* Ignore all references from external vars initializers - they are not really
2546 part of the compilation unit until they are used by folding. Some symbols,
2547 like references to external construction vtables can not be referred to at all.
2548 We decide this at can_refer_decl_in_current_unit_p. */
2549 if (!node->definition || DECL_EXTERNAL (node->decl))
2550 {
2551 int i;
2552 struct ipa_ref *ref;
2553 for (i = 0; node->iterate_referring (i, ref); i++)
2554 {
2555 if (ref->use == IPA_REF_ALIAS)
2556 continue;
2557 if (is_a <cgraph_node *> (ref->referring))
2558 return true;
2559 if (!DECL_EXTERNAL (ref->referring->decl))
2560 return true;
2561 }
2562 return false;
2563 }
2564 return true;
2565 }
2566
2567
2568 /* Write an IL symbol table to OB.
2569 SET and VSET are cgraph/varpool node sets we are outputting. */
2570
2571 static void
2572 produce_symtab (struct output_block *ob)
2573 {
2574 struct streamer_tree_cache_d *cache = ob->writer_cache;
2575 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2576 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2577 lto_symtab_encoder_iterator lsei;
2578
2579 lto_begin_section (section_name, false);
2580 free (section_name);
2581
2582 hash_set<const char *> seen;
2583
2584 /* Write the symbol table.
2585 First write everything defined and then all declarations.
2586 This is necessary to handle cases where we have duplicated symbols. */
2587 for (lsei = lsei_start (encoder);
2588 !lsei_end_p (lsei); lsei_next (&lsei))
2589 {
2590 symtab_node *node = lsei_node (lsei);
2591
2592 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2593 continue;
2594 write_symbol (cache, node->decl, &seen, false);
2595 }
2596 for (lsei = lsei_start (encoder);
2597 !lsei_end_p (lsei); lsei_next (&lsei))
2598 {
2599 symtab_node *node = lsei_node (lsei);
2600
2601 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2602 continue;
2603 write_symbol (cache, node->decl, &seen, false);
2604 }
2605
2606 lto_end_section ();
2607 }
2608
2609
/* This pass is run after all of the functions are serialized and all
   of the IPA passes have written their serialized forms.  This pass
   causes the vector of all of the global decls and types used from
   this file to be written in to a section that can then be read in to
   recover these on other side.  The layout emitted here must match the
   reader: header, decl-state count, main out-decl state refs, then the
   per-function out-decl state refs.  */

void
produce_asm_for_decls (void)
{
  struct lto_out_decl_state *out_state;
  struct lto_out_decl_state *fn_out_state;
  struct lto_decl_header header;
  char *section_name;
  struct output_block *ob;
  unsigned idx, num_fns;
  size_t decl_state_size;
  int32_t num_decl_states;

  ob = create_output_block (LTO_section_decls);

  memset (&header, 0, sizeof (struct lto_decl_header));

  section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* All alias pairs must have been resolved by now.  */
  gcc_assert (!alias_pairs);

  /* Get rid of the global decl state hash tables to save some memory.  */
  out_state = lto_get_out_decl_state ();
  for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
    if (out_state->streams[i].tree_hash_table)
      {
	delete out_state->streams[i].tree_hash_table;
	out_state->streams[i].tree_hash_table = NULL;
      }

  /* Write the global symbols.  */
  lto_output_decl_state_streams (ob, out_state);
  num_fns = lto_function_decl_states.length ();
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_streams (ob, fn_out_state);
    }

  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  /* Currently not used.  This field would allow us to preallocate
     the globals vector, so that it need not be resized as it is extended.  */
  header.num_nodes = -1;

  /* Compute the total size of all decl out states. */
  decl_state_size = sizeof (int32_t);
  decl_state_size += lto_out_decl_state_written_size (out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      decl_state_size += lto_out_decl_state_written_size (fn_out_state);
    }
  header.decl_state_size = decl_state_size;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  lto_write_data (&header, sizeof header);

  /* Write the main out-decl state, followed by out-decl states of
     functions. */
  num_decl_states = num_fns + 1;
  lto_write_data (&num_decl_states, sizeof (num_decl_states));
  lto_output_decl_state_refs (ob, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state = lto_function_decl_states[idx];
      lto_output_decl_state_refs (ob, fn_out_state);
    }

  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  /* Write the symbol table.  It is used by linker to determine dependencies
     and thus we can skip it for WPA.  */
  if (!flag_wpa)
    produce_symtab (ob);

  /* Write command line opts.  */
  lto_write_options ();

  /* Deallocate memory and clean up.  */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_delete_out_decl_state (fn_out_state);
    }
  lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
  lto_function_decl_states.release ();
  destroy_output_block (ob);
}