gimple.h: Remove all includes.
[gcc.git] / gcc / lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2013 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "stor-layout.h"
29 #include "stringpool.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "hashtab.h"
35 #include "basic-block.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-expr.h"
39 #include "is-a.h"
40 #include "gimple.h"
41 #include "gimple-iterator.h"
42 #include "gimple-ssa.h"
43 #include "tree-ssanames.h"
44 #include "tree-pass.h"
45 #include "function.h"
46 #include "diagnostic-core.h"
47 #include "except.h"
48 #include "lto-symtab.h"
49 #include "lto-streamer.h"
50 #include "data-streamer.h"
51 #include "gimple-streamer.h"
52 #include "tree-streamer.h"
53 #include "streamer-hooks.h"
54 #include "cfgloop.h"
55
56
/* Reset the current-location state stored in output block OB so the
   next location streamed is encoded against a clean slate.  (The
   original comment referred to DATA_IN; the parameter is OB.)  */

static void
clear_line_info (struct output_block *ob)
{
  ob->current_file = NULL;
  ob->current_line = 0;
  ob->current_col = 0;
}
66
67
68 /* Create the output block and return it. SECTION_TYPE is
69 LTO_section_function_body or LTO_static_initializer. */
70
71 struct output_block *
72 create_output_block (enum lto_section_type section_type)
73 {
74 struct output_block *ob = XCNEW (struct output_block);
75
76 ob->section_type = section_type;
77 ob->decl_state = lto_get_out_decl_state ();
78 ob->main_stream = XCNEW (struct lto_output_stream);
79 ob->string_stream = XCNEW (struct lto_output_stream);
80 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true);
81
82 if (section_type == LTO_section_function_body)
83 ob->cfg_stream = XCNEW (struct lto_output_stream);
84
85 clear_line_info (ob);
86
87 ob->string_hash_table.create (37);
88 gcc_obstack_init (&ob->obstack);
89
90 return ob;
91 }
92
93
94 /* Destroy the output block OB. */
95
96 void
97 destroy_output_block (struct output_block *ob)
98 {
99 enum lto_section_type section_type = ob->section_type;
100
101 ob->string_hash_table.dispose ();
102
103 free (ob->main_stream);
104 free (ob->string_stream);
105 if (section_type == LTO_section_function_body)
106 free (ob->cfg_stream);
107
108 streamer_tree_cache_delete (ob->writer_cache);
109 obstack_free (&ob->obstack, NULL);
110
111 free (ob);
112 }
113
114
/* Look up NODE in the type table and write the index for it to OB.
   Emits an LTO_type_ref record followed by the index into the
   out-decl-state type table.  */

static void
output_type_ref (struct output_block *ob, tree node)
{
  streamer_write_record_start (ob, LTO_type_ref);
  lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
}
123
124
125 /* Return true if tree node T is written to various tables. For these
126 nodes, we sometimes want to write their phyiscal representation
127 (via lto_output_tree), and sometimes we need to emit an index
128 reference into a table (via lto_output_tree_ref). */
129
130 static bool
131 tree_is_indexable (tree t)
132 {
133 /* Parameters and return values of functions of variably modified types
134 must go to global stream, because they may be used in the type
135 definition. */
136 if (TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
137 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
138 else if (TREE_CODE (t) == VAR_DECL && decl_function_context (t)
139 && !TREE_STATIC (t))
140 return false;
141 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
142 return false;
143 /* Variably modified types need to be streamed alongside function
144 bodies because they can refer to local entities. Together with
145 them we have to localize their members as well.
146 ??? In theory that includes non-FIELD_DECLs as well. */
147 else if (TYPE_P (t)
148 && variably_modified_type_p (t, NULL_TREE))
149 return false;
150 else if (TREE_CODE (t) == FIELD_DECL
151 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
152 return false;
153 else
154 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
155 }
156
157
/* Output info about new location into bitpack BP.
   After outputting bitpack, lto_output_location_data has
   to be done to output actual data.

   The encoding is delta-based against the location state cached in
   OB (current_file/current_line/current_col): one presence bit per
   component, followed by the components that actually changed.  The
   reader must consume the fields in exactly this order.  */

void
lto_output_location (struct output_block *ob, struct bitpack_d *bp,
		     location_t loc)
{
  expanded_location xloc;

  loc = LOCATION_LOCUS (loc);
  /* A single bit flags the unknown location; nothing else follows.  */
  bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
  if (loc == UNKNOWN_LOCATION)
    return;

  xloc = expand_location (loc);

  /* Three change flags, then the changed values in the same order.  */
  bp_pack_value (bp, ob->current_file != xloc.file, 1);
  bp_pack_value (bp, ob->current_line != xloc.line, 1);
  bp_pack_value (bp, ob->current_col != xloc.column, 1);

  if (ob->current_file != xloc.file)
    bp_pack_var_len_unsigned (bp,
			      streamer_string_index (ob, xloc.file,
						     strlen (xloc.file) + 1,
						     true));
  ob->current_file = xloc.file;

  if (ob->current_line != xloc.line)
    bp_pack_var_len_unsigned (bp, xloc.line);
  ob->current_line = xloc.line;

  if (ob->current_col != xloc.column)
    bp_pack_var_len_unsigned (bp, xloc.column);
  ob->current_col = xloc.column;
}
194
195
/* If EXPR is an indexable tree node, output a reference to it to
   output block OB.  Otherwise, output the physical representation of
   EXPR to OB.  Each reference is a record-start marker followed by an
   index into the appropriate out-decl-state table.  */

static void
lto_output_tree_ref (struct output_block *ob, tree expr)
{
  enum tree_code code;

  /* All types are referenced through the type table.  */
  if (TYPE_P (expr))
    {
      output_type_ref (ob, expr);
      return;
    }

  code = TREE_CODE (expr);
  switch (code)
    {
    case SSA_NAME:
      /* SSA names are referenced by version number only; the reader
	 resolves it against the function's SSA name array.  */
      streamer_write_record_start (ob, LTO_ssa_name_ref);
      streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
      break;

    case FIELD_DECL:
      streamer_write_record_start (ob, LTO_field_decl_ref);
      lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case FUNCTION_DECL:
      streamer_write_record_start (ob, LTO_function_decl_ref);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case VAR_DECL:
    case DEBUG_EXPR_DECL:
      /* Only file-scope or static variables should reach this point;
	 automatic locals are not indexable (see tree_is_indexable).  */
      gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
      /* Fall through.  */
    case PARM_DECL:
      streamer_write_record_start (ob, LTO_global_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case CONST_DECL:
      streamer_write_record_start (ob, LTO_const_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case IMPORTED_DECL:
      gcc_assert (decl_function_context (expr) == NULL);
      streamer_write_record_start (ob, LTO_imported_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TYPE_DECL:
      streamer_write_record_start (ob, LTO_type_decl_ref);
      lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMESPACE_DECL:
      streamer_write_record_start (ob, LTO_namespace_decl_ref);
      lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case LABEL_DECL:
      streamer_write_record_start (ob, LTO_label_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case RESULT_DECL:
      streamer_write_record_start (ob, LTO_result_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TRANSLATION_UNIT_DECL:
      streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    default:
      /* No other node is indexable, so it should have been handled by
	 lto_output_tree.  */
      gcc_unreachable ();
    }
}
279
280
281 /* Return true if EXPR is a tree node that can be written to disk. */
282
283 static inline bool
284 lto_is_streamable (tree expr)
285 {
286 enum tree_code code = TREE_CODE (expr);
287
288 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
289 name version in lto_output_tree_ref (see output_ssa_names). */
290 return !is_lang_specific (expr)
291 && code != SSA_NAME
292 && code != CALL_EXPR
293 && code != LANG_TYPE
294 && code != MODIFY_EXPR
295 && code != INIT_EXPR
296 && code != TARGET_EXPR
297 && code != BIND_EXPR
298 && code != WITH_CLEANUP_EXPR
299 && code != STATEMENT_LIST
300 && code != OMP_CLAUSE
301 && (code == CASE_LABEL_EXPR
302 || code == DECL_EXPR
303 || TREE_CODE_CLASS (code) != tcc_statement);
304 }
305
306
307 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
308
309 static tree
310 get_symbol_initial_value (struct output_block *ob, tree expr)
311 {
312 gcc_checking_assert (DECL_P (expr)
313 && TREE_CODE (expr) != FUNCTION_DECL
314 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
315
316 /* Handle DECL_INITIAL for symbols. */
317 tree initial = DECL_INITIAL (expr);
318 if (TREE_CODE (expr) == VAR_DECL
319 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
320 && !DECL_IN_CONSTANT_POOL (expr)
321 && initial)
322 {
323 lto_symtab_encoder_t encoder;
324 struct varpool_node *vnode;
325
326 encoder = ob->decl_state->symtab_node_encoder;
327 vnode = varpool_get_node (expr);
328 if (!vnode
329 || !lto_symtab_encoder_encode_initializer_p (encoder,
330 vnode))
331 initial = error_mark_node;
332 }
333
334 return initial;
335 }
336
337
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  (An earlier revision also took an IX
   cache index; that parameter no longer exists.)  */

static void
lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
{
  /* Pack all the non-pointer fields in EXPR into a bitpack and write
     the resulting bitpack.  */
  bitpack_d bp = bitpack_create (ob->main_stream);
  streamer_pack_tree_bitfields (ob, &bp, expr);
  streamer_write_bitpack (&bp);

  /* Write all the pointer fields in EXPR.  */
  streamer_write_tree_body (ob, expr, ref_p);

  /* Write any LTO-specific data to OB.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    {
      /* Handle DECL_INITIAL for symbols; get_symbol_initial_value may
	 substitute error_mark_node for initializers that must not be
	 emitted here.  */
      tree initial = get_symbol_initial_value (ob, expr);
      stream_write_tree (ob, initial, ref_p);
    }
}
365
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  Emits the header, the body, and a
   terminating zero marker, in that fixed order.  */

static void
lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
{
  if (!lto_is_streamable (expr))
    internal_error ("tree code %qs is not supported in LTO streams",
		    get_tree_code_name (TREE_CODE (expr)));

  /* Write the header, containing everything needed to materialize
     EXPR on the reading side.  */
  streamer_write_tree_header (ob, expr);

  lto_write_tree_1 (ob, expr, ref_p);

  /* Mark the end of EXPR.  */
  streamer_write_zero (ob);
}
387
/* Emit the physical representation of tree node EXPR to output block
   OB.  HASH is EXPR's streaming hash used for the writer cache.  If
   THIS_REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  REF_P is used for streaming siblings of
   EXPR.  */

static void
lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
		   bool ref_p, bool this_ref_p)
{
  unsigned ix;

  /* An indexable node must have been emitted as a reference instead
     of reaching this point (when THIS_REF_P).  */
  gcc_checking_assert (expr != NULL_TREE
		       && !(this_ref_p && tree_is_indexable (expr)));

  /* Enter EXPR into the cache before writing so self-references
     resolve to its index; it must not have been seen before.  */
  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
					      expr, hash, &ix);
  gcc_assert (!exists_p);
  if (streamer_handle_as_builtin_p (expr))
    {
      /* MD and NORMAL builtins do not need to be written out
	 completely as they are always instantiated by the
	 compiler on startup.  The only builtins that need to
	 be written out are BUILT_IN_FRONTEND.  For all other
	 builtins, we simply write the class and code.  */
      streamer_write_builtin (ob, expr);
    }
  else if (TREE_CODE (expr) == INTEGER_CST
	   && !TREE_OVERFLOW (expr))
    {
      /* Shared INTEGER_CST nodes are special because they need their
	 original type to be materialized by the reader (to implement
	 TYPE_CACHED_VALUES).  */
      streamer_write_integer_cst (ob, expr, ref_p);
    }
  else
    {
      /* This is the first time we see EXPR, write its fields
	 to OB.  */
      lto_write_tree (ob, expr, ref_p);
    }
}
428
/* Per-node DFS state: pre-order number and the lowest DFS number
   reachable from the node.  The dfsnum/low pairing suggests Tarjan's
   SCC algorithm; the walker itself (DFS_write_tree) is defined
   elsewhere — confirm there.  */

struct sccs
{
  unsigned int dfsnum;
  unsigned int low;
};

/* A tree together with its streaming hash, as kept on the SCC stack.  */

struct scc_entry
{
  tree t;
  hashval_t hash;
};

/* Next DFS number to hand out.  */
static unsigned int next_dfs_num;
/* Stack of nodes belonging to SCCs still being discovered.  */
static vec<scc_entry> sccstack;
/* Map from tree node to its struct sccs state.  */
static struct pointer_map_t *sccstate;
/* Obstack the sccs state records are allocated on.  */
static struct obstack sccstate_obstack;

static void
DFS_write_tree (struct output_block *ob, sccs *from_state,
		tree expr, bool ref_p, bool this_ref_p);
449
/* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
   DFS recurse for all tree edges originating from it.

   NOTE(review): the set and order of edges followed here must mirror
   what streamer_write_tree_body emits, so the reader reconstructs the
   same graph — keep the two in sync when changing either.  */

static void
DFS_write_tree_body (struct output_block *ob,
		     tree expr, sccs *expr_state, bool ref_p)
{
#define DFS_follow_tree_edge(DEST) \
  DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)

  enum tree_code code;

  code = TREE_CODE (expr);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      /* IDENTIFIER_NODEs have no meaningful TREE_TYPE edge.  */
      if (TREE_CODE (expr) != IDENTIFIER_NODE)
	DFS_follow_tree_edge (TREE_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
	DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      DFS_follow_tree_edge (TREE_REALPART (expr));
      DFS_follow_tree_edge (TREE_IMAGPART (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (expr)
	  && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
	  && ANON_AGGRNAME_P (DECL_NAME (expr)))
	;
      else
	DFS_follow_tree_edge (DECL_NAME (expr));
      DFS_follow_tree_edge (DECL_CONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      DFS_follow_tree_edge (DECL_SIZE (expr));
      DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));

      /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
	 special handling in LTO, it must be handled by streamer hooks.  */

      DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));

      /* Do not follow DECL_ABSTRACT_ORIGIN.  We cannot handle debug
	 information for early inlining so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      if ((TREE_CODE (expr) == VAR_DECL
	   || TREE_CODE (expr) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (expr))
	DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
      if (TREE_CODE (expr) == VAR_DECL)
	DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (TREE_CODE (expr) == TYPE_DECL)
	DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
      DFS_follow_tree_edge (DECL_VINDEX (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* Make sure we don't inadvertently set the assembler name.  */
      if (DECL_ASSEMBLER_NAME_SET_P (expr))
	DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
      DFS_follow_tree_edge (DECL_SECTION_NAME (expr));
      DFS_follow_tree_edge (DECL_COMDAT_GROUP (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
      DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
      DFS_follow_tree_edge (DECL_FCONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      DFS_follow_tree_edge (TYPE_SIZE (expr));
      DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
      DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
      DFS_follow_tree_edge (TYPE_NAME (expr));
      /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
	 reconstructed during fixup.  */
      /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
	 during fixup.  */
      DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
      DFS_follow_tree_edge (TYPE_CONTEXT (expr));
      /* TYPE_CANONICAL is re-computed during type merging, so no need
	 to follow it here.  */
      DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	DFS_follow_tree_edge (TYPE_VALUES (expr));
      else if (TREE_CODE (expr) == ARRAY_TYPE)
	DFS_follow_tree_edge (TYPE_DOMAIN (expr));
      else if (RECORD_OR_UNION_TYPE_P (expr))
	for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
	  DFS_follow_tree_edge (t);
      else if (TREE_CODE (expr) == FUNCTION_TYPE
	       || TREE_CODE (expr) == METHOD_TYPE)
	DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));

      /* NOTE(review): TYPE_MINVAL is skipped for pointer types but
	 TYPE_MAXVAL is always followed — presumably because the
	 fields are overloaded differently on pointers; confirm
	 against tree.h before changing.  */
      if (!POINTER_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_MINVAL (expr));
      DFS_follow_tree_edge (TYPE_MAXVAL (expr));
      if (RECORD_OR_UNION_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_BINFO (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      DFS_follow_tree_edge (TREE_PURPOSE (expr));
      DFS_follow_tree_edge (TREE_VALUE (expr));
      DFS_follow_tree_edge (TREE_CHAIN (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    {
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_OPERAND (expr, i));
      DFS_follow_tree_edge (TREE_BLOCK (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    {
      for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
	/* ??? FIXME.  See also streamer_write_chain.  */
	if (!(VAR_OR_FUNCTION_DECL_P (t)
	      && DECL_EXTERNAL (t)))
	  DFS_follow_tree_edge (t);

      DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));

      /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
	 handle - those that represent inlined function scopes.
	 For the rest, drop them on the floor instead of ICEing
	 in dwarf2out.c.  */
      if (inlined_function_outer_scope_p (expr))
	{
	  tree ultimate_origin = block_ultimate_origin (expr);
	  DFS_follow_tree_edge (ultimate_origin);
	}
      /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
	 information for early inlined BLOCKs so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
	 streaming time.  */

      /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
	 list is re-constructed from BLOCK_SUPERCONTEXT.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree t;

      /* Note that the number of BINFO slots has already been emitted in
	 EXPR's header (see streamer_write_tree_header) because this length
	 is needed to build the empty BINFO node on the reader side.  */
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
	DFS_follow_tree_edge (t);
      DFS_follow_tree_edge (BINFO_OFFSET (expr));
      DFS_follow_tree_edge (BINFO_VTABLE (expr));
      DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));

      /* The number of BINFO_BASE_ACCESSES has already been emitted in
	 EXPR's bitfield section.  */
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
	DFS_follow_tree_edge (t);

      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
	{
	  DFS_follow_tree_edge (index);
	  DFS_follow_tree_edge (value);
	}
    }

#undef DFS_follow_tree_edge
}
672
/* Return a hash value for the tree T.

   Flags and constants are mixed in first; then the hashes of already
   cached neighbors are folded in via the `visit' macro.  The mixing
   order must stay deterministic within a run, since SCC members are
   compared by these hashes (see scc_entry_compare).  */

static hashval_t
hash_tree (struct streamer_tree_cache_d *cache, tree t)
{
/* Fold the cached hash of SIBLING into V, if SIBLING is non-NULL and
   already present in CACHE; otherwise contribute nothing.  */
#define visit(SIBLING) \
  do { \
    unsigned ix; \
    if (SIBLING && streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
      v = iterative_hash_hashval_t (streamer_tree_cache_get_hash (cache, ix), v); \
  } while (0)

  /* Hash TS_BASE.  */
  enum tree_code code = TREE_CODE (t);
  hashval_t v = iterative_hash_host_wide_int (code, 0);
  if (!TYPE_P (t))
    {
      v = iterative_hash_host_wide_int (TREE_SIDE_EFFECTS (t)
					| (TREE_CONSTANT (t) << 1)
					| (TREE_READONLY (t) << 2)
					| (TREE_PUBLIC (t) << 3), v);
    }
  v = iterative_hash_host_wide_int (TREE_ADDRESSABLE (t)
				    | (TREE_THIS_VOLATILE (t) << 1), v);
  if (DECL_P (t))
    v = iterative_hash_host_wide_int (DECL_UNSIGNED (t), v);
  else if (TYPE_P (t))
    v = iterative_hash_host_wide_int (TYPE_UNSIGNED (t), v);
  if (TYPE_P (t))
    v = iterative_hash_host_wide_int (TYPE_ARTIFICIAL (t), v);
  else
    v = iterative_hash_host_wide_int (TREE_NO_WARNING (t), v);
  v = iterative_hash_host_wide_int (TREE_NOTHROW (t)
				    | (TREE_STATIC (t) << 1)
				    | (TREE_PROTECTED (t) << 2)
				    | (TREE_DEPRECATED (t) << 3), v);
  /* NOTE(review): TREE_PRIVATE is skipped for BINFOs — presumably the
     bit is overloaded there; confirm against tree.h.  */
  if (code != TREE_BINFO)
    v = iterative_hash_host_wide_int (TREE_PRIVATE (t), v);
  if (TYPE_P (t))
    v = iterative_hash_host_wide_int (TYPE_SATURATING (t)
				      | (TYPE_ADDR_SPACE (t) << 1), v);
  else if (code == SSA_NAME)
    v = iterative_hash_host_wide_int (SSA_NAME_IS_DEFAULT_DEF (t), v);

  if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
    {
      v = iterative_hash_host_wide_int (TREE_INT_CST_LOW (t), v);
      v = iterative_hash_host_wide_int (TREE_INT_CST_HIGH (t), v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
    {
      REAL_VALUE_TYPE r = TREE_REAL_CST (t);
      v = iterative_hash_host_wide_int (r.cl, v);
      v = iterative_hash_host_wide_int (r.decimal
					| (r.sign << 1)
					| (r.signalling << 2)
					| (r.canonical << 3), v);
      v = iterative_hash_host_wide_int (r.uexp, v);
      for (unsigned i = 0; i < SIGSZ; ++i)
	v = iterative_hash_host_wide_int (r.sig[i], v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
    {
      FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
      v = iterative_hash_host_wide_int (f.mode, v);
      v = iterative_hash_host_wide_int (f.data.low, v);
      v = iterative_hash_host_wide_int (f.data.high, v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      v = iterative_hash_host_wide_int (DECL_MODE (t), v);
      v = iterative_hash_host_wide_int (DECL_NONLOCAL (t)
					| (DECL_VIRTUAL_P (t) << 1)
					| (DECL_IGNORED_P (t) << 2)
					| (DECL_ABSTRACT (t) << 3)
					| (DECL_ARTIFICIAL (t) << 4)
					| (DECL_USER_ALIGN (t) << 5)
					| (DECL_PRESERVE_P (t) << 6)
					| (DECL_EXTERNAL (t) << 7)
					| (DECL_GIMPLE_REG_P (t) << 8), v);
      v = iterative_hash_host_wide_int (DECL_ALIGN (t), v);
      if (code == LABEL_DECL)
	{
	  v = iterative_hash_host_wide_int (EH_LANDING_PAD_NR (t), v);
	  v = iterative_hash_host_wide_int (LABEL_DECL_UID (t), v);
	}
      else if (code == FIELD_DECL)
	{
	  v = iterative_hash_host_wide_int (DECL_PACKED (t)
					    | (DECL_NONADDRESSABLE_P (t) << 1),
					    v);
	  v = iterative_hash_host_wide_int (DECL_OFFSET_ALIGN (t), v);
	}
      else if (code == VAR_DECL)
	{
	  v = iterative_hash_host_wide_int (DECL_HAS_DEBUG_EXPR_P (t)
					    | (DECL_NONLOCAL_FRAME (t) << 1),
					    v);
	}
      if (code == RESULT_DECL
	  || code == PARM_DECL
	  || code == VAR_DECL)
	{
	  v = iterative_hash_host_wide_int (DECL_BY_REFERENCE (t), v);
	  if (code == VAR_DECL
	      || code == PARM_DECL)
	    v = iterative_hash_host_wide_int (DECL_HAS_VALUE_EXPR_P (t), v);
	}
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
    v = iterative_hash_host_wide_int (DECL_REGISTER (t), v);

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* NOTE(review): bit 5 is unused in this pack (the shifts jump
	 from 4 to 6) — looks historical; harmless for hashing.  */
      v = iterative_hash_host_wide_int ((DECL_COMMON (t))
					| (DECL_DLLIMPORT_P (t) << 1)
					| (DECL_WEAK (t) << 2)
					| (DECL_SEEN_IN_BIND_EXPR_P (t) << 3)
					| (DECL_COMDAT (t) << 4)
					| (DECL_VISIBILITY_SPECIFIED (t) << 6),
					v);
      v = iterative_hash_host_wide_int (DECL_VISIBILITY (t), v);
      if (code == VAR_DECL)
	{
	  /* DECL_IN_TEXT_SECTION is set during final asm output only.  */
	  v = iterative_hash_host_wide_int (DECL_HARD_REGISTER (t)
					    | (DECL_IN_CONSTANT_POOL (t) << 1),
					    v);
	  v = iterative_hash_host_wide_int (DECL_TLS_MODEL (t), v);
	}
      if (TREE_CODE (t) == FUNCTION_DECL)
	v = iterative_hash_host_wide_int (DECL_FINAL_P (t)
					  | (DECL_CXX_CONSTRUCTOR_P (t) << 1)
					  | (DECL_CXX_DESTRUCTOR_P (t) << 2),
					  v);
      if (VAR_OR_FUNCTION_DECL_P (t))
	v = iterative_hash_host_wide_int (DECL_INIT_PRIORITY (t), v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      v = iterative_hash_host_wide_int (DECL_BUILT_IN_CLASS (t), v);
      v = iterative_hash_host_wide_int (DECL_STATIC_CONSTRUCTOR (t)
					| (DECL_STATIC_DESTRUCTOR (t) << 1)
					| (DECL_UNINLINABLE (t) << 2)
					| (DECL_POSSIBLY_INLINED (t) << 3)
					| (DECL_IS_NOVOPS (t) << 4)
					| (DECL_IS_RETURNS_TWICE (t) << 5)
					| (DECL_IS_MALLOC (t) << 6)
					| (DECL_IS_OPERATOR_NEW (t) << 7)
					| (DECL_DECLARED_INLINE_P (t) << 8)
					| (DECL_STATIC_CHAIN (t) << 9)
					| (DECL_NO_INLINE_WARNING_P (t) << 10)
					| (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t) << 11)
					| (DECL_NO_LIMIT_STACK (t) << 12)
					| (DECL_DISREGARD_INLINE_LIMITS (t) << 13)
					| (DECL_PURE_P (t) << 14)
					| (DECL_LOOPING_CONST_OR_PURE_P (t) << 15), v);
      if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
	v = iterative_hash_host_wide_int (DECL_FUNCTION_CODE (t), v);
      if (DECL_STATIC_DESTRUCTOR (t))
	v = iterative_hash_host_wide_int (DECL_FINI_PRIORITY (t), v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      v = iterative_hash_host_wide_int (TYPE_MODE (t), v);
      v = iterative_hash_host_wide_int (TYPE_STRING_FLAG (t)
					| (TYPE_NO_FORCE_BLK (t) << 1)
					| (TYPE_NEEDS_CONSTRUCTING (t) << 2)
					| (TYPE_PACKED (t) << 3)
					| (TYPE_RESTRICT (t) << 4)
					| (TYPE_USER_ALIGN (t) << 5)
					| (TYPE_READONLY (t) << 6), v);
      if (RECORD_OR_UNION_TYPE_P (t))
	{
	  v = iterative_hash_host_wide_int (TYPE_TRANSPARENT_AGGR (t)
					    | (TYPE_FINAL_P (t) << 1), v);
	}
      else if (code == ARRAY_TYPE)
	v = iterative_hash_host_wide_int (TYPE_NONALIASED_COMPONENT (t), v);
      v = iterative_hash_host_wide_int (TYPE_PRECISION (t), v);
      v = iterative_hash_host_wide_int (TYPE_ALIGN (t), v);
      /* Only the distinction "alias set zero or not" is hashed; the
	 actual alias set number is not stable across runs.  */
      v = iterative_hash_host_wide_int ((TYPE_ALIAS_SET (t) == 0
					 || (!in_lto_p
					     && get_alias_set (t) == 0))
					? 0 : -1, v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
    v = iterative_hash (TRANSLATION_UNIT_LANGUAGE (t),
			strlen (TRANSLATION_UNIT_LANGUAGE (t)), v);

  if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
    v = iterative_hash (t, sizeof (struct cl_target_option), v);

  if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
    v = iterative_hash (t, sizeof (struct cl_optimization), v);

  if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
    v = iterative_hash_host_wide_int (IDENTIFIER_HASH_VALUE (t), v);

  if (CODE_CONTAINS_STRUCT (code, TS_STRING))
    v = iterative_hash (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t), v);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (POINTER_TYPE_P (t))
	{
	  /* For pointers factor in the pointed-to type recursively as
	     we cannot recurse through only pointers.
	     ??? We can generalize this by keeping track of the
	     in-SCC edges for each tree (or arbitrarily the first
	     such edge) and hashing that in in a second stage
	     (instead of the quadratic mixing of the SCC we do now).  */
	  hashval_t x;
	  unsigned ix;
	  if (streamer_tree_cache_lookup (cache, TREE_TYPE (t), &ix))
	    x = streamer_tree_cache_get_hash (cache, ix);
	  else
	    x = hash_tree (cache, TREE_TYPE (t));
	  v = iterative_hash_hashval_t (x, v);
	}
      else if (code != IDENTIFIER_NODE)
	visit (TREE_TYPE (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
      visit (VECTOR_CST_ELT (t, i));

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      visit (TREE_REALPART (t));
      visit (TREE_IMAGPART (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (t)
	  && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
	  && ANON_AGGRNAME_P (DECL_NAME (t)))
	;
      else
	visit (DECL_NAME (t));
      if (DECL_FILE_SCOPE_P (t))
	;
      else
	visit (DECL_CONTEXT (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      visit (DECL_SIZE (t));
      visit (DECL_SIZE_UNIT (t));
      visit (DECL_ATTRIBUTES (t));
      if ((code == VAR_DECL
	   || code == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (t))
	visit (DECL_VALUE_EXPR (t));
      if (code == VAR_DECL
	  && DECL_HAS_DEBUG_EXPR_P (t))
	visit (DECL_DEBUG_EXPR (t));
      /* ??? Hash DECL_INITIAL as streamed.  Needs the output-block to
         be able to call get_symbol_initial_value.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (code == TYPE_DECL)
	visit (DECL_ORIGINAL_TYPE (t));
      visit (DECL_VINDEX (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      if (DECL_ASSEMBLER_NAME_SET_P (t))
	visit (DECL_ASSEMBLER_NAME (t));
      visit (DECL_SECTION_NAME (t));
      visit (DECL_COMDAT_GROUP (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      visit (DECL_FIELD_OFFSET (t));
      visit (DECL_BIT_FIELD_TYPE (t));
      visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
      visit (DECL_FIELD_BIT_OFFSET (t));
      visit (DECL_FCONTEXT (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      visit (DECL_FUNCTION_PERSONALITY (t));
      visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
      visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      visit (TYPE_SIZE (t));
      visit (TYPE_SIZE_UNIT (t));
      visit (TYPE_ATTRIBUTES (t));
      visit (TYPE_NAME (t));
      visit (TYPE_MAIN_VARIANT (t));
      if (TYPE_FILE_SCOPE_P (t))
	;
      else
	visit (TYPE_CONTEXT (t));
      visit (TYPE_STUB_DECL (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (code == ENUMERAL_TYPE)
	visit (TYPE_VALUES (t));
      else if (code == ARRAY_TYPE)
	visit (TYPE_DOMAIN (t));
      else if (RECORD_OR_UNION_TYPE_P (t))
	for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
	  visit (f);
      else if (code == FUNCTION_TYPE
	       || code == METHOD_TYPE)
	visit (TYPE_ARG_TYPES (t));
      if (!POINTER_TYPE_P (t))
	visit (TYPE_MINVAL (t));
      visit (TYPE_MAXVAL (t));
      if (RECORD_OR_UNION_TYPE_P (t))
	visit (TYPE_BINFO (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      visit (TREE_PURPOSE (t));
      visit (TREE_VALUE (t));
      visit (TREE_CHAIN (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
      visit (TREE_VEC_ELT (t, i));

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      v = iterative_hash_host_wide_int (TREE_OPERAND_LENGTH (t), v);
      for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
	visit (TREE_OPERAND (t, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree b;
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
	visit (b);
      visit (BINFO_OFFSET (t));
      visit (BINFO_VTABLE (t));
      visit (BINFO_VPTR_FIELD (t));
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
	visit (b);
      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;
      v = iterative_hash_host_wide_int (CONSTRUCTOR_NELTS (t), v);
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
	{
	  visit (index);
	  visit (value);
	}
    }

  return v;

#undef visit
}
1058
1059 /* Compare two SCC entries by their hash value for qsorting them. */
1060
1061 static int
1062 scc_entry_compare (const void *p1_, const void *p2_)
1063 {
1064 const scc_entry *p1 = (const scc_entry *) p1_;
1065 const scc_entry *p2 = (const scc_entry *) p2_;
1066 if (p1->hash < p2->hash)
1067 return -1;
1068 else if (p1->hash > p2->hash)
1069 return 1;
1070 return 0;
1071 }
1072
/* Return a hash value for the SCC on the SCC stack from FIRST with
   size SIZE.  The result is made independent of the order in which
   the SCC members were visited so that the same SCC streamed from
   different translation units hashes identically.  */

static hashval_t
hash_scc (struct streamer_tree_cache_d *cache, unsigned first, unsigned size)
{
  /* Compute hash values for the SCC members.  */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash = hash_tree (cache, sccstack[first+i].t);

  /* A singleton SCC needs no order-independence treatment.  */
  if (size == 1)
    return sccstack[first].hash;

  /* Sort the SCC of type, hash pairs so that when we mix in
     all members of the SCC the hash value becomes independent on
     the order we visited the SCC.  Disregard hashes equal to
     the hash of the tree we mix into because we cannot guarantee
     a stable sort for those across different TUs.  */
  qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
  hashval_t *tem = XALLOCAVEC (hashval_t, size);
  for (unsigned i = 0; i < size; ++i)
    {
      hashval_t hash = sccstack[first+i].hash;
      hashval_t orig_hash = hash;
      unsigned j;
      /* Skip same hashes.  */
      for (j = i + 1;
	   j < size && sccstack[first+j].hash == orig_hash; ++j)
	;
      /* Mix in the hashes of the entries following the run of equal
	 hashes ...  */
      for (; j < size; ++j)
	hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
      /* ... and then, wrapping around, the entries preceding it, so
	 that every member except the equal-hash run contributes.  */
      for (j = 0; sccstack[first+j].hash != orig_hash; ++j)
	hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
      tem[i] = hash;
    }
  /* Store the per-member hashes back and combine them into the
     overall SCC hash.  */
  hashval_t scc_hash = 0;
  for (unsigned i = 0; i < size; ++i)
    {
      sccstack[first+i].hash = tem[i];
      scc_hash = iterative_hash_hashval_t (tem[i], scc_hash);
    }
  return scc_hash;
}
1116
/* DFS walk EXPR and stream SCCs of tree bodies if they are not
   already in the streamer cache.  Main routine called for
   each visit of EXPR.  This implements a Tarjan-style SCC detection
   using the per-node state in SCCSTATE/SCCSTACK; FROM_STATE is the
   state of the DFS parent (NULL at the root).  REF_P/THIS_REF_P
   control whether siblings/EXPR itself may be emitted as references.  */

static void
DFS_write_tree (struct output_block *ob, sccs *from_state,
		tree expr, bool ref_p, bool this_ref_p)
{
  unsigned ix;
  sccs **slot;

  /* Handle special cases.  */
  if (expr == NULL_TREE)
    return;

  /* Do not DFS walk into indexable trees.  */
  if (this_ref_p && tree_is_indexable (expr))
    return;

  /* Check if we already streamed EXPR.  */
  if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
    return;

  slot = (sccs **)pointer_map_insert (sccstate, expr);
  sccs *cstate = *slot;
  if (!cstate)
    {
      scc_entry e = { expr, 0 };
      /* Not yet visited.  DFS recurse and push it onto the stack.  */
      *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
      sccstack.safe_push (e);
      cstate->dfsnum = next_dfs_num++;
      cstate->low = cstate->dfsnum;

      /* Builtins have no body to walk; non-overflowed INTEGER_CSTs
	 only need their type walked.  Everything else gets a full
	 body walk.  */
      if (streamer_handle_as_builtin_p (expr))
	;
      else if (TREE_CODE (expr) == INTEGER_CST
	       && !TREE_OVERFLOW (expr))
	DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
      else
	{
	  DFS_write_tree_body (ob, expr, cstate, ref_p);

	  /* Walk any LTO-specific edges.  */
	  if (DECL_P (expr)
	      && TREE_CODE (expr) != FUNCTION_DECL
	      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
	    {
	      /* Handle DECL_INITIAL for symbols.  */
	      tree initial = get_symbol_initial_value (ob, expr);
	      DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
	    }
	}

      /* See if we found an SCC.  EXPR is an SCC root iff its low link
	 still equals its own DFS number after the recursion.  */
      if (cstate->low == cstate->dfsnum)
	{
	  unsigned first, size;
	  tree x;

	  /* Pop the SCC and compute its size.  */
	  first = sccstack.length ();
	  do
	    {
	      x = sccstack[--first].t;
	    }
	  while (x != expr);
	  size = sccstack.length () - first;

	  /* No need to compute hashes for LTRANS units, we don't perform
	     any merging there.  */
	  hashval_t scc_hash = 0;
	  unsigned scc_entry_len = 0;
	  if (!flag_wpa)
	    {
	      scc_hash = hash_scc (ob->writer_cache, first, size);

	      /* Put the entries with the least number of collisions first.  */
	      unsigned entry_start = 0;
	      scc_entry_len = size + 1;
	      for (unsigned i = 0; i < size;)
		{
		  unsigned from = i;
		  for (i = i + 1; i < size
		       && (sccstack[first + i].hash
			   == sccstack[first + from].hash); ++i)
		    ;
		  if (i - from < scc_entry_len)
		    {
		      scc_entry_len = i - from;
		      entry_start = from;
		    }
		}
	      /* Rotate the chosen run of entries to the front of the SCC.  */
	      for (unsigned i = 0; i < scc_entry_len; ++i)
		{
		  scc_entry tem = sccstack[first + i];
		  sccstack[first + i] = sccstack[first + entry_start + i];
		  sccstack[first + entry_start + i] = tem;
		}
	    }

	  /* Write LTO_tree_scc.  */
	  streamer_write_record_start (ob, LTO_tree_scc);
	  streamer_write_uhwi (ob, size);
	  streamer_write_uhwi (ob, scc_hash);

	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
	     All INTEGER_CSTs need to be handled this way as we need
	     their type to materialize them.  Also builtins are handled
	     this way.
	     ??? We still wrap these in LTO_tree_scc so at the
	     input side we can properly identify the tree we want
	     to ultimatively return.  */
	  size_t old_len = ob->writer_cache->nodes.length ();
	  if (size == 1)
	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
	  else
	    {
	      /* Write the size of the SCC entry candidates.  */
	      streamer_write_uhwi (ob, scc_entry_len);

	      /* Write all headers and populate the streamer cache.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  hashval_t hash = sccstack[first+i].hash;
		  tree t = sccstack[first+i].t;
		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
							      t, hash, &ix);
		  gcc_assert (!exists_p);

		  if (!lto_is_streamable (t))
		    internal_error ("tree code %qs is not supported "
				    "in LTO streams",
				    get_tree_code_name (TREE_CODE (t)));

		  gcc_checking_assert (!streamer_handle_as_builtin_p (t));

		  /* Write the header, containing everything needed to
		     materialize EXPR on the reading side.  */
		  streamer_write_tree_header (ob, t);
		}

	      /* Write the bitpacks and tree references.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);

		  /* Mark the end of the tree.  */
		  streamer_write_zero (ob);
		}
	    }
	  gcc_assert (old_len + size == ob->writer_cache->nodes.length ());

	  /* Finally truncate the vector.  */
	  sccstack.truncate (first);

	  /* Propagate the low link to the DFS parent before returning.  */
	  if (from_state)
	    from_state->low = MIN (from_state->low, cstate->low);
	  return;
	}

      /* Not an SCC root: propagate the low link upwards.  */
      if (from_state)
	from_state->low = MIN (from_state->low, cstate->low);
    }
  /* EXPR was already on the stack (a back edge); update the parent's
     low link from EXPR's DFS number.  */
  gcc_checking_assert (from_state);
  if (cstate->dfsnum < from_state->dfsnum)
    from_state->low = MIN (cstate->dfsnum, from_state->low);
}
1285
1286
/* Emit the physical representation of tree node EXPR to output block
   OB.  If THIS_REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.  */

void
lto_output_tree (struct output_block *ob, tree expr,
		 bool ref_p, bool this_ref_p)
{
  unsigned ix;
  bool existed_p;

  /* A NULL tree is streamed as a single LTO_null record.  */
  if (expr == NULL_TREE)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  /* Indexable trees are emitted as references into the appropriate
     decl stream rather than by value.  */
  if (this_ref_p && tree_is_indexable (expr))
    {
      lto_output_tree_ref (ob, expr);
      return;
    }

  existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
  if (existed_p)
    {
      /* If a node has already been streamed out, make sure that
	 we don't write it more than once.  Otherwise, the reader
	 will instantiate two different nodes for the same object.  */
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
  else
    {
      /* This is the first time we see EXPR, write all reachable
	 trees to OB.  */
      static bool in_dfs_walk;

      /* Protect against recursion which means disconnect between
	 what tree edges we walk in the DFS walk and what edges
	 we stream out.  */
      gcc_assert (!in_dfs_walk);

      /* Start the DFS walk: set up the global SCC walk state used by
	 DFS_write_tree and tear it down again afterwards.  */
      in_dfs_walk = true;
      sccstate = pointer_map_create ();
      gcc_obstack_init (&sccstate_obstack);
      next_dfs_num = 1;
      DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
      sccstack.release ();
      pointer_map_destroy (sccstate);
      obstack_free (&sccstate_obstack, NULL);
      in_dfs_walk = false;

      /* Finally append a reference to the tree we were writing.
	 ??? If expr ended up as a singleton we could have
	 inlined it here and avoid outputting a reference.  */
      existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
      gcc_assert (existed_p);
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
}
1358
1359
1360 /* Output to OB a list of try/catch handlers starting with FIRST. */
1361
1362 static void
1363 output_eh_try_list (struct output_block *ob, eh_catch first)
1364 {
1365 eh_catch n;
1366
1367 for (n = first; n; n = n->next_catch)
1368 {
1369 streamer_write_record_start (ob, LTO_eh_catch);
1370 stream_write_tree (ob, n->type_list, true);
1371 stream_write_tree (ob, n->filter_list, true);
1372 stream_write_tree (ob, n->label, true);
1373 }
1374
1375 streamer_write_record_start (ob, LTO_null);
1376 }
1377
1378
1379 /* Output EH region R in function FN to OB. CURR_RN is the slot index
1380 that is being emitted in FN->EH->REGION_ARRAY. This is used to
1381 detect EH region sharing. */
1382
1383 static void
1384 output_eh_region (struct output_block *ob, eh_region r)
1385 {
1386 enum LTO_tags tag;
1387
1388 if (r == NULL)
1389 {
1390 streamer_write_record_start (ob, LTO_null);
1391 return;
1392 }
1393
1394 if (r->type == ERT_CLEANUP)
1395 tag = LTO_ert_cleanup;
1396 else if (r->type == ERT_TRY)
1397 tag = LTO_ert_try;
1398 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1399 tag = LTO_ert_allowed_exceptions;
1400 else if (r->type == ERT_MUST_NOT_THROW)
1401 tag = LTO_ert_must_not_throw;
1402 else
1403 gcc_unreachable ();
1404
1405 streamer_write_record_start (ob, tag);
1406 streamer_write_hwi (ob, r->index);
1407
1408 if (r->outer)
1409 streamer_write_hwi (ob, r->outer->index);
1410 else
1411 streamer_write_zero (ob);
1412
1413 if (r->inner)
1414 streamer_write_hwi (ob, r->inner->index);
1415 else
1416 streamer_write_zero (ob);
1417
1418 if (r->next_peer)
1419 streamer_write_hwi (ob, r->next_peer->index);
1420 else
1421 streamer_write_zero (ob);
1422
1423 if (r->type == ERT_TRY)
1424 {
1425 output_eh_try_list (ob, r->u.eh_try.first_catch);
1426 }
1427 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1428 {
1429 stream_write_tree (ob, r->u.allowed.type_list, true);
1430 stream_write_tree (ob, r->u.allowed.label, true);
1431 streamer_write_uhwi (ob, r->u.allowed.filter);
1432 }
1433 else if (r->type == ERT_MUST_NOT_THROW)
1434 {
1435 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1436 bitpack_d bp = bitpack_create (ob->main_stream);
1437 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1438 streamer_write_bitpack (&bp);
1439 }
1440
1441 if (r->landing_pads)
1442 streamer_write_hwi (ob, r->landing_pads->index);
1443 else
1444 streamer_write_zero (ob);
1445 }
1446
1447
1448 /* Output landing pad LP to OB. */
1449
1450 static void
1451 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1452 {
1453 if (lp == NULL)
1454 {
1455 streamer_write_record_start (ob, LTO_null);
1456 return;
1457 }
1458
1459 streamer_write_record_start (ob, LTO_eh_landing_pad);
1460 streamer_write_hwi (ob, lp->index);
1461 if (lp->next_lp)
1462 streamer_write_hwi (ob, lp->next_lp->index);
1463 else
1464 streamer_write_zero (ob);
1465
1466 if (lp->region)
1467 streamer_write_hwi (ob, lp->region->index);
1468 else
1469 streamer_write_zero (ob);
1470
1471 stream_write_tree (ob, lp->post_landing_pad, true);
1472 }
1473
1474
1475 /* Output the existing eh_table to OB. */
1476
1477 static void
1478 output_eh_regions (struct output_block *ob, struct function *fn)
1479 {
1480 if (fn->eh && fn->eh->region_tree)
1481 {
1482 unsigned i;
1483 eh_region eh;
1484 eh_landing_pad lp;
1485 tree ttype;
1486
1487 streamer_write_record_start (ob, LTO_eh_table);
1488
1489 /* Emit the index of the root of the EH region tree. */
1490 streamer_write_hwi (ob, fn->eh->region_tree->index);
1491
1492 /* Emit all the EH regions in the region array. */
1493 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1494 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1495 output_eh_region (ob, eh);
1496
1497 /* Emit all landing pads. */
1498 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1499 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1500 output_eh_lp (ob, lp);
1501
1502 /* Emit all the runtime type data. */
1503 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1504 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1505 stream_write_tree (ob, ttype, true);
1506
1507 /* Emit the table of action chains. */
1508 if (targetm.arm_eabi_unwinder)
1509 {
1510 tree t;
1511 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1512 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1513 stream_write_tree (ob, t, true);
1514 }
1515 else
1516 {
1517 uchar c;
1518 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1519 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1520 streamer_write_char_stream (ob->main_stream, c);
1521 }
1522 }
1523
1524 /* The LTO_null either terminates the record or indicates that there
1525 are no eh_records at all. */
1526 streamer_write_record_start (ob, LTO_null);
1527 }
1528
1529
1530 /* Output all of the active ssa names to the ssa_names stream. */
1531
1532 static void
1533 output_ssa_names (struct output_block *ob, struct function *fn)
1534 {
1535 unsigned int i, len;
1536
1537 len = vec_safe_length (SSANAMES (fn));
1538 streamer_write_uhwi (ob, len);
1539
1540 for (i = 1; i < len; i++)
1541 {
1542 tree ptr = (*SSANAMES (fn))[i];
1543
1544 if (ptr == NULL_TREE
1545 || SSA_NAME_IN_FREE_LIST (ptr)
1546 || virtual_operand_p (ptr))
1547 continue;
1548
1549 streamer_write_uhwi (ob, i);
1550 streamer_write_char_stream (ob->main_stream,
1551 SSA_NAME_IS_DEFAULT_DEF (ptr));
1552 if (SSA_NAME_VAR (ptr))
1553 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1554 else
1555 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1556 stream_write_tree (ob, TREE_TYPE (ptr), true);
1557 }
1558
1559 streamer_write_zero (ob);
1560 }
1561
1562
1563 /* Output the cfg. */
1564
1565 static void
1566 output_cfg (struct output_block *ob, struct function *fn)
1567 {
1568 struct lto_output_stream *tmp_stream = ob->main_stream;
1569 basic_block bb;
1570
1571 ob->main_stream = ob->cfg_stream;
1572
1573 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1574 profile_status_for_function (fn));
1575
1576 /* Output the number of the highest basic block. */
1577 streamer_write_uhwi (ob, last_basic_block_for_function (fn));
1578
1579 FOR_ALL_BB_FN (bb, fn)
1580 {
1581 edge_iterator ei;
1582 edge e;
1583
1584 streamer_write_hwi (ob, bb->index);
1585
1586 /* Output the successors and the edge flags. */
1587 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1588 FOR_EACH_EDGE (e, ei, bb->succs)
1589 {
1590 streamer_write_uhwi (ob, e->dest->index);
1591 streamer_write_hwi (ob, e->probability);
1592 streamer_write_gcov_count (ob, e->count);
1593 streamer_write_uhwi (ob, e->flags);
1594 }
1595 }
1596
1597 streamer_write_hwi (ob, -1);
1598
1599 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1600 while (bb->next_bb)
1601 {
1602 streamer_write_hwi (ob, bb->next_bb->index);
1603 bb = bb->next_bb;
1604 }
1605
1606 streamer_write_hwi (ob, -1);
1607
1608 /* ??? The cfgloop interface is tied to cfun. */
1609 gcc_assert (cfun == fn);
1610
1611 /* Output the number of loops. */
1612 streamer_write_uhwi (ob, number_of_loops (fn));
1613
1614 /* Output each loop, skipping the tree root which has number zero. */
1615 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1616 {
1617 struct loop *loop = get_loop (fn, i);
1618
1619 /* Write the index of the loop header. That's enough to rebuild
1620 the loop tree on the reader side. Stream -1 for an unused
1621 loop entry. */
1622 if (!loop)
1623 {
1624 streamer_write_hwi (ob, -1);
1625 continue;
1626 }
1627 else
1628 streamer_write_hwi (ob, loop->header->index);
1629
1630 /* Write everything copy_loop_info copies. */
1631 streamer_write_enum (ob->main_stream,
1632 loop_estimation, EST_LAST, loop->estimate_state);
1633 streamer_write_hwi (ob, loop->any_upper_bound);
1634 if (loop->any_upper_bound)
1635 {
1636 streamer_write_uhwi (ob, loop->nb_iterations_upper_bound.low);
1637 streamer_write_hwi (ob, loop->nb_iterations_upper_bound.high);
1638 }
1639 streamer_write_hwi (ob, loop->any_estimate);
1640 if (loop->any_estimate)
1641 {
1642 streamer_write_uhwi (ob, loop->nb_iterations_estimate.low);
1643 streamer_write_hwi (ob, loop->nb_iterations_estimate.high);
1644 }
1645 }
1646
1647 ob->main_stream = tmp_stream;
1648 }
1649
1650
1651 /* Create the header in the file using OB. If the section type is for
1652 a function, set FN to the decl for that function. */
1653
1654 void
1655 produce_asm (struct output_block *ob, tree fn)
1656 {
1657 enum lto_section_type section_type = ob->section_type;
1658 struct lto_function_header header;
1659 char *section_name;
1660 struct lto_output_stream *header_stream;
1661
1662 if (section_type == LTO_section_function_body)
1663 {
1664 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1665 section_name = lto_get_section_name (section_type, name, NULL);
1666 }
1667 else
1668 section_name = lto_get_section_name (section_type, NULL, NULL);
1669
1670 lto_begin_section (section_name, !flag_wpa);
1671 free (section_name);
1672
1673 /* The entire header is stream computed here. */
1674 memset (&header, 0, sizeof (struct lto_function_header));
1675
1676 /* Write the header. */
1677 header.lto_header.major_version = LTO_major_version;
1678 header.lto_header.minor_version = LTO_minor_version;
1679
1680 header.compressed_size = 0;
1681
1682 if (section_type == LTO_section_function_body)
1683 header.cfg_size = ob->cfg_stream->total_size;
1684 header.main_size = ob->main_stream->total_size;
1685 header.string_size = ob->string_stream->total_size;
1686
1687 header_stream = XCNEW (struct lto_output_stream);
1688 lto_output_data_stream (header_stream, &header, sizeof header);
1689 lto_write_stream (header_stream);
1690 free (header_stream);
1691
1692 /* Put all of the gimple and the string table out the asm file as a
1693 block of text. */
1694 if (section_type == LTO_section_function_body)
1695 lto_write_stream (ob->cfg_stream);
1696 lto_write_stream (ob->main_stream);
1697 lto_write_stream (ob->string_stream);
1698
1699 lto_end_section ();
1700 }
1701
1702
/* Output the base body of struct function FN using output block OB:
   static chain, local decls, IL properties, a bitpack of function
   flags and the start/end source locations.  The pack order below is
   the wire format and must match the reader's unpack order.  */

static void
output_struct_function_base (struct output_block *ob, struct function *fn)
{
  struct bitpack_d bp;
  unsigned i;
  tree t;

  /* Output the static chain and non-local goto save area.  */
  stream_write_tree (ob, fn->static_chain_decl, true);
  stream_write_tree (ob, fn->nonlocal_goto_save_area, true);

  /* Output all the local variables in the function.  */
  streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
  FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
    stream_write_tree (ob, t, true);

  /* Output current IL state of the function.  */
  streamer_write_uhwi (ob, fn->curr_properties);

  /* Write all the attributes for FN.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, fn->is_thunk, 1);
  bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
  bp_pack_value (&bp, fn->returns_pcc_struct, 1);
  bp_pack_value (&bp, fn->returns_struct, 1);
  bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
  bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
  bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
  bp_pack_value (&bp, fn->after_inlining, 1);
  bp_pack_value (&bp, fn->stdarg, 1);
  bp_pack_value (&bp, fn->has_nonlocal_label, 1);
  bp_pack_value (&bp, fn->calls_alloca, 1);
  bp_pack_value (&bp, fn->calls_setjmp, 1);
  /* The va_list register counts use 8 bits each.  */
  bp_pack_value (&bp, fn->va_list_fpr_size, 8);
  bp_pack_value (&bp, fn->va_list_gpr_size, 8);

  /* Output the function start and end loci.  */
  stream_output_location (ob, &bp, fn->function_start_locus);
  stream_output_location (ob, &bp, fn->function_end_locus);

  streamer_write_bitpack (&bp);
}
1747
1748
/* Output the body of function NODE->DECL to its own
   LTO_section_function_body section: the result/argument decls, the
   lexical scope tree, and (when a gimple body exists) the function
   base data, SSA names, EH regions, statements and CFG.  */

static void
output_function (struct cgraph_node *node)
{
  tree function;
  struct function *fn;
  basic_block bb;
  struct output_block *ob;

  function = node->decl;
  fn = DECL_STRUCT_FUNCTION (function);
  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->cgraph_node = node;

  /* Nothing may be in function context when we get here.  */
  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  /* Set current_function_decl and cfun.  */
  push_cfun (fn);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  streamer_write_record_start (ob, LTO_function);

  /* Output decls for parameters and args.  */
  stream_write_tree (ob, DECL_RESULT (function), true);
  streamer_write_chain (ob, DECL_ARGUMENTS (function), true);

  /* Output DECL_INITIAL for the function, which contains the tree of
     lexical scopes.  */
  stream_write_tree (ob, DECL_INITIAL (function), true);

  /* We also stream abstract functions where we stream only stuff needed for
     debug info.  A leading uhwi of 1 flags that a body follows; 0 that
     none does.  */
  if (gimple_has_body_p (function))
    {
      streamer_write_uhwi (ob, 1);
      output_struct_function_base (ob, fn);

      /* Output all the SSA names used in the function.  */
      output_ssa_names (ob, fn);

      /* Output any exception handling regions.  */
      output_eh_regions (ob, fn);


      /* We will renumber the statements.  The code that does this uses
	 the same ordering that we use for serializing them so we can use
	 the same code on the other end and not have to write out the
	 statement numbers.  We do not assign UIDs to PHIs here because
	 virtual PHIs get re-computed on-the-fly which would make numbers
	 inconsistent.  */
      set_gimple_stmt_max_uid (cfun, 0);
      FOR_ALL_BB (bb)
	{
	  gimple_stmt_iterator gsi;
	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);

	      /* Virtual PHIs are not going to be streamed.  */
	      if (!virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}
      /* To avoid keeping duplicate gimple IDs in the statements, renumber
	 virtual phis now.  */
      FOR_ALL_BB (bb)
	{
	  gimple_stmt_iterator gsi;
	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      if (virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}

      /* Output the code for the function.  */
      FOR_ALL_BB_FN (bb, fn)
	output_bb (ob, bb, fn);

      /* The terminator for this function.  */
      streamer_write_record_start (ob, LTO_null);

      output_cfg (ob, fn);

      pop_cfun ();
   }
  else
    streamer_write_uhwi (ob, 0);

  /* Create a section to hold the pickled output of this function.  */
  produce_asm (ob, function);

  destroy_output_block (ob);
}
1854
1855
1856 /* Emit toplevel asms. */
1857
1858 void
1859 lto_output_toplevel_asms (void)
1860 {
1861 struct output_block *ob;
1862 struct asm_node *can;
1863 char *section_name;
1864 struct lto_output_stream *header_stream;
1865 struct lto_asm_header header;
1866
1867 if (! asm_nodes)
1868 return;
1869
1870 ob = create_output_block (LTO_section_asm);
1871
1872 /* Make string 0 be a NULL string. */
1873 streamer_write_char_stream (ob->string_stream, 0);
1874
1875 for (can = asm_nodes; can; can = can->next)
1876 {
1877 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
1878 streamer_write_hwi (ob, can->order);
1879 }
1880
1881 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
1882
1883 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
1884 lto_begin_section (section_name, !flag_wpa);
1885 free (section_name);
1886
1887 /* The entire header stream is computed here. */
1888 memset (&header, 0, sizeof (header));
1889
1890 /* Write the header. */
1891 header.lto_header.major_version = LTO_major_version;
1892 header.lto_header.minor_version = LTO_minor_version;
1893
1894 header.main_size = ob->main_stream->total_size;
1895 header.string_size = ob->string_stream->total_size;
1896
1897 header_stream = XCNEW (struct lto_output_stream);
1898 lto_output_data_stream (header_stream, &header, sizeof (header));
1899 lto_write_stream (header_stream);
1900 free (header_stream);
1901
1902 /* Put all of the gimple and the string table out the asm file as a
1903 block of text. */
1904 lto_write_stream (ob->main_stream);
1905 lto_write_stream (ob->string_stream);
1906
1907 lto_end_section ();
1908
1909 destroy_output_block (ob);
1910 }
1911
1912
/* Copy the function body of NODE without deserializing: the section
   bytes read in for NODE are emitted verbatim into a new output
   section, and the per-function in-decl-state tree vectors are copied
   into the output decl state so indices stay valid.  */

static void
copy_function (struct cgraph_node *node)
{
  tree function = node->decl;
  struct lto_file_decl_data *file_data = node->lto_file_data;
  struct lto_output_stream *output_stream = XCNEW (struct lto_output_stream);
  const char *data;
  size_t len;
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
  char *section_name =
    lto_get_section_name (LTO_section_function_body, name, NULL);
  size_t i, j;
  struct lto_in_decl_state *in_state;
  struct lto_out_decl_state *out_state = lto_get_out_decl_state ();

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* We may have renamed the declaration, e.g., a static function.  */
  name = lto_get_decl_name_mapping (file_data, name);

  data = lto_get_section_data (file_data, LTO_section_function_body,
                               name, &len);
  gcc_assert (data);

  /* Do a bit copy of the function body.  */
  lto_output_data_stream (output_stream, data, len);
  lto_write_stream (output_stream);

  /* Copy decls.  */
  in_state =
    lto_get_function_in_decl_state (node->lto_file_data, function);
  gcc_assert (in_state);

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      size_t n = in_state->streams[i].size;
      tree *trees = in_state->streams[i].trees;
      struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);

      /* The out state must have the same indices and the in state.
	 So just copy the vector.  All the encoders in the in state
	 must be empty where we reach here.  */
      gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
      encoder->trees.reserve_exact (n);
      for (j = 0; j < n; j++)
	encoder->trees.safe_push (trees[j]);
    }

  /* Release the input section data and close the output section.  */
  lto_free_section_data (file_data, LTO_section_function_body, name,
			 data, len);
  free (output_stream);
  lto_end_section ();
}
1969
1970
/* Main entry point from the pass manager.  Stream out (or, at LTRANS
   time, bit-copy) the body of every function in the symtab encoder,
   then emit the symbol table itself.  */

void
lto_output (void)
{
  struct lto_out_decl_state *decl_state;
#ifdef ENABLE_CHECKING
  /* Used only to assert that no DECL is output twice.  */
  bitmap output = lto_bitmap_alloc ();
#endif
  int i, n_nodes;
  lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* Initialize the streamer.  */
  lto_streamer_init ();

  n_nodes = lto_symtab_encoder_size (encoder);
  /* Process only the functions with bodies.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *node = dyn_cast <cgraph_node> (snode);
      if (node
	  && lto_symtab_encoder_encode_body_p (encoder, node)
	  && !node->alias)
	{
#ifdef ENABLE_CHECKING
	  gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
	  bitmap_set_bit (output, DECL_UID (node->decl));
#endif
	  /* Each function body is streamed under its own decl state,
	     recorded afterwards for the decl-state sections.  */
	  decl_state = lto_new_out_decl_state ();
	  lto_push_out_decl_state (decl_state);
	  if (gimple_has_body_p (node->decl) || !flag_wpa)
	    output_function (node);
	  else
	    /* At WPA time a function without a body present is
	       copied section-for-section from its input file.  */
	    copy_function (node);
	  gcc_assert (lto_get_out_decl_state () == decl_state);
	  lto_pop_out_decl_state ();
	  lto_record_function_out_decl_state (node->decl, decl_state);
	}
    }

  /* Emit the callgraph after emitting function bodies.  This needs to
     be done now to make sure that all the statements in every function
     have been renumbered so that edges can be associated with call
     statements using the statement UIDs.  */
  output_symtab ();

#ifdef ENABLE_CHECKING
  lto_bitmap_free (output);
#endif
}
2022
2023 /* Write each node in encoded by ENCODER to OB, as well as those reachable
2024 from it and required for correct representation of its semantics.
2025 Each node in ENCODER must be a global declaration or a type. A node
2026 is written only once, even if it appears multiple times in the
2027 vector. Certain transitively-reachable nodes, such as those
2028 representing expressions, may be duplicated, but such nodes
2029 must not appear in ENCODER itself. */
2030
2031 static void
2032 write_global_stream (struct output_block *ob,
2033 struct lto_tree_ref_encoder *encoder)
2034 {
2035 tree t;
2036 size_t index;
2037 const size_t size = lto_tree_ref_encoder_size (encoder);
2038
2039 for (index = 0; index < size; index++)
2040 {
2041 t = lto_tree_ref_encoder_get_tree (encoder, index);
2042 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2043 stream_write_tree (ob, t, false);
2044 }
2045 }
2046
2047
2048 /* Write a sequence of indices into the globals vector corresponding
2049 to the trees in ENCODER. These are used by the reader to map the
2050 indices used to refer to global entities within function bodies to
2051 their referents. */
2052
2053 static void
2054 write_global_references (struct output_block *ob,
2055 struct lto_output_stream *ref_stream,
2056 struct lto_tree_ref_encoder *encoder)
2057 {
2058 tree t;
2059 uint32_t index;
2060 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2061
2062 /* Write size as 32-bit unsigned. */
2063 lto_output_data_stream (ref_stream, &size, sizeof (int32_t));
2064
2065 for (index = 0; index < size; index++)
2066 {
2067 uint32_t slot_num;
2068
2069 t = lto_tree_ref_encoder_get_tree (encoder, index);
2070 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2071 gcc_assert (slot_num != (unsigned)-1);
2072 lto_output_data_stream (ref_stream, &slot_num, sizeof slot_num);
2073 }
2074 }
2075
2076
2077 /* Write all the streams in an lto_out_decl_state STATE using
2078 output block OB and output stream OUT_STREAM. */
2079
2080 void
2081 lto_output_decl_state_streams (struct output_block *ob,
2082 struct lto_out_decl_state *state)
2083 {
2084 int i;
2085
2086 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2087 write_global_stream (ob, &state->streams[i]);
2088 }
2089
2090
2091 /* Write all the references in an lto_out_decl_state STATE using
2092 output block OB and output stream OUT_STREAM. */
2093
2094 void
2095 lto_output_decl_state_refs (struct output_block *ob,
2096 struct lto_output_stream *out_stream,
2097 struct lto_out_decl_state *state)
2098 {
2099 unsigned i;
2100 uint32_t ref;
2101 tree decl;
2102
2103 /* Write reference to FUNCTION_DECL. If there is not function,
2104 write reference to void_type_node. */
2105 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2106 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2107 gcc_assert (ref != (unsigned)-1);
2108 lto_output_data_stream (out_stream, &ref, sizeof (uint32_t));
2109
2110 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2111 write_global_references (ob, out_stream, &state->streams[i]);
2112 }
2113
2114
2115 /* Return the written size of STATE. */
2116
2117 static size_t
2118 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2119 {
2120 int i;
2121 size_t size;
2122
2123 size = sizeof (int32_t); /* fn_ref. */
2124 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2125 {
2126 size += sizeof (int32_t); /* vector size. */
2127 size += (lto_tree_ref_encoder_size (&state->streams[i])
2128 * sizeof (int32_t));
2129 }
2130 return size;
2131 }
2132
2133
/* Write symbol T into STREAM in CACHE.  SEEN specifies symbols we wrote
   so far (keyed by mangled assembler name); duplicates are silently
   skipped.  ALIAS is true when T is being written on behalf of an
   alias, which relaxes the definition asserts below.

   Record layout per symbol (consumed by the linker plugin):
   NUL-terminated name, NUL-terminated comdat group, 1-byte kind,
   1-byte visibility, 8-byte size, 4-byte writer-cache slot.  */

static void
write_symbol (struct streamer_tree_cache_d *cache,
	      struct lto_output_stream *stream,
	      tree t, struct pointer_set_t *seen, bool alias)
{
  const char *name;
  enum gcc_plugin_symbol_kind kind;
  enum gcc_plugin_symbol_visibility visibility;
  unsigned slot_num;
  unsigned HOST_WIDEST_INT size;
  const char *comdat;
  unsigned char c;

  /* None of the following kinds of symbols are needed in the
     symbol table.  */
  if (!TREE_PUBLIC (t)
      || is_builtin_fn (t)
      || DECL_ABSTRACT (t)
      || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
    return;
  gcc_assert (TREE_CODE (t) != RESULT_DECL);

  /* Only variables and functions can be real linker symbols.  */
  gcc_assert (TREE_CODE (t) == VAR_DECL
	      || TREE_CODE (t) == FUNCTION_DECL);

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));

  /* This behaves like assemble_name_raw in varasm.c, performing the
     same name manipulations that ASM_OUTPUT_LABELREF does.  */
  name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));

  /* Emit each mangled name only once.  */
  if (pointer_set_contains (seen, name))
    return;
  pointer_set_insert (seen, name);

  /* The plugin refers back to the streamed tree by its cache slot.  */
  streamer_tree_cache_lookup (cache, t, &slot_num);
  gcc_assert (slot_num != (unsigned)-1);

  /* Classify the symbol for the plugin: undefined (possibly weak)
     when external, otherwise weak/common/plain definition.  */
  if (DECL_EXTERNAL (t))
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKUNDEF;
      else
	kind = GCCPK_UNDEF;
    }
  else
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKDEF;
      else if (DECL_COMMON (t))
	kind = GCCPK_COMMON;
      else
	kind = GCCPK_DEF;

      /* When something is defined, it should have node attached.  */
      gcc_assert (alias || TREE_CODE (t) != VAR_DECL
		  || varpool_get_node (t)->definition);
      gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
		  || (cgraph_get_node (t)
		      && cgraph_get_node (t)->definition));
    }

  /* Imitate what default_elf_asm_output_external do.
     When symbol is external, we need to output it with DEFAULT visibility
     when compiling with -fvisibility=default, while with HIDDEN visibility
     when symbol has attribute (visibility("hidden")) specified.
     targetm.binds_local_p check DECL_VISIBILITY_SPECIFIED and gets this
     right.  */

  if (DECL_EXTERNAL (t)
      && !targetm.binds_local_p (t))
    visibility = GCCPV_DEFAULT;
  else
    switch (DECL_VISIBILITY (t))
      {
      case VISIBILITY_DEFAULT:
	visibility = GCCPV_DEFAULT;
	break;
      case VISIBILITY_PROTECTED:
	visibility = GCCPV_PROTECTED;
	break;
      case VISIBILITY_HIDDEN:
	visibility = GCCPV_HIDDEN;
	break;
      case VISIBILITY_INTERNAL:
	visibility = GCCPV_INTERNAL;
	break;
      }

  /* Only common symbols carry a meaningful size (the linker needs it
     to pick the largest common block); everything else writes 0.  */
  if (kind == GCCPK_COMMON
      && DECL_SIZE_UNIT (t)
      && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
    size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
  else
    size = 0;

  if (DECL_ONE_ONLY (t))
    comdat = IDENTIFIER_POINTER (DECL_COMDAT_GROUP (t));
  else
    comdat = "";

  /* Emit the fixed record described in the function comment.  The
     8- and 4-byte widths below are part of the plugin ABI; SIZE is
     assumed to occupy at least 8 bytes (HOST_WIDEST_INT).  */
  lto_output_data_stream (stream, name, strlen (name) + 1);
  lto_output_data_stream (stream, comdat, strlen (comdat) + 1);
  c = (unsigned char) kind;
  lto_output_data_stream (stream, &c, 1);
  c = (unsigned char) visibility;
  lto_output_data_stream (stream, &c, 1);
  lto_output_data_stream (stream, &size, 8);
  lto_output_data_stream (stream, &slot_num, 4);
}
2247
2248 /* Return true if NODE should appear in the plugin symbol table. */
2249
2250 bool
2251 output_symbol_p (symtab_node *node)
2252 {
2253 struct cgraph_node *cnode;
2254 if (!symtab_real_symbol_p (node))
2255 return false;
2256 /* We keep external functions in symtab for sake of inlining
2257 and devirtualization. We do not want to see them in symbol table as
2258 references unless they are really used. */
2259 cnode = dyn_cast <cgraph_node> (node);
2260 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2261 && cnode->callers)
2262 return true;
2263
2264 /* Ignore all references from external vars initializers - they are not really
2265 part of the compilation unit until they are used by folding. Some symbols,
2266 like references to external construction vtables can not be referred to at all.
2267 We decide this at can_refer_decl_in_current_unit_p. */
2268 if (!node->definition || DECL_EXTERNAL (node->decl))
2269 {
2270 int i;
2271 struct ipa_ref *ref;
2272 for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list,
2273 i, ref); i++)
2274 {
2275 if (ref->use == IPA_REF_ALIAS)
2276 continue;
2277 if (is_a <cgraph_node> (ref->referring))
2278 return true;
2279 if (!DECL_EXTERNAL (ref->referring->decl))
2280 return true;
2281 }
2282 return false;
2283 }
2284 return true;
2285 }
2286
2287
2288 /* Write an IL symbol table to OB.
2289 SET and VSET are cgraph/varpool node sets we are outputting. */
2290
2291 static void
2292 produce_symtab (struct output_block *ob)
2293 {
2294 struct streamer_tree_cache_d *cache = ob->writer_cache;
2295 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2296 struct pointer_set_t *seen;
2297 struct lto_output_stream stream;
2298 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2299 lto_symtab_encoder_iterator lsei;
2300
2301 lto_begin_section (section_name, false);
2302 free (section_name);
2303
2304 seen = pointer_set_create ();
2305 memset (&stream, 0, sizeof (stream));
2306
2307 /* Write the symbol table.
2308 First write everything defined and then all declarations.
2309 This is necessary to handle cases where we have duplicated symbols. */
2310 for (lsei = lsei_start (encoder);
2311 !lsei_end_p (lsei); lsei_next (&lsei))
2312 {
2313 symtab_node *node = lsei_node (lsei);
2314
2315 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2316 continue;
2317 write_symbol (cache, &stream, node->decl, seen, false);
2318 }
2319 for (lsei = lsei_start (encoder);
2320 !lsei_end_p (lsei); lsei_next (&lsei))
2321 {
2322 symtab_node *node = lsei_node (lsei);
2323
2324 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2325 continue;
2326 write_symbol (cache, &stream, node->decl, seen, false);
2327 }
2328
2329 lto_write_stream (&stream);
2330 pointer_set_destroy (seen);
2331
2332 lto_end_section ();
2333 }
2334
2335
/* This pass is run after all of the functions are serialized and all
   of the IPA passes have written their serialized forms.  This pass
   causes the vector of all of the global decls and types used from
   this file to be written in to a section that can then be read in to
   recover these on other side.

   Section layout: lto_decl_header, then the per-state reference
   tables (count, global state, one state per function), then the
   main tree stream, then the string stream.  */

void
produce_asm_for_decls (void)
{
  struct lto_out_decl_state *out_state;
  struct lto_out_decl_state *fn_out_state;
  struct lto_decl_header header;
  char *section_name;
  struct output_block *ob;
  struct lto_output_stream *header_stream, *decl_state_stream;
  unsigned idx, num_fns;
  size_t decl_state_size;
  int32_t num_decl_states;

  ob = create_output_block (LTO_section_decls);
  ob->global = true;

  memset (&header, 0, sizeof (struct lto_decl_header));

  section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* All aliases must already be lowered into symtab nodes by now.  */
  gcc_assert (!alias_pairs);

  /* Write the global symbols.  Stream the trees of the global state
     first, then of each per-function state recorded by lto_output.  */
  out_state = lto_get_out_decl_state ();
  num_fns = lto_function_decl_states.length ();
  lto_output_decl_state_streams (ob, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_streams (ob, fn_out_state);
    }

  /* Fill in the section header.  Sizes must be computed after all
     streams have been written, above.  */
  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  /* Currently not used.  This field would allow us to preallocate
     the globals vector, so that it need not be resized as it is extended.  */
  header.num_nodes = -1;

  /* Compute the total size of all decl out states.  */
  decl_state_size = sizeof (int32_t);
  decl_state_size += lto_out_decl_state_written_size (out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      decl_state_size += lto_out_decl_state_written_size (fn_out_state);
    }
  header.decl_state_size = decl_state_size;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  /* Emit the header itself first so the reader can find the parts.  */
  header_stream = XCNEW (struct lto_output_stream);
  lto_output_data_stream (header_stream, &header, sizeof header);
  lto_write_stream (header_stream);
  free (header_stream);

  /* Write the main out-decl state, followed by out-decl states of
     functions.  */
  decl_state_stream = XCNEW (struct lto_output_stream);
  num_decl_states = num_fns + 1;
  lto_output_data_stream (decl_state_stream, &num_decl_states,
			  sizeof (num_decl_states));
  lto_output_decl_state_refs (ob, decl_state_stream, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_refs (ob, decl_state_stream, fn_out_state);
    }
  lto_write_stream (decl_state_stream);
  free (decl_state_stream);

  /* Finally the bulk tree data and the string table.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  /* Write the symbol table.  It is used by linker to determine dependencies
     and thus we can skip it for WPA.  */
  if (!flag_wpa)
    produce_symtab (ob);

  /* Write command line opts.  */
  lto_write_options ();

  /* Deallocate memory and clean up.  */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_delete_out_decl_state (fn_out_state);
    }
  lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
  lto_function_decl_states.release ();
  destroy_output_block (ob);
}