re PR ipa/66181 (/usr/include/bits/types.h:134:16: ICE: verify_type failed)
[gcc.git] / gcc / lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2015 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "input.h"
28 #include "alias.h"
29 #include "symtab.h"
30 #include "tree.h"
31 #include "fold-const.h"
32 #include "stor-layout.h"
33 #include "stringpool.h"
34 #include "hard-reg-set.h"
35 #include "function.h"
36 #include "rtl.h"
37 #include "flags.h"
38 #include "insn-config.h"
39 #include "expmed.h"
40 #include "dojump.h"
41 #include "explow.h"
42 #include "calls.h"
43 #include "emit-rtl.h"
44 #include "varasm.h"
45 #include "stmt.h"
46 #include "expr.h"
47 #include "params.h"
48 #include "predict.h"
49 #include "dominance.h"
50 #include "cfg.h"
51 #include "basic-block.h"
52 #include "tree-ssa-alias.h"
53 #include "internal-fn.h"
54 #include "gimple-expr.h"
55 #include "is-a.h"
56 #include "gimple.h"
57 #include "gimple-iterator.h"
58 #include "gimple-ssa.h"
59 #include "tree-ssanames.h"
60 #include "tree-pass.h"
61 #include "diagnostic-core.h"
62 #include "except.h"
63 #include "lto-symtab.h"
64 #include "plugin-api.h"
65 #include "ipa-ref.h"
66 #include "cgraph.h"
67 #include "lto-streamer.h"
68 #include "data-streamer.h"
69 #include "gimple-streamer.h"
70 #include "tree-streamer.h"
71 #include "streamer-hooks.h"
72 #include "cfgloop.h"
73 #include "builtins.h"
74 #include "gomp-constants.h"
75
76
77 static void lto_write_tree (struct output_block*, tree, bool);
78
/* Clear the current location info stored in OB.  Resets the cached
   file/line/column state used by lto_output_location for delta
   encoding, so the next location streamed is emitted in full.  */

static void
clear_line_info (struct output_block *ob)
{
  ob->current_file = NULL;
  ob->current_line = 0;
  ob->current_col = 0;
}
88
89
/* Create the output block and return it.  SECTION_TYPE is
   LTO_section_function_body or LTO_static_initializer.
   The caller owns the result and must release it with
   destroy_output_block.  */

struct output_block *
create_output_block (enum lto_section_type section_type)
{
  struct output_block *ob = XCNEW (struct output_block);

  ob->section_type = section_type;
  ob->decl_state = lto_get_out_decl_state ();
  ob->main_stream = XCNEW (struct lto_output_stream);
  ob->string_stream = XCNEW (struct lto_output_stream);
  /* NOTE(review): first argument presumably toggles hashing of cache
     entries; it is disabled for WPA — confirm against
     streamer_tree_cache_create.  */
  ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);

  /* Only function bodies carry a CFG stream.  */
  if (section_type == LTO_section_function_body)
    ob->cfg_stream = XCNEW (struct lto_output_stream);

  /* Start location tracking from a clean slate.  */
  clear_line_info (ob);

  ob->string_hash_table = new hash_table<string_slot_hasher> (37);
  gcc_obstack_init (&ob->obstack);

  return ob;
}
114
115
/* Destroy the output block OB, releasing everything that
   create_output_block allocated for it.  */

void
destroy_output_block (struct output_block *ob)
{
  enum lto_section_type section_type = ob->section_type;

  delete ob->string_hash_table;
  ob->string_hash_table = NULL;

  free (ob->main_stream);
  free (ob->string_stream);
  /* The CFG stream is only allocated for function bodies.  */
  if (section_type == LTO_section_function_body)
    free (ob->cfg_stream);

  streamer_tree_cache_delete (ob->writer_cache);
  obstack_free (&ob->obstack, NULL);

  free (ob);
}
136
137
/* Look up NODE in the type table and write the index for it to OB.
   Emits an LTO_type_ref record followed by the type's index in the
   out-decl-state type table.  */

static void
output_type_ref (struct output_block *ob, tree node)
{
  streamer_write_record_start (ob, LTO_type_ref);
  lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
}
146
147
/* Return true if tree node T is written to various tables.  For these
   nodes, we sometimes want to write their physical representation
   (via lto_output_tree), and sometimes we need to emit an index
   reference into a table (via lto_output_tree_ref).  */

static bool
tree_is_indexable (tree t)
{
  /* Parameters and return values of functions of variably modified types
     must go to global stream, because they may be used in the type
     definition.  */
  if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
      && DECL_CONTEXT (t))
    return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
  /* IMPORTED_DECL is put into BLOCK and thus it never can be shared.  */
  else if (TREE_CODE (t) == IMPORTED_DECL)
    return false;
  /* Function-local entities are streamed with the body that contains
     them, never indexed.  */
  else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
	    || TREE_CODE (t) == TYPE_DECL
	    || TREE_CODE (t) == CONST_DECL
	    || TREE_CODE (t) == NAMELIST_DECL)
	   && decl_function_context (t))
    return false;
  else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
    return false;
  /* Variably modified types need to be streamed alongside function
     bodies because they can refer to local entities.  Together with
     them we have to localize their members as well.
     ??? In theory that includes non-FIELD_DECLs as well.  */
  else if (TYPE_P (t)
	   && variably_modified_type_p (t, NULL_TREE))
    return false;
  else if (TREE_CODE (t) == FIELD_DECL
	   && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
    return false;
  else
    /* Everything else: all types, all remaining decls, and SSA names
       are indexable.  */
    return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
}
186
187
/* Output info about new location into bitpack BP.
   After outputting bitpack, lto_output_location_data has
   to be done to output actual data.

   Locations are delta-encoded against the file/line/column state
   cached in OB: three change-bits say which components differ from
   the previous location, and only the changed components follow.  */

void
lto_output_location (struct output_block *ob, struct bitpack_d *bp,
		     location_t loc)
{
  expanded_location xloc;

  loc = LOCATION_LOCUS (loc);
  /* Reserved locations are streamed by their ordinal alone.  */
  bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT,
		        loc < RESERVED_LOCATION_COUNT
			? loc : RESERVED_LOCATION_COUNT);
  if (loc < RESERVED_LOCATION_COUNT)
    return;

  xloc = expand_location (loc);

  /* One change-bit per component.  */
  bp_pack_value (bp, ob->current_file != xloc.file, 1);
  bp_pack_value (bp, ob->current_line != xloc.line, 1);
  bp_pack_value (bp, ob->current_col != xloc.column, 1);

  if (ob->current_file != xloc.file)
    bp_pack_string (ob, bp, xloc.file, true);
  ob->current_file = xloc.file;

  if (ob->current_line != xloc.line)
    bp_pack_var_len_unsigned (bp, xloc.line);
  ob->current_line = xloc.line;

  if (ob->current_col != xloc.column)
    bp_pack_var_len_unsigned (bp, xloc.column);
  ob->current_col = xloc.column;
}
223
224
/* If EXPR is an indexable tree node, output a reference to it to
   output block OB.  Otherwise, output the physical representation of
   EXPR to OB.  Each reference is a record tag identifying the kind
   of node followed by its index in the appropriate decl-state table
   (or the SSA name version for SSA_NAMEs).  */

static void
lto_output_tree_ref (struct output_block *ob, tree expr)
{
  enum tree_code code;

  /* All types go through the type table.  */
  if (TYPE_P (expr))
    {
      output_type_ref (ob, expr);
      return;
    }

  code = TREE_CODE (expr);
  switch (code)
    {
    case SSA_NAME:
      /* Only the version number is emitted; the name itself is
	 rebuilt on the reading side.  */
      streamer_write_record_start (ob, LTO_ssa_name_ref);
      streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
      break;

    case FIELD_DECL:
      streamer_write_record_start (ob, LTO_field_decl_ref);
      lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case FUNCTION_DECL:
      streamer_write_record_start (ob, LTO_function_decl_ref);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case VAR_DECL:
    case DEBUG_EXPR_DECL:
      /* Function-local non-static variables are not indexable (see
	 tree_is_indexable) and must never reach this point.  */
      gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
      /* Fallthrough.  */
    case PARM_DECL:
      streamer_write_record_start (ob, LTO_global_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case CONST_DECL:
      streamer_write_record_start (ob, LTO_const_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case IMPORTED_DECL:
      gcc_assert (decl_function_context (expr) == NULL);
      streamer_write_record_start (ob, LTO_imported_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TYPE_DECL:
      streamer_write_record_start (ob, LTO_type_decl_ref);
      lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMELIST_DECL:
      streamer_write_record_start (ob, LTO_namelist_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMESPACE_DECL:
      streamer_write_record_start (ob, LTO_namespace_decl_ref);
      lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case LABEL_DECL:
      streamer_write_record_start (ob, LTO_label_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case RESULT_DECL:
      streamer_write_record_start (ob, LTO_result_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TRANSLATION_UNIT_DECL:
      streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    default:
      /* No other node is indexable, so it should have been handled by
	 lto_output_tree.  */
      gcc_unreachable ();
    }
}
313
314
315 /* Return true if EXPR is a tree node that can be written to disk. */
316
317 static inline bool
318 lto_is_streamable (tree expr)
319 {
320 enum tree_code code = TREE_CODE (expr);
321
322 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
323 name version in lto_output_tree_ref (see output_ssa_names). */
324 return !is_lang_specific (expr)
325 && code != SSA_NAME
326 && code != CALL_EXPR
327 && code != LANG_TYPE
328 && code != MODIFY_EXPR
329 && code != INIT_EXPR
330 && code != TARGET_EXPR
331 && code != BIND_EXPR
332 && code != WITH_CLEANUP_EXPR
333 && code != STATEMENT_LIST
334 && (code == CASE_LABEL_EXPR
335 || code == DECL_EXPR
336 || TREE_CODE_CLASS (code) != tcc_statement);
337 }
338
339
/* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL.
   Returns the actual initializer, or error_mark_node as a placeholder
   when the initializer is not worth emitting inline (large CONSTRUCTORs
   are streamed in a separate section, or not at all when the encoder
   says so).  */

static tree
get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
{
  gcc_checking_assert (DECL_P (expr)
		       && TREE_CODE (expr) != FUNCTION_DECL
		       && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);

  /* Handle DECL_INITIAL for symbols.  */
  tree initial = DECL_INITIAL (expr);
  if (TREE_CODE (expr) == VAR_DECL
      && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
      && !DECL_IN_CONSTANT_POOL (expr)
      && initial)
    {
      varpool_node *vnode;
      /* Extra section needs about 30 bytes; do not produce it for simple
	 scalar values.  */
      if (TREE_CODE (DECL_INITIAL (expr)) == CONSTRUCTOR
	  || !(vnode = varpool_node::get (expr))
	  || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
	initial = error_mark_node;
    }

  return initial;
}
367
368
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  The tree header is assumed to have been
   written by the caller (see lto_write_tree); only the bitfields,
   pointer fields and LTO-specific data are emitted here.  */

static void
lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
{
  /* Pack all the non-pointer fields in EXPR into a bitpack and write
     the resulting bitpack.  */
  streamer_write_tree_bitfields (ob, expr);

  /* Write all the pointer fields in EXPR.  */
  streamer_write_tree_body (ob, expr, ref_p);

  /* Write any LTO-specific data to OB.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    {
      /* Handle DECL_INITIAL for symbols.  */
      tree initial = get_symbol_initial_value
			 (ob->decl_state->symtab_node_encoder, expr);
      stream_write_tree (ob, initial, ref_p);
    }
}
395
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  Emits the header, then the body via
   lto_write_tree_1, then a terminating zero marker.  */

static void
lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
{
  if (!lto_is_streamable (expr))
    internal_error ("tree code %qs is not supported in LTO streams",
		    get_tree_code_name (TREE_CODE (expr)));

  /* Write the header, containing everything needed to materialize
     EXPR on the reading side.  */
  streamer_write_tree_header (ob, expr);

  lto_write_tree_1 (ob, expr, ref_p);

  /* Mark the end of EXPR.  */
  streamer_write_zero (ob);
}
417
/* Emit the physical representation of tree node EXPR to output block
   OB.  If THIS_REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.
   HASH is the streaming hash of EXPR; EXPR is inserted into the writer
   cache under it and must not already be present there.  */

static void
lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
		   bool ref_p, bool this_ref_p)
{
  unsigned ix;

  /* Indexable trees must have been emitted as references already.  */
  gcc_checking_assert (expr != NULL_TREE
		       && !(this_ref_p && tree_is_indexable (expr)));

  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
					      expr, hash, &ix);
  gcc_assert (!exists_p);
  if (streamer_handle_as_builtin_p (expr))
    {
      /* MD and NORMAL builtins do not need to be written out
	 completely as they are always instantiated by the
	 compiler on startup.  The only builtins that need to
	 be written out are BUILT_IN_FRONTEND.  For all other
	 builtins, we simply write the class and code.  */
      streamer_write_builtin (ob, expr);
    }
  else if (TREE_CODE (expr) == INTEGER_CST
	   && !TREE_OVERFLOW (expr))
    {
      /* Shared INTEGER_CST nodes are special because they need their
	 original type to be materialized by the reader (to implement
	 TYPE_CACHED_VALUES).  */
      streamer_write_integer_cst (ob, expr, ref_p);
    }
  else
    {
      /* This is the first time we see EXPR, write its fields
	 to OB.  */
      lto_write_tree (ob, expr, ref_p);
    }
}
458
/* Depth-first walker that discovers the strongly connected components
   (SCCs) of the tree graph reachable from a given expression, using
   Tarjan's algorithm in an iterative, worklist-driven form.  The
   constructor performs the whole walk and streams the SCCs; completed
   SCC members are accumulated on SCCSTACK for callers to inspect.  */

class DFS
{
public:
  DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
       bool single_p);
  ~DFS ();

  /* A tree paired with its streaming hash.  */
  struct scc_entry
  {
    tree t;
    hashval_t hash;
  };
  /* Tarjan stack of visited trees not yet assigned to a finished SCC.  */
  vec<scc_entry> sccstack;

private:
  /* Per-tree Tarjan state: DFS discovery number and low-link value.  */
  struct sccs
  {
    unsigned int dfsnum;
    unsigned int low;
  };
  /* One pending edge of the explicit (non-recursive) DFS.  CSTATE is
     NULL until the target has been entered.  */
  struct worklist
  {
    tree expr;
    sccs *from_state;
    sccs *cstate;
    bool ref_p;
    bool this_ref_p;
  };

  static int scc_entry_compare (const void *, const void *);

  void DFS_write_tree_body (struct output_block *ob,
			    tree expr, sccs *expr_state, bool ref_p);

  void DFS_write_tree (struct output_block *ob, sccs *from_state,
		       tree expr, bool ref_p, bool this_ref_p);

  hashval_t
  hash_scc (struct output_block *ob, unsigned first, unsigned size);

  /* Map from tree node to its Tarjan state.  */
  hash_map<tree, sccs *> sccstate;
  /* Explicit DFS stack replacing recursion.  */
  vec<worklist> worklist_vec;
  /* Backing storage for sccs records.  */
  struct obstack sccstate_obstack;
};
503
/* Perform the complete DFS walk starting from EXPR and stream every
   SCC found.  This is Tarjan's SCC algorithm made iterative with an
   explicit worklist so arbitrarily deep trees cannot overflow the C
   stack.  REF_P/THIS_REF_P are propagated to DFS_write_tree.  When
   SINGLE_P is true we are re-walking a known single leaf-SCC and only
   maintain the sccstack for the caller.  */

DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
	  bool single_p)
{
  unsigned int next_dfs_num = 1;
  sccstack.create (0);
  gcc_obstack_init (&sccstate_obstack);
  worklist_vec = vNULL;
  /* Seed the worklist with the root edge.  */
  DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
  while (!worklist_vec.is_empty ())
    {
      worklist &w = worklist_vec.last ();
      expr = w.expr;
      sccs *from_state = w.from_state;
      sccs *cstate = w.cstate;
      ref_p = w.ref_p;
      this_ref_p = w.this_ref_p;
      if (cstate == NULL)
	{
	  /* First time this worklist entry is processed.  */
	  sccs **slot = &sccstate.get_or_insert (expr);
	  cstate = *slot;
	  if (cstate)
	    {
	      /* EXPR was already visited: this is a cross- or back-edge.
		 Just fold its dfsnum into the parent's low-link.  */
	      gcc_checking_assert (from_state);
	      if (cstate->dfsnum < from_state->dfsnum)
		from_state->low = MIN (cstate->dfsnum, from_state->low);
	      worklist_vec.pop ();
	      continue;
	    }

	  scc_entry e = { expr, 0 };
	  /* Not yet visited.  DFS recurse and push it onto the stack.  */
	  *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
	  sccstack.safe_push (e);
	  cstate->dfsnum = next_dfs_num++;
	  cstate->low = cstate->dfsnum;
	  w.cstate = cstate;

	  if (streamer_handle_as_builtin_p (expr))
	    ; /* Builtins carry no edges that need walking.  */
	  else if (TREE_CODE (expr) == INTEGER_CST
		   && !TREE_OVERFLOW (expr))
	    /* Shared INTEGER_CSTs only need their type walked.  */
	    DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
	  else
	    {
	      DFS_write_tree_body (ob, expr, cstate, ref_p);

	      /* Walk any LTO-specific edges.  */
	      if (DECL_P (expr)
		  && TREE_CODE (expr) != FUNCTION_DECL
		  && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
		{
		  /* Handle DECL_INITIAL for symbols.  */
		  tree initial
		    = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
						expr);
		  DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
		}
	    }
	  /* Leave the entry on the worklist; we come back to it after
	     all the edges pushed above have been processed.  */
	  continue;
	}

      /* Second visit: all children have been processed.
	 See if we found an SCC.  */
      if (cstate->low == cstate->dfsnum)
	{
	  unsigned first, size;
	  tree x;

	  /* If we are re-walking a single leaf-SCC just pop it,
	     let earlier worklist item access the sccstack.  */
	  if (single_p)
	    {
	      worklist_vec.pop ();
	      continue;
	    }

	  /* Pop the SCC and compute its size.  */
	  first = sccstack.length ();
	  do
	    {
	      x = sccstack[--first].t;
	    }
	  while (x != expr);
	  size = sccstack.length () - first;

	  /* No need to compute hashes for LTRANS units, we don't perform
	     any merging there.  */
	  hashval_t scc_hash = 0;
	  unsigned scc_entry_len = 0;
	  if (!flag_wpa)
	    {
	      scc_hash = hash_scc (ob, first, size);

	      /* Put the entries with the least number of collisions first.  */
	      unsigned entry_start = 0;
	      scc_entry_len = size + 1;
	      for (unsigned i = 0; i < size;)
		{
		  unsigned from = i;
		  /* Skip over the run of entries sharing one hash.  */
		  for (i = i + 1; i < size
		       && (sccstack[first + i].hash
			   == sccstack[first + from].hash); ++i)
		    ;
		  if (i - from < scc_entry_len)
		    {
		      scc_entry_len = i - from;
		      entry_start = from;
		    }
		}
	      /* Rotate the least-colliding run to the front.  */
	      for (unsigned i = 0; i < scc_entry_len; ++i)
		{
		  scc_entry tem = sccstack[first + i];
		  sccstack[first + i] = sccstack[first + entry_start + i];
		  sccstack[first + entry_start + i] = tem;
		}

	      if (scc_entry_len == 1)
		; /* We already sorted SCC deterministically in hash_scc.  */
	      else
		/* Check that we have only one SCC.
		   Naturally we may have conflicts if hash function is not
		   strong enough.  Let's see how far this gets.  */
		{
#ifdef ENABLE_CHECKING
		  gcc_unreachable ();
#endif
		}
	    }

	  /* Write LTO_tree_scc.  */
	  streamer_write_record_start (ob, LTO_tree_scc);
	  streamer_write_uhwi (ob, size);
	  streamer_write_uhwi (ob, scc_hash);

	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
	     All INTEGER_CSTs need to be handled this way as we need
	     their type to materialize them.  Also builtins are handled
	     this way.
	     ??? We still wrap these in LTO_tree_scc so at the
	     input side we can properly identify the tree we want
	     to ultimately return.  */
	  if (size == 1)
	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
	  else
	    {
	      /* Write the size of the SCC entry candidates.  */
	      streamer_write_uhwi (ob, scc_entry_len);

	      /* Write all headers and populate the streamer cache.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  hashval_t hash = sccstack[first+i].hash;
		  tree t = sccstack[first+i].t;
		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
							      t, hash, NULL);
		  gcc_assert (!exists_p);

		  if (!lto_is_streamable (t))
		    internal_error ("tree code %qs is not supported "
				    "in LTO streams",
				    get_tree_code_name (TREE_CODE (t)));

		  gcc_checking_assert (!streamer_handle_as_builtin_p (t));

		  /* Write the header, containing everything needed to
		     materialize EXPR on the reading side.  */
		  streamer_write_tree_header (ob, t);
		}

	      /* Write the bitpacks and tree references.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);

		  /* Mark the end of the tree.  */
		  streamer_write_zero (ob);
		}
	    }

	  /* Finally truncate the vector.  */
	  sccstack.truncate (first);

	  if (from_state)
	    from_state->low = MIN (from_state->low, cstate->low);
	  worklist_vec.pop ();
	  continue;
	}

      /* Not an SCC root: propagate the low-link to the parent.  */
      gcc_checking_assert (from_state);
      from_state->low = MIN (from_state->low, cstate->low);
      if (cstate->dfsnum < from_state->dfsnum)
	from_state->low = MIN (cstate->dfsnum, from_state->low);
      worklist_vec.pop ();
    }
  worklist_vec.release ();
}
699
/* Release the SCC stack and the obstack backing the per-tree state.
   (worklist_vec was already released at the end of the walk.)  */

DFS::~DFS ()
{
  sccstack.release ();
  obstack_free (&sccstate_obstack, NULL);
}
705
/* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
   DFS recurse for all tree edges originating from it.  The set of
   edges followed here must mirror what streamer_write_tree_body
   streams, keyed off the tree structures EXPR's code contains.  */

void
DFS::DFS_write_tree_body (struct output_block *ob,
			  tree expr, sccs *expr_state, bool ref_p)
{
#define DFS_follow_tree_edge(DEST) \
  DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)

  enum tree_code code;

  code = TREE_CODE (expr);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (TREE_CODE (expr) != IDENTIFIER_NODE)
	DFS_follow_tree_edge (TREE_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
	DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      DFS_follow_tree_edge (TREE_REALPART (expr));
      DFS_follow_tree_edge (TREE_IMAGPART (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (expr)
	  && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
	  && ANON_AGGRNAME_P (DECL_NAME (expr)))
	;
      else
	DFS_follow_tree_edge (DECL_NAME (expr));
      DFS_follow_tree_edge (DECL_CONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      DFS_follow_tree_edge (DECL_SIZE (expr));
      DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));

      /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
	 special handling in LTO, it must be handled by streamer hooks.  */

      DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));

      /* Do not follow DECL_ABSTRACT_ORIGIN.  We cannot handle debug information
	 for early inlining so drop it on the floor instead of ICEing in
	 dwarf2out.c.  */

      if ((TREE_CODE (expr) == VAR_DECL
	   || TREE_CODE (expr) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (expr))
	DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
      if (TREE_CODE (expr) == VAR_DECL)
	DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (TREE_CODE (expr) == TYPE_DECL)
	DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* Make sure we don't inadvertently set the assembler name.  */
      if (DECL_ASSEMBLER_NAME_SET_P (expr))
	DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
      DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
      DFS_follow_tree_edge (DECL_FCONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      DFS_follow_tree_edge (DECL_VINDEX (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      DFS_follow_tree_edge (TYPE_SIZE (expr));
      DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
      DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
      DFS_follow_tree_edge (TYPE_NAME (expr));
      /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
	 reconstructed during fixup.  */
      /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
	 during fixup.  */
      DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
      DFS_follow_tree_edge (TYPE_CONTEXT (expr));
      /* TYPE_CANONICAL is re-computed during type merging, so no need
	 to follow it here.  */
      DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	DFS_follow_tree_edge (TYPE_VALUES (expr));
      else if (TREE_CODE (expr) == ARRAY_TYPE)
	DFS_follow_tree_edge (TYPE_DOMAIN (expr));
      else if (RECORD_OR_UNION_TYPE_P (expr))
	for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
	  DFS_follow_tree_edge (t);
      else if (TREE_CODE (expr) == FUNCTION_TYPE
	       || TREE_CODE (expr) == METHOD_TYPE)
	DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));

      if (!POINTER_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_MINVAL (expr));
      DFS_follow_tree_edge (TYPE_MAXVAL (expr));
      if (RECORD_OR_UNION_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_BINFO (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      DFS_follow_tree_edge (TREE_PURPOSE (expr));
      DFS_follow_tree_edge (TREE_VALUE (expr));
      DFS_follow_tree_edge (TREE_CHAIN (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    {
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_OPERAND (expr, i));
      DFS_follow_tree_edge (TREE_BLOCK (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    {
      for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
	if (VAR_OR_FUNCTION_DECL_P (t)
	    && DECL_EXTERNAL (t))
	  /* We have to stream externals in the block chain as
	     non-references.  See also
	     tree-streamer-out.c:streamer_write_chain.  */
	  DFS_write_tree (ob, expr_state, t, ref_p, false);
	else
	  DFS_follow_tree_edge (t);

      DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));

      /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
	 handle - those that represent inlined function scopes.
	 The rest we drop on the floor instead of ICEing
	 in dwarf2out.c.  */
      if (inlined_function_outer_scope_p (expr))
	{
	  tree ultimate_origin = block_ultimate_origin (expr);
	  DFS_follow_tree_edge (ultimate_origin);
	}
      /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
	 information for early inlined BLOCKs so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
	 streaming time.  */

      /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
	 list is re-constructed from BLOCK_SUPERCONTEXT.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree t;

      /* Note that the number of BINFO slots has already been emitted in
	 EXPR's header (see streamer_write_tree_header) because this length
	 is needed to build the empty BINFO node on the reader side.  */
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
	DFS_follow_tree_edge (t);
      DFS_follow_tree_edge (BINFO_OFFSET (expr));
      DFS_follow_tree_edge (BINFO_VTABLE (expr));
      DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));

      /* The number of BINFO_BASE_ACCESSES has already been emitted in
	 EXPR's bitfield section.  */
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
	DFS_follow_tree_edge (t);

      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
	{
	  DFS_follow_tree_edge (index);
	  DFS_follow_tree_edge (value);
	}
    }

  if (code == OMP_CLAUSE)
    {
      int i;
      for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
	DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
      DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
    }

#undef DFS_follow_tree_edge
}
938
939 /* Return a hash value for the tree T.
940 CACHE holds hash values of trees outside current SCC. MAP, if non-NULL,
941 may hold hash values if trees inside current SCC. */
942
943 static hashval_t
944 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
945 {
946 inchash::hash hstate;
947
948 #define visit(SIBLING) \
949 do { \
950 unsigned ix; \
951 if (!SIBLING) \
952 hstate.add_int (0); \
953 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
954 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
955 else if (map) \
956 hstate.add_int (*map->get (SIBLING)); \
957 else \
958 hstate.add_int (1); \
959 } while (0)
960
961 /* Hash TS_BASE. */
962 enum tree_code code = TREE_CODE (t);
963 hstate.add_int (code);
964 if (!TYPE_P (t))
965 {
966 hstate.add_flag (TREE_SIDE_EFFECTS (t));
967 hstate.add_flag (TREE_CONSTANT (t));
968 hstate.add_flag (TREE_READONLY (t));
969 hstate.add_flag (TREE_PUBLIC (t));
970 }
971 hstate.add_flag (TREE_ADDRESSABLE (t));
972 hstate.add_flag (TREE_THIS_VOLATILE (t));
973 if (DECL_P (t))
974 hstate.add_flag (DECL_UNSIGNED (t));
975 else if (TYPE_P (t))
976 hstate.add_flag (TYPE_UNSIGNED (t));
977 if (TYPE_P (t))
978 hstate.add_flag (TYPE_ARTIFICIAL (t));
979 else
980 hstate.add_flag (TREE_NO_WARNING (t));
981 hstate.add_flag (TREE_NOTHROW (t));
982 hstate.add_flag (TREE_STATIC (t));
983 hstate.add_flag (TREE_PROTECTED (t));
984 hstate.add_flag (TREE_DEPRECATED (t));
985 if (code != TREE_BINFO)
986 hstate.add_flag (TREE_PRIVATE (t));
987 if (TYPE_P (t))
988 {
989 hstate.add_flag (TYPE_SATURATING (t));
990 hstate.add_flag (TYPE_ADDR_SPACE (t));
991 }
992 else if (code == SSA_NAME)
993 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
994 hstate.commit_flag ();
995
996 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
997 {
998 int i;
999 hstate.add_wide_int (TREE_INT_CST_NUNITS (t));
1000 hstate.add_wide_int (TREE_INT_CST_EXT_NUNITS (t));
1001 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1002 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
1003 }
1004
1005 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1006 {
1007 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
1008 hstate.add_flag (r.cl);
1009 hstate.add_flag (r.sign);
1010 hstate.add_flag (r.signalling);
1011 hstate.add_flag (r.canonical);
1012 hstate.commit_flag ();
1013 hstate.add_int (r.uexp);
1014 hstate.add (r.sig, sizeof (r.sig));
1015 }
1016
1017 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1018 {
1019 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
1020 hstate.add_int (f.mode);
1021 hstate.add_int (f.data.low);
1022 hstate.add_int (f.data.high);
1023 }
1024
1025 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1026 {
1027 hstate.add_wide_int (DECL_MODE (t));
1028 hstate.add_flag (DECL_NONLOCAL (t));
1029 hstate.add_flag (DECL_VIRTUAL_P (t));
1030 hstate.add_flag (DECL_IGNORED_P (t));
1031 hstate.add_flag (DECL_ABSTRACT_P (t));
1032 hstate.add_flag (DECL_ARTIFICIAL (t));
1033 hstate.add_flag (DECL_USER_ALIGN (t));
1034 hstate.add_flag (DECL_PRESERVE_P (t));
1035 hstate.add_flag (DECL_EXTERNAL (t));
1036 hstate.add_flag (DECL_GIMPLE_REG_P (t));
1037 hstate.commit_flag ();
1038 hstate.add_int (DECL_ALIGN (t));
1039 if (code == LABEL_DECL)
1040 {
1041 hstate.add_int (EH_LANDING_PAD_NR (t));
1042 hstate.add_int (LABEL_DECL_UID (t));
1043 }
1044 else if (code == FIELD_DECL)
1045 {
1046 hstate.add_flag (DECL_PACKED (t));
1047 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
1048 hstate.add_int (DECL_OFFSET_ALIGN (t));
1049 }
1050 else if (code == VAR_DECL)
1051 {
1052 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
1053 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
1054 }
1055 if (code == RESULT_DECL
1056 || code == PARM_DECL
1057 || code == VAR_DECL)
1058 {
1059 hstate.add_flag (DECL_BY_REFERENCE (t));
1060 if (code == VAR_DECL
1061 || code == PARM_DECL)
1062 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
1063 }
1064 hstate.commit_flag ();
1065 }
1066
1067 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1068 hstate.add_int (DECL_REGISTER (t));
1069
1070 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1071 {
1072 hstate.add_flag (DECL_COMMON (t));
1073 hstate.add_flag (DECL_DLLIMPORT_P (t));
1074 hstate.add_flag (DECL_WEAK (t));
1075 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
1076 hstate.add_flag (DECL_COMDAT (t));
1077 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
1078 hstate.add_int (DECL_VISIBILITY (t));
1079 if (code == VAR_DECL)
1080 {
1081 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1082 hstate.add_flag (DECL_HARD_REGISTER (t));
1083 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
1084 }
1085 if (TREE_CODE (t) == FUNCTION_DECL)
1086 {
1087 hstate.add_flag (DECL_FINAL_P (t));
1088 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
1089 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
1090 }
1091 hstate.commit_flag ();
1092 }
1093
1094 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1095 {
1096 hstate.add_int (DECL_BUILT_IN_CLASS (t));
1097 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
1098 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
1099 hstate.add_flag (DECL_UNINLINABLE (t));
1100 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
1101 hstate.add_flag (DECL_IS_NOVOPS (t));
1102 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
1103 hstate.add_flag (DECL_IS_MALLOC (t));
1104 hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
1105 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
1106 hstate.add_flag (DECL_STATIC_CHAIN (t));
1107 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
1108 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
1109 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
1110 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
1111 hstate.add_flag (DECL_PURE_P (t));
1112 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
1113 hstate.commit_flag ();
1114 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
1115 hstate.add_int (DECL_FUNCTION_CODE (t));
1116 }
1117
1118 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1119 {
1120 hstate.add_wide_int (TYPE_MODE (t));
1121 hstate.add_flag (TYPE_STRING_FLAG (t));
1122 /* TYPE_NO_FORCE_BLK is private to stor-layout and need
1123 no streaming. */
1124 hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
1125 hstate.add_flag (TYPE_PACKED (t));
1126 hstate.add_flag (TYPE_RESTRICT (t));
1127 hstate.add_flag (TYPE_USER_ALIGN (t));
1128 hstate.add_flag (TYPE_READONLY (t));
1129 if (RECORD_OR_UNION_TYPE_P (t))
1130 {
1131 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
1132 hstate.add_flag (TYPE_FINAL_P (t));
1133 }
1134 else if (code == ARRAY_TYPE)
1135 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
1136 hstate.commit_flag ();
1137 hstate.add_int (TYPE_PRECISION (t));
1138 hstate.add_int (TYPE_ALIGN (t));
1139 hstate.add_int ((TYPE_ALIAS_SET (t) == 0
1140 || (!in_lto_p
1141 && get_alias_set (t) == 0))
1142 ? 0 : -1);
1143 }
1144
1145 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1146 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
1147 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
1148
1149 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
1150 /* We don't stream these when passing things to a different target. */
1151 && !lto_stream_offload_p)
1152 hstate.add_wide_int (cl_target_option_hash (TREE_TARGET_OPTION (t)));
1153
1154 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1155 hstate.add_wide_int (cl_optimization_hash (TREE_OPTIMIZATION (t)));
1156
1157 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1158 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
1159
1160 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1161 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
1162
1163 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1164 {
1165 if (code != IDENTIFIER_NODE)
1166 visit (TREE_TYPE (t));
1167 }
1168
1169 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1170 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
1171 visit (VECTOR_CST_ELT (t, i));
1172
1173 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1174 {
1175 visit (TREE_REALPART (t));
1176 visit (TREE_IMAGPART (t));
1177 }
1178
1179 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1180 {
1181 /* Drop names that were created for anonymous entities. */
1182 if (DECL_NAME (t)
1183 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
1184 && ANON_AGGRNAME_P (DECL_NAME (t)))
1185 ;
1186 else
1187 visit (DECL_NAME (t));
1188 if (DECL_FILE_SCOPE_P (t))
1189 ;
1190 else
1191 visit (DECL_CONTEXT (t));
1192 }
1193
1194 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1195 {
1196 visit (DECL_SIZE (t));
1197 visit (DECL_SIZE_UNIT (t));
1198 visit (DECL_ATTRIBUTES (t));
1199 if ((code == VAR_DECL
1200 || code == PARM_DECL)
1201 && DECL_HAS_VALUE_EXPR_P (t))
1202 visit (DECL_VALUE_EXPR (t));
1203 if (code == VAR_DECL
1204 && DECL_HAS_DEBUG_EXPR_P (t))
1205 visit (DECL_DEBUG_EXPR (t));
1206 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
1207 be able to call get_symbol_initial_value. */
1208 }
1209
1210 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1211 {
1212 if (code == TYPE_DECL)
1213 visit (DECL_ORIGINAL_TYPE (t));
1214 }
1215
1216 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1217 {
1218 if (DECL_ASSEMBLER_NAME_SET_P (t))
1219 visit (DECL_ASSEMBLER_NAME (t));
1220 }
1221
1222 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1223 {
1224 visit (DECL_FIELD_OFFSET (t));
1225 visit (DECL_BIT_FIELD_TYPE (t));
1226 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1227 visit (DECL_FIELD_BIT_OFFSET (t));
1228 visit (DECL_FCONTEXT (t));
1229 }
1230
1231 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1232 {
1233 visit (DECL_VINDEX (t));
1234 visit (DECL_FUNCTION_PERSONALITY (t));
1235 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1236 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1237 }
1238
1239 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1240 {
1241 visit (TYPE_SIZE (t));
1242 visit (TYPE_SIZE_UNIT (t));
1243 visit (TYPE_ATTRIBUTES (t));
1244 visit (TYPE_NAME (t));
1245 visit (TYPE_MAIN_VARIANT (t));
1246 if (TYPE_FILE_SCOPE_P (t))
1247 ;
1248 else
1249 visit (TYPE_CONTEXT (t));
1250 visit (TYPE_STUB_DECL (t));
1251 }
1252
1253 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1254 {
1255 if (code == ENUMERAL_TYPE)
1256 visit (TYPE_VALUES (t));
1257 else if (code == ARRAY_TYPE)
1258 visit (TYPE_DOMAIN (t));
1259 else if (RECORD_OR_UNION_TYPE_P (t))
1260 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1261 visit (f);
1262 else if (code == FUNCTION_TYPE
1263 || code == METHOD_TYPE)
1264 visit (TYPE_ARG_TYPES (t));
1265 if (!POINTER_TYPE_P (t))
1266 visit (TYPE_MINVAL (t));
1267 visit (TYPE_MAXVAL (t));
1268 if (RECORD_OR_UNION_TYPE_P (t))
1269 visit (TYPE_BINFO (t));
1270 }
1271
1272 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1273 {
1274 visit (TREE_PURPOSE (t));
1275 visit (TREE_VALUE (t));
1276 visit (TREE_CHAIN (t));
1277 }
1278
1279 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1280 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1281 visit (TREE_VEC_ELT (t, i));
1282
1283 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1284 {
1285 hstate.add_wide_int (TREE_OPERAND_LENGTH (t));
1286 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1287 visit (TREE_OPERAND (t, i));
1288 }
1289
1290 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1291 {
1292 unsigned i;
1293 tree b;
1294 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1295 visit (b);
1296 visit (BINFO_OFFSET (t));
1297 visit (BINFO_VTABLE (t));
1298 visit (BINFO_VPTR_FIELD (t));
1299 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1300 visit (b);
1301 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1302 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1303 }
1304
1305 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1306 {
1307 unsigned i;
1308 tree index, value;
1309 hstate.add_wide_int (CONSTRUCTOR_NELTS (t));
1310 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1311 {
1312 visit (index);
1313 visit (value);
1314 }
1315 }
1316
1317 if (code == OMP_CLAUSE)
1318 {
1319 int i;
1320 HOST_WIDE_INT val;
1321
1322 hstate.add_wide_int (OMP_CLAUSE_CODE (t));
1323 switch (OMP_CLAUSE_CODE (t))
1324 {
1325 case OMP_CLAUSE_DEFAULT:
1326 val = OMP_CLAUSE_DEFAULT_KIND (t);
1327 break;
1328 case OMP_CLAUSE_SCHEDULE:
1329 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1330 break;
1331 case OMP_CLAUSE_DEPEND:
1332 val = OMP_CLAUSE_DEPEND_KIND (t);
1333 break;
1334 case OMP_CLAUSE_MAP:
1335 val = OMP_CLAUSE_MAP_KIND (t);
1336 break;
1337 case OMP_CLAUSE_PROC_BIND:
1338 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1339 break;
1340 case OMP_CLAUSE_REDUCTION:
1341 val = OMP_CLAUSE_REDUCTION_CODE (t);
1342 break;
1343 default:
1344 val = 0;
1345 break;
1346 }
1347 hstate.add_wide_int (val);
1348 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1349 visit (OMP_CLAUSE_OPERAND (t, i));
1350 visit (OMP_CLAUSE_CHAIN (t));
1351 }
1352
1353 return hstate.end ();
1354
1355 #undef visit
1356 }
1357
1358 /* Compare two SCC entries by their hash value for qsorting them. */
1359
1360 int
1361 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1362 {
1363 const scc_entry *p1 = (const scc_entry *) p1_;
1364 const scc_entry *p2 = (const scc_entry *) p2_;
1365 if (p1->hash < p2->hash)
1366 return -1;
1367 else if (p1->hash > p2->hash)
1368 return 1;
1369 return 0;
1370 }
1371
1372 /* Return a hash value for the SCC on the SCC stack from FIRST with
1373 size SIZE. */
1374
hashval_t
DFS::hash_scc (struct output_block *ob,
	       unsigned first, unsigned size)
{
  /* Number of equivalence classes seen in the previous iteration and
     the number of propagation rounds done so far; both bound the
     fixed-point loop below.  */
  unsigned int last_classes = 0, iterations = 0;

  /* Compute hash values for the SCC members.  */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash = hash_tree (ob->writer_cache, NULL,
					sccstack[first+i].t);

  /* A singleton SCC needs no canonicalization; its own hash is the
     SCC hash.  */
  if (size == 1)
    return sccstack[first].hash;

  /* We aim to get unique hash for every tree within SCC and compute hash
     value of the whole SCC by combining all values together in a stable
     (entry point independent) order.  This guarantees that the same SCC
     regions within different translation units will get the same hash
     values and therefore will be merged at WPA time.

     Often the hashes are already unique.  In that case we compute the
     SCC hash by combining individual hash values in an increasing order.

     If there are duplicates we seek at least one tree with unique hash
     (and pick one with minimal hash and this property).  Then we obtain a
     stable order by DFS walk starting from this unique tree and then use
     the index within this order to make individual hash values unique.

     If there is no tree with unique hash, we iteratively propagate the
     hash values across the internal edges of the SCC.  This usually
     quickly leads to unique hashes.  Consider, for example, an SCC
     containing two pointers that are identical except for the type they
     point to and assume that these types are also part of the SCC.
     The propagation will add the points-to type information into their
     hash values.  */
  do
    {
      /* Sort the SCC so we can easily check for uniqueness.  */
      qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);

      unsigned int classes = 1;
      int firstunique = -1;

      /* Find tree with lowest unique hash (if it exists) and compute
	 number of equivalence classes.  */
      if (sccstack[first].hash != sccstack[first+1].hash)
	firstunique = 0;
      for (unsigned i = 1; i < size; ++i)
	if (sccstack[first+i-1].hash != sccstack[first+i].hash)
	  {
	    classes++;
	    /* Entry I is unique when it differs from both neighbors in
	       the sorted order (or is the last element).  */
	    if (firstunique == -1
		&& (i == size - 1
		    || sccstack[first+i+1].hash != sccstack[first+i].hash))
	      firstunique = i;
	  }

      /* If we found a tree with unique hash, stop the iteration.  */
      if (firstunique != -1
	  /* Also terminate if we run out of iterations or if the number of
	     equivalence classes is no longer increasing.
	     For example a cyclic list of trees that are all equivalent will
	     never have a unique entry point; we however do not build such
	     SCCs in our IL.  */
	  || classes <= last_classes || iterations > 16)
	{
	  hashval_t scc_hash;

	  /* If some hashes are not unique (CLASSES != SIZE), use the DFS
	     walk starting from FIRSTUNIQUE to obtain a stable order.  */
	  if (classes != size && firstunique != -1)
	    {
	      hash_map <tree, hashval_t> map(size*2);

	      /* Store hash values into a map, so we can associate them with
		 the reordered SCC.  */
	      for (unsigned i = 0; i < size; ++i)
		map.put (sccstack[first+i].t, sccstack[first+i].hash);

	      /* Re-walk the SCC from the entry with the unique hash; the
		 resulting stack order is entry-point independent.  */
	      DFS again (ob, sccstack[first+firstunique].t, false, false, true);
	      gcc_assert (again.sccstack.length () == size);

	      memcpy (sccstack.address () + first,
		      again.sccstack.address (),
		      sizeof (scc_entry) * size);

	      /* Update hash values of individual members by hashing in the
		 index within the stable order.  This ensures uniqueness.
		 Also compute the scc_hash by mixing in all hash values in
		 the stable order we obtained.  */
	      sccstack[first].hash = *map.get (sccstack[first].t);
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		{
		  sccstack[first+i].hash
		    = iterative_hash_hashval_t (i,
						*map.get (sccstack[first+i].t));
		  scc_hash = iterative_hash_hashval_t (scc_hash,
						       sccstack[first+i].hash);
		}
	    }
	  /* If we got unique hash values for each tree, then sort already
	     ensured entry point independent order.  Only compute the final
	     scc hash.

	     If we failed to find the unique entry point, we go by the same
	     route.  We will eventually introduce unwanted hash conflicts.  */
	  else
	    {
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		scc_hash = iterative_hash_hashval_t (scc_hash,
						     sccstack[first+i].hash);
	      /* We cannot 100% guarantee that the hash will not conflict
		 in a way so the unique hash is not found.  This however
		 should be an extremely rare situation.  ICE for now so
		 possible issues are found and evaluated.  */
	      gcc_checking_assert (classes == size);
	    }

	  /* To avoid conflicts across SCCs, iteratively hash the whole SCC
	     hash into the hash of each of the elements.  */
	  for (unsigned i = 0; i < size; ++i)
	    sccstack[first+i].hash
	      = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
	  return scc_hash;
	}

      last_classes = classes;
      iterations++;

      /* We failed to identify the entry point; propagate hash values
	 across the edges.  */
      {
	/* Snapshot current hashes so hash_tree can mix the neighbors'
	   previous-round values into each member.  */
	hash_map <tree, hashval_t> map(size*2);
	for (unsigned i = 0; i < size; ++i)
	  map.put (sccstack[first+i].t, sccstack[first+i].hash);

	for (unsigned i = 0; i < size; i++)
	  sccstack[first+i].hash = hash_tree (ob->writer_cache, &map,
					      sccstack[first+i].t);
      }
    }
  while (true);
}
1520
1521 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1522 already in the streamer cache. Main routine called for
1523 each visit of EXPR. */
1524
1525 void
1526 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1527 tree expr, bool ref_p, bool this_ref_p)
1528 {
1529 /* Handle special cases. */
1530 if (expr == NULL_TREE)
1531 return;
1532
1533 /* Do not DFS walk into indexable trees. */
1534 if (this_ref_p && tree_is_indexable (expr))
1535 return;
1536
1537 /* Check if we already streamed EXPR. */
1538 if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1539 return;
1540
1541 worklist w;
1542 w.expr = expr;
1543 w.from_state = from_state;
1544 w.cstate = NULL;
1545 w.ref_p = ref_p;
1546 w.this_ref_p = this_ref_p;
1547 worklist_vec.safe_push (w);
1548 }
1549
1550
1551 /* Emit the physical representation of tree node EXPR to output block
1552 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
1553 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1554
void
lto_output_tree (struct output_block *ob, tree expr,
		 bool ref_p, bool this_ref_p)
{
  unsigned ix;
  bool existed_p;

  /* A null tree is represented by a bare LTO_null record.  */
  if (expr == NULL_TREE)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  /* Indexable trees are emitted as references into their own sections.  */
  if (this_ref_p && tree_is_indexable (expr))
    {
      lto_output_tree_ref (ob, expr);
      return;
    }

  existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
  if (existed_p)
    {
      /* If a node has already been streamed out, make sure that
	 we don't write it more than once.  Otherwise, the reader
	 will instantiate two different nodes for the same object.  */
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
  else
    {
      /* This is the first time we see EXPR, write all reachable
	 trees to OB.  */
      static bool in_dfs_walk;

      /* Protect against recursion which means disconnect between
	 what tree edges we walk in the DFS walk and what edges
	 we stream out.  */
      gcc_assert (!in_dfs_walk);

      /* Start the DFS walk; the DFS constructor streams the SCCs of
	 trees reachable from EXPR and fills the writer cache.  */
      in_dfs_walk = true;
      DFS (ob, expr, ref_p, this_ref_p, false);
      in_dfs_walk = false;

      /* Finally append a reference to the tree we were writing.
	 ??? If expr ended up as a singleton we could have
	 inlined it here and avoid outputting a reference.  */
      existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
      gcc_assert (existed_p);
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
}
1616
1617
1618 /* Output to OB a list of try/catch handlers starting with FIRST. */
1619
1620 static void
1621 output_eh_try_list (struct output_block *ob, eh_catch first)
1622 {
1623 eh_catch n;
1624
1625 for (n = first; n; n = n->next_catch)
1626 {
1627 streamer_write_record_start (ob, LTO_eh_catch);
1628 stream_write_tree (ob, n->type_list, true);
1629 stream_write_tree (ob, n->filter_list, true);
1630 stream_write_tree (ob, n->label, true);
1631 }
1632
1633 streamer_write_record_start (ob, LTO_null);
1634 }
1635
1636
1637 /* Output EH region R in function FN to OB. CURR_RN is the slot index
1638 that is being emitted in FN->EH->REGION_ARRAY. This is used to
1639 detect EH region sharing. */
1640
1641 static void
1642 output_eh_region (struct output_block *ob, eh_region r)
1643 {
1644 enum LTO_tags tag;
1645
1646 if (r == NULL)
1647 {
1648 streamer_write_record_start (ob, LTO_null);
1649 return;
1650 }
1651
1652 if (r->type == ERT_CLEANUP)
1653 tag = LTO_ert_cleanup;
1654 else if (r->type == ERT_TRY)
1655 tag = LTO_ert_try;
1656 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1657 tag = LTO_ert_allowed_exceptions;
1658 else if (r->type == ERT_MUST_NOT_THROW)
1659 tag = LTO_ert_must_not_throw;
1660 else
1661 gcc_unreachable ();
1662
1663 streamer_write_record_start (ob, tag);
1664 streamer_write_hwi (ob, r->index);
1665
1666 if (r->outer)
1667 streamer_write_hwi (ob, r->outer->index);
1668 else
1669 streamer_write_zero (ob);
1670
1671 if (r->inner)
1672 streamer_write_hwi (ob, r->inner->index);
1673 else
1674 streamer_write_zero (ob);
1675
1676 if (r->next_peer)
1677 streamer_write_hwi (ob, r->next_peer->index);
1678 else
1679 streamer_write_zero (ob);
1680
1681 if (r->type == ERT_TRY)
1682 {
1683 output_eh_try_list (ob, r->u.eh_try.first_catch);
1684 }
1685 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1686 {
1687 stream_write_tree (ob, r->u.allowed.type_list, true);
1688 stream_write_tree (ob, r->u.allowed.label, true);
1689 streamer_write_uhwi (ob, r->u.allowed.filter);
1690 }
1691 else if (r->type == ERT_MUST_NOT_THROW)
1692 {
1693 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1694 bitpack_d bp = bitpack_create (ob->main_stream);
1695 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1696 streamer_write_bitpack (&bp);
1697 }
1698
1699 if (r->landing_pads)
1700 streamer_write_hwi (ob, r->landing_pads->index);
1701 else
1702 streamer_write_zero (ob);
1703 }
1704
1705
1706 /* Output landing pad LP to OB. */
1707
1708 static void
1709 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1710 {
1711 if (lp == NULL)
1712 {
1713 streamer_write_record_start (ob, LTO_null);
1714 return;
1715 }
1716
1717 streamer_write_record_start (ob, LTO_eh_landing_pad);
1718 streamer_write_hwi (ob, lp->index);
1719 if (lp->next_lp)
1720 streamer_write_hwi (ob, lp->next_lp->index);
1721 else
1722 streamer_write_zero (ob);
1723
1724 if (lp->region)
1725 streamer_write_hwi (ob, lp->region->index);
1726 else
1727 streamer_write_zero (ob);
1728
1729 stream_write_tree (ob, lp->post_landing_pad, true);
1730 }
1731
1732
1733 /* Output the existing eh_table to OB. */
1734
static void
output_eh_regions (struct output_block *ob, struct function *fn)
{
  /* Only functions that actually have an EH region tree emit a record;
     the trailing LTO_null below covers the empty case as well.  */
  if (fn->eh && fn->eh->region_tree)
    {
      unsigned i;
      eh_region eh;
      eh_landing_pad lp;
      tree ttype;

      streamer_write_record_start (ob, LTO_eh_table);

      /* Emit the index of the root of the EH region tree.  */
      streamer_write_hwi (ob, fn->eh->region_tree->index);

      /* Emit all the EH regions in the region array.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
	output_eh_region (ob, eh);

      /* Emit all landing pads.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
	output_eh_lp (ob, lp);

      /* Emit all the runtime type data.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
	stream_write_tree (ob, ttype, true);

      /* Emit the table of action chains.  The representation differs
	 between the ARM EABI unwinder (trees) and the generic unwinder
	 (raw bytes), so the target hook selects the encoding.  */
      if (targetm.arm_eabi_unwinder)
	{
	  tree t;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
	    stream_write_tree (ob, t, true);
	}
      else
	{
	  uchar c;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
	    streamer_write_char_stream (ob->main_stream, c);
	}
    }

  /* The LTO_null either terminates the record or indicates that there
     are no eh_records at all.  */
  streamer_write_record_start (ob, LTO_null);
}
1786
1787
1788 /* Output all of the active ssa names to the ssa_names stream. */
1789
1790 static void
1791 output_ssa_names (struct output_block *ob, struct function *fn)
1792 {
1793 unsigned int i, len;
1794
1795 len = vec_safe_length (SSANAMES (fn));
1796 streamer_write_uhwi (ob, len);
1797
1798 for (i = 1; i < len; i++)
1799 {
1800 tree ptr = (*SSANAMES (fn))[i];
1801
1802 if (ptr == NULL_TREE
1803 || SSA_NAME_IN_FREE_LIST (ptr)
1804 || virtual_operand_p (ptr))
1805 continue;
1806
1807 streamer_write_uhwi (ob, i);
1808 streamer_write_char_stream (ob->main_stream,
1809 SSA_NAME_IS_DEFAULT_DEF (ptr));
1810 if (SSA_NAME_VAR (ptr))
1811 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1812 else
1813 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1814 stream_write_tree (ob, TREE_TYPE (ptr), true);
1815 }
1816
1817 streamer_write_zero (ob);
1818 }
1819
1820
1821 /* Output a wide-int. */
1822
1823 static void
1824 streamer_write_wi (struct output_block *ob,
1825 const widest_int &w)
1826 {
1827 int len = w.get_len ();
1828
1829 streamer_write_uhwi (ob, w.get_precision ());
1830 streamer_write_uhwi (ob, len);
1831 for (int i = 0; i < len; i++)
1832 streamer_write_hwi (ob, w.elt (i));
1833 }
1834
1835
1836 /* Output the cfg. */
1837
1838 static void
1839 output_cfg (struct output_block *ob, struct function *fn)
1840 {
1841 struct lto_output_stream *tmp_stream = ob->main_stream;
1842 basic_block bb;
1843
1844 ob->main_stream = ob->cfg_stream;
1845
1846 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1847 profile_status_for_fn (fn));
1848
1849 /* Output the number of the highest basic block. */
1850 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1851
1852 FOR_ALL_BB_FN (bb, fn)
1853 {
1854 edge_iterator ei;
1855 edge e;
1856
1857 streamer_write_hwi (ob, bb->index);
1858
1859 /* Output the successors and the edge flags. */
1860 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1861 FOR_EACH_EDGE (e, ei, bb->succs)
1862 {
1863 streamer_write_uhwi (ob, e->dest->index);
1864 streamer_write_hwi (ob, e->probability);
1865 streamer_write_gcov_count (ob, e->count);
1866 streamer_write_uhwi (ob, e->flags);
1867 }
1868 }
1869
1870 streamer_write_hwi (ob, -1);
1871
1872 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1873 while (bb->next_bb)
1874 {
1875 streamer_write_hwi (ob, bb->next_bb->index);
1876 bb = bb->next_bb;
1877 }
1878
1879 streamer_write_hwi (ob, -1);
1880
1881 /* ??? The cfgloop interface is tied to cfun. */
1882 gcc_assert (cfun == fn);
1883
1884 /* Output the number of loops. */
1885 streamer_write_uhwi (ob, number_of_loops (fn));
1886
1887 /* Output each loop, skipping the tree root which has number zero. */
1888 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1889 {
1890 struct loop *loop = get_loop (fn, i);
1891
1892 /* Write the index of the loop header. That's enough to rebuild
1893 the loop tree on the reader side. Stream -1 for an unused
1894 loop entry. */
1895 if (!loop)
1896 {
1897 streamer_write_hwi (ob, -1);
1898 continue;
1899 }
1900 else
1901 streamer_write_hwi (ob, loop->header->index);
1902
1903 /* Write everything copy_loop_info copies. */
1904 streamer_write_enum (ob->main_stream,
1905 loop_estimation, EST_LAST, loop->estimate_state);
1906 streamer_write_hwi (ob, loop->any_upper_bound);
1907 if (loop->any_upper_bound)
1908 streamer_write_wi (ob, loop->nb_iterations_upper_bound);
1909 streamer_write_hwi (ob, loop->any_estimate);
1910 if (loop->any_estimate)
1911 streamer_write_wi (ob, loop->nb_iterations_estimate);
1912
1913 /* Write OMP SIMD related info. */
1914 streamer_write_hwi (ob, loop->safelen);
1915 streamer_write_hwi (ob, loop->dont_vectorize);
1916 streamer_write_hwi (ob, loop->force_vectorize);
1917 stream_write_tree (ob, loop->simduid, true);
1918 }
1919
1920 ob->main_stream = tmp_stream;
1921 }
1922
1923
1924 /* Create the header in the file using OB. If the section type is for
1925 a function, set FN to the decl for that function. */
1926
1927 void
1928 produce_asm (struct output_block *ob, tree fn)
1929 {
1930 enum lto_section_type section_type = ob->section_type;
1931 struct lto_function_header header;
1932 char *section_name;
1933
1934 if (section_type == LTO_section_function_body)
1935 {
1936 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1937 section_name = lto_get_section_name (section_type, name, NULL);
1938 }
1939 else
1940 section_name = lto_get_section_name (section_type, NULL, NULL);
1941
1942 lto_begin_section (section_name, !flag_wpa);
1943 free (section_name);
1944
1945 /* The entire header is stream computed here. */
1946 memset (&header, 0, sizeof (struct lto_function_header));
1947
1948 /* Write the header. */
1949 header.major_version = LTO_major_version;
1950 header.minor_version = LTO_minor_version;
1951
1952 if (section_type == LTO_section_function_body)
1953 header.cfg_size = ob->cfg_stream->total_size;
1954 header.main_size = ob->main_stream->total_size;
1955 header.string_size = ob->string_stream->total_size;
1956 lto_write_data (&header, sizeof header);
1957
1958 /* Put all of the gimple and the string table out the asm file as a
1959 block of text. */
1960 if (section_type == LTO_section_function_body)
1961 lto_write_stream (ob->cfg_stream);
1962 lto_write_stream (ob->main_stream);
1963 lto_write_stream (ob->string_stream);
1964
1965 lto_end_section ();
1966 }
1967
1968
1969 /* Output the base body of struct function FN using output block OB. */
1970
static void
output_struct_function_base (struct output_block *ob, struct function *fn)
{
  struct bitpack_d bp;
  unsigned i;
  tree t;

  /* Output the static chain and non-local goto save area.  */
  stream_write_tree (ob, fn->static_chain_decl, true);
  stream_write_tree (ob, fn->nonlocal_goto_save_area, true);

  /* Output all the local variables in the function.  */
  streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
  FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
    stream_write_tree (ob, t, true);

  /* Output current IL state of the function.  */
  streamer_write_uhwi (ob, fn->curr_properties);

  /* Write all the attributes for FN.  NOTE: the order and widths of the
     packed fields are the wire format; the reader must unpack in the
     identical sequence.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, fn->is_thunk, 1);
  bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
  bp_pack_value (&bp, fn->returns_pcc_struct, 1);
  bp_pack_value (&bp, fn->returns_struct, 1);
  bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
  bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
  bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
  bp_pack_value (&bp, fn->after_inlining, 1);
  bp_pack_value (&bp, fn->stdarg, 1);
  bp_pack_value (&bp, fn->has_nonlocal_label, 1);
  bp_pack_value (&bp, fn->calls_alloca, 1);
  bp_pack_value (&bp, fn->calls_setjmp, 1);
  bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
  bp_pack_value (&bp, fn->has_simduid_loops, 1);
  bp_pack_value (&bp, fn->va_list_fpr_size, 8);
  bp_pack_value (&bp, fn->va_list_gpr_size, 8);
  bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);

  /* Output the function start and end loci.  */
  stream_output_location (ob, &bp, fn->function_start_locus);
  stream_output_location (ob, &bp, fn->function_end_locus);

  streamer_write_bitpack (&bp);
}
2016
2017
2018 /* Output the body of function NODE->DECL. */
2019
2020 static void
2021 output_function (struct cgraph_node *node)
2022 {
2023 tree function;
2024 struct function *fn;
2025 basic_block bb;
2026 struct output_block *ob;
2027
2028 function = node->decl;
2029 fn = DECL_STRUCT_FUNCTION (function);
2030 ob = create_output_block (LTO_section_function_body);
2031
2032 clear_line_info (ob);
2033 ob->symbol = node;
2034
2035 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
2036
2037 /* Set current_function_decl and cfun. */
2038 push_cfun (fn);
2039
2040 /* Make string 0 be a NULL string. */
2041 streamer_write_char_stream (ob->string_stream, 0);
2042
2043 streamer_write_record_start (ob, LTO_function);
2044
2045 /* Output decls for parameters and args. */
2046 stream_write_tree (ob, DECL_RESULT (function), true);
2047 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
2048
2049 /* Output DECL_INITIAL for the function, which contains the tree of
2050 lexical scopes. */
2051 stream_write_tree (ob, DECL_INITIAL (function), true);
2052
2053 /* We also stream abstract functions where we stream only stuff needed for
2054 debug info. */
2055 if (gimple_has_body_p (function))
2056 {
2057 streamer_write_uhwi (ob, 1);
2058 output_struct_function_base (ob, fn);
2059
2060 /* Output all the SSA names used in the function. */
2061 output_ssa_names (ob, fn);
2062
2063 /* Output any exception handling regions. */
2064 output_eh_regions (ob, fn);
2065
2066
2067 /* We will renumber the statements. The code that does this uses
2068 the same ordering that we use for serializing them so we can use
2069 the same code on the other end and not have to write out the
2070 statement numbers. We do not assign UIDs to PHIs here because
2071 virtual PHIs get re-computed on-the-fly which would make numbers
2072 inconsistent. */
2073 set_gimple_stmt_max_uid (cfun, 0);
2074 FOR_ALL_BB_FN (bb, cfun)
2075 {
2076 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2077 gsi_next (&gsi))
2078 {
2079 gphi *stmt = gsi.phi ();
2080
2081 /* Virtual PHIs are not going to be streamed. */
2082 if (!virtual_operand_p (gimple_phi_result (stmt)))
2083 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2084 }
2085 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
2086 gsi_next (&gsi))
2087 {
2088 gimple stmt = gsi_stmt (gsi);
2089 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2090 }
2091 }
2092 /* To avoid keeping duplicate gimple IDs in the statements, renumber
2093 virtual phis now. */
2094 FOR_ALL_BB_FN (bb, cfun)
2095 {
2096 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2097 gsi_next (&gsi))
2098 {
2099 gphi *stmt = gsi.phi ();
2100 if (virtual_operand_p (gimple_phi_result (stmt)))
2101 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2102 }
2103 }
2104
2105 /* Output the code for the function. */
2106 FOR_ALL_BB_FN (bb, fn)
2107 output_bb (ob, bb, fn);
2108
2109 /* The terminator for this function. */
2110 streamer_write_record_start (ob, LTO_null);
2111
2112 output_cfg (ob, fn);
2113
2114 pop_cfun ();
2115 }
2116 else
2117 streamer_write_uhwi (ob, 0);
2118
2119 /* Create a section to hold the pickled output of this function. */
2120 produce_asm (ob, function);
2121
2122 destroy_output_block (ob);
2123 }
2124
/* Output the initializer (constructor) of variable NODE->DECL.  */

static void
output_constructor (struct varpool_node *node)
{
  tree var = node->decl;
  struct output_block *ob;

  /* Variable initializers share the function-body section kind.  */
  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->symbol = node;

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* Output DECL_INITIAL for the variable, which contains the
     constructor tree to be streamed.  */
  stream_write_tree (ob, DECL_INITIAL (var), true);

  /* Create a section to hold the pickled output of this variable.  */
  produce_asm (ob, var);

  destroy_output_block (ob);
}
2150
2151
/* Emit toplevel asms into an LTO_section_asm section.  Does nothing if
   the symbol table holds no toplevel asm statements.  */

void
lto_output_toplevel_asms (void)
{
  struct output_block *ob;
  struct asm_node *can;
  char *section_name;
  struct lto_simple_header_with_strings header;

  if (!symtab->first_asm_symbol ())
    return;

  ob = create_output_block (LTO_section_asm);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* Stream each asm string together with its symbol order so the reader
     can reconstruct the original interleaving.  */
  for (can = symtab->first_asm_symbol (); can; can = can->next)
    {
      streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
      streamer_write_hwi (ob, can->order);
    }

  /* A NULL_TREE string terminates the list on the reader side.  */
  streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);

  section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header stream is computed here.  */
  memset (&header, 0, sizeof (header));

  /* Write the header.  */
  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;
  lto_write_data (&header, sizeof header);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  destroy_output_block (ob);
}
2202
2203
/* Copy the function body or variable constructor of NODE without
   deserializing: bit-copy the previously written section and duplicate
   the associated in-decl-state vectors into the current out state.  */

static void
copy_function_or_variable (struct symtab_node *node)
{
  tree function = node->decl;
  struct lto_file_decl_data *file_data = node->lto_file_data;
  const char *data;
  size_t len;
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
  char *section_name =
    lto_get_section_name (LTO_section_function_body, name, NULL);
  size_t i, j;
  struct lto_in_decl_state *in_state;
  struct lto_out_decl_state *out_state = lto_get_out_decl_state ();

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* We may have renamed the declaration, e.g., a static function.  */
  name = lto_get_decl_name_mapping (file_data, name);

  data = lto_get_section_data (file_data, LTO_section_function_body,
                               name, &len);
  gcc_assert (data);

  /* Do a bit copy of the function body.  */
  lto_write_data (data, len);

  /* Copy decls.  */
  in_state =
    lto_get_function_in_decl_state (node->lto_file_data, function);
  gcc_assert (in_state);

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      size_t n = vec_safe_length (in_state->streams[i]);
      vec<tree, va_gc> *trees = in_state->streams[i];
      struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);

      /* The out state must have the same indices as the in state.
	 So just copy the vector.  All the encoders in the in state
	 must be empty where we reach here.  */
      gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
      encoder->trees.reserve_exact (n);
      for (j = 0; j < n; j++)
	encoder->trees.safe_push ((*trees)[j]);
    }

  lto_free_section_data (file_data, LTO_section_function_body, name,
			 data, len);
  lto_end_section ();
}
2257
2258 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2259
2260 static tree
2261 wrap_refs (tree *tp, int *ws, void *)
2262 {
2263 tree t = *tp;
2264 if (handled_component_p (t)
2265 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL)
2266 {
2267 tree decl = TREE_OPERAND (t, 0);
2268 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2269 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2270 build1 (ADDR_EXPR, ptrtype, decl),
2271 build_int_cst (ptrtype, 0));
2272 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2273 *ws = 0;
2274 }
2275 else if (TREE_CODE (t) == CONSTRUCTOR)
2276 ;
2277 else if (!EXPR_P (t))
2278 *ws = 0;
2279 return NULL_TREE;
2280 }
2281
/* Main entry point from the pass manager.  Streams the bodies of all
   encoded functions and the initializers of all encoded variables,
   then the symbol table and offload tables.  */

void
lto_output (void)
{
  struct lto_out_decl_state *decl_state;
#ifdef ENABLE_CHECKING
  /* Tracks DECL_UIDs already emitted so we can assert each symbol is
     output at most once.  */
  bitmap output = lto_bitmap_alloc ();
#endif
  int i, n_nodes;
  lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* Initialize the streamer.  */
  lto_streamer_init ();

  n_nodes = lto_symtab_encoder_size (encoder);
  /* Process only the functions with bodies.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
	{
	  if (lto_symtab_encoder_encode_body_p (encoder, node)
	      && !node->alias)
	    {
#ifdef ENABLE_CHECKING
	      gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
	      bitmap_set_bit (output, DECL_UID (node->decl));
#endif
	      /* Each body gets its own out-decl-state, pushed for the
		 duration of the streaming and recorded afterwards.  */
	      decl_state = lto_new_out_decl_state ();
	      lto_push_out_decl_state (decl_state);
	      if (gimple_has_body_p (node->decl) || !flag_wpa
		  /* Thunks have no body but they may be synthetized
		     at WPA time.  */
		  || DECL_ARGUMENTS (node->decl))
		output_function (node);
	      else
		copy_function_or_variable (node);
	      gcc_assert (lto_get_out_decl_state () == decl_state);
	      lto_pop_out_decl_state ();
	      lto_record_function_out_decl_state (node->decl, decl_state);
	    }
	}
      else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
	{
	  /* Wrap symbol references inside the ctor in a type
	     preserving MEM_REF.  */
	  tree ctor = DECL_INITIAL (node->decl);
	  if (ctor && !in_lto_p)
	    walk_tree (&ctor, wrap_refs, NULL, NULL);
	  if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
	      && lto_symtab_encoder_encode_initializer_p (encoder, node)
	      && !node->alias)
	    {
	      timevar_push (TV_IPA_LTO_CTORS_OUT);
#ifdef ENABLE_CHECKING
	      gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
	      bitmap_set_bit (output, DECL_UID (node->decl));
#endif
	      decl_state = lto_new_out_decl_state ();
	      lto_push_out_decl_state (decl_state);
	      if (DECL_INITIAL (node->decl) != error_mark_node
		  || !flag_wpa)
		output_constructor (node);
	      else
		copy_function_or_variable (node);
	      gcc_assert (lto_get_out_decl_state () == decl_state);
	      lto_pop_out_decl_state ();
	      lto_record_function_out_decl_state (node->decl, decl_state);
	      timevar_pop (TV_IPA_LTO_CTORS_OUT);
	    }
	}
    }

  /* Emit the callgraph after emitting function bodies.  This needs to
     be done now to make sure that all the statements in every function
     have been renumbered so that edges can be associated with call
     statements using the statement UIDs.  */
  output_symtab ();

  output_offload_tables ();

#ifdef ENABLE_CHECKING
  lto_bitmap_free (output);
#endif
}
2368
2369 /* Write each node in encoded by ENCODER to OB, as well as those reachable
2370 from it and required for correct representation of its semantics.
2371 Each node in ENCODER must be a global declaration or a type. A node
2372 is written only once, even if it appears multiple times in the
2373 vector. Certain transitively-reachable nodes, such as those
2374 representing expressions, may be duplicated, but such nodes
2375 must not appear in ENCODER itself. */
2376
2377 static void
2378 write_global_stream (struct output_block *ob,
2379 struct lto_tree_ref_encoder *encoder)
2380 {
2381 tree t;
2382 size_t index;
2383 const size_t size = lto_tree_ref_encoder_size (encoder);
2384
2385 for (index = 0; index < size; index++)
2386 {
2387 t = lto_tree_ref_encoder_get_tree (encoder, index);
2388 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2389 stream_write_tree (ob, t, false);
2390 }
2391 }
2392
2393
2394 /* Write a sequence of indices into the globals vector corresponding
2395 to the trees in ENCODER. These are used by the reader to map the
2396 indices used to refer to global entities within function bodies to
2397 their referents. */
2398
2399 static void
2400 write_global_references (struct output_block *ob,
2401 struct lto_tree_ref_encoder *encoder)
2402 {
2403 tree t;
2404 uint32_t index;
2405 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2406
2407 /* Write size and slot indexes as 32-bit unsigned numbers. */
2408 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2409 data[0] = size;
2410
2411 for (index = 0; index < size; index++)
2412 {
2413 uint32_t slot_num;
2414
2415 t = lto_tree_ref_encoder_get_tree (encoder, index);
2416 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2417 gcc_assert (slot_num != (unsigned)-1);
2418 data[index + 1] = slot_num;
2419 }
2420
2421 lto_write_data (data, sizeof (int32_t) * (size + 1));
2422 free (data);
2423 }
2424
2425
2426 /* Write all the streams in an lto_out_decl_state STATE using
2427 output block OB and output stream OUT_STREAM. */
2428
2429 void
2430 lto_output_decl_state_streams (struct output_block *ob,
2431 struct lto_out_decl_state *state)
2432 {
2433 int i;
2434
2435 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2436 write_global_stream (ob, &state->streams[i]);
2437 }
2438
2439
2440 /* Write all the references in an lto_out_decl_state STATE using
2441 output block OB and output stream OUT_STREAM. */
2442
2443 void
2444 lto_output_decl_state_refs (struct output_block *ob,
2445 struct lto_out_decl_state *state)
2446 {
2447 unsigned i;
2448 uint32_t ref;
2449 tree decl;
2450
2451 /* Write reference to FUNCTION_DECL. If there is not function,
2452 write reference to void_type_node. */
2453 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2454 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2455 gcc_assert (ref != (unsigned)-1);
2456 lto_write_data (&ref, sizeof (uint32_t));
2457
2458 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2459 write_global_references (ob, &state->streams[i]);
2460 }
2461
2462
2463 /* Return the written size of STATE. */
2464
2465 static size_t
2466 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2467 {
2468 int i;
2469 size_t size;
2470
2471 size = sizeof (int32_t); /* fn_ref. */
2472 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2473 {
2474 size += sizeof (int32_t); /* vector size. */
2475 size += (lto_tree_ref_encoder_size (&state->streams[i])
2476 * sizeof (int32_t));
2477 }
2478 return size;
2479 }
2480
2481
/* Write symbol T into the current section using CACHE for slot lookup.
   SEEN specifies symbols we wrote so far; a symbol is emitted at most
   once.  ALIAS says whether T is being written as an alias, which
   relaxes the definition asserts.  The record layout is: name, comdat
   group (both NUL-terminated), kind byte, visibility byte, 8-byte
   size, 4-byte slot number.  */

static void
write_symbol (struct streamer_tree_cache_d *cache,
	      tree t, hash_set<const char *> *seen, bool alias)
{
  const char *name;
  enum gcc_plugin_symbol_kind kind;
  enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
  unsigned slot_num;
  uint64_t size;
  const char *comdat;
  unsigned char c;

  /* None of the following kinds of symbols are needed in the
     symbol table.  */
  if (!TREE_PUBLIC (t)
      || is_builtin_fn (t)
      || DECL_ABSTRACT_P (t)
      || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
    return;
  gcc_assert (TREE_CODE (t) != RESULT_DECL);

  gcc_assert (TREE_CODE (t) == VAR_DECL
	      || TREE_CODE (t) == FUNCTION_DECL);

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));

  /* This behaves like assemble_name_raw in varasm.c, performing the
     same name manipulations that ASM_OUTPUT_LABELREF does.  */
  name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));

  /* hash_set::add returns true if NAME was already present.  */
  if (seen->add (name))
    return;

  streamer_tree_cache_lookup (cache, t, &slot_num);
  gcc_assert (slot_num != (unsigned)-1);

  if (DECL_EXTERNAL (t))
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKUNDEF;
      else
	kind = GCCPK_UNDEF;
    }
  else
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKDEF;
      else if (DECL_COMMON (t))
	kind = GCCPK_COMMON;
      else
	kind = GCCPK_DEF;

      /* When something is defined, it should have node attached.  */
      gcc_assert (alias || TREE_CODE (t) != VAR_DECL
		  || varpool_node::get (t)->definition);
      gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
		  || (cgraph_node::get (t)
		      && cgraph_node::get (t)->definition));
    }

  /* Imitate what default_elf_asm_output_external do.
     When symbol is external, we need to output it with DEFAULT visibility
     when compiling with -fvisibility=default, while with HIDDEN visibility
     when symbol has attribute (visibility("hidden")) specified.
     targetm.binds_local_p check DECL_VISIBILITY_SPECIFIED and gets this
     right.  */

  if (DECL_EXTERNAL (t)
      && !targetm.binds_local_p (t))
    visibility = GCCPV_DEFAULT;
  else
    switch (DECL_VISIBILITY (t))
      {
      case VISIBILITY_DEFAULT:
	visibility = GCCPV_DEFAULT;
	break;
      case VISIBILITY_PROTECTED:
	visibility = GCCPV_PROTECTED;
	break;
      case VISIBILITY_HIDDEN:
	visibility = GCCPV_HIDDEN;
	break;
      case VISIBILITY_INTERNAL:
	visibility = GCCPV_INTERNAL;
	break;
      }

  /* Only COMMON symbols carry a size; the linker plugin needs it to
     pick the largest common.  */
  if (kind == GCCPK_COMMON
      && DECL_SIZE_UNIT (t)
      && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
    size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
  else
    size = 0;

  if (DECL_ONE_ONLY (t))
    comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
  else
    comdat = "";

  lto_write_data (name, strlen (name) + 1);
  lto_write_data (comdat, strlen (comdat) + 1);
  c = (unsigned char) kind;
  lto_write_data (&c, 1);
  c = (unsigned char) visibility;
  lto_write_data (&c, 1);
  lto_write_data (&size, 8);
  lto_write_data (&slot_num, 4);
}
2593
/* Return true if NODE should appear in the plugin symbol table.  */

bool
output_symbol_p (symtab_node *node)
{
  struct cgraph_node *cnode;
  if (!node->real_symbol_p ())
    return false;
  /* We keep external functions in symtab for sake of inlining
     and devirtualization.  We do not want to see them in symbol table as
     references unless they are really used.  */
  cnode = dyn_cast <cgraph_node *> (node);
  if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
      && cnode->callers)
    return true;

  /* Ignore all references from external vars initializers - they are not really
     part of the compilation unit until they are used by folding.  Some symbols,
     like references to external construction vtables can not be referred to at all.
     We decide this at can_refer_decl_in_current_unit_p.  */
  if (!node->definition || DECL_EXTERNAL (node->decl))
    {
      int i;
      struct ipa_ref *ref;
      /* Scan everything referring to NODE; a reference from a function
	 or from a non-external variable makes it table-worthy.  */
      for (i = 0; node->iterate_referring (i, ref); i++)
	{
	  /* Alias references do not count as uses.  */
	  if (ref->use == IPA_REF_ALIAS)
	    continue;
	  if (is_a <cgraph_node *> (ref->referring))
	    return true;
	  if (!DECL_EXTERNAL (ref->referring->decl))
	    return true;
	}
      return false;
    }
  return true;
}
2631
2632
/* Write an IL symbol table to OB, covering the symbols recorded in
   OB's decl-state symtab encoder.  */

static void
produce_symtab (struct output_block *ob)
{
  struct streamer_tree_cache_d *cache = ob->writer_cache;
  char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
  lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
  lto_symtab_encoder_iterator lsei;

  lto_begin_section (section_name, false);
  free (section_name);

  /* Names already written; write_symbol skips duplicates via this set.  */
  hash_set<const char *> seen;

  /* Write the symbol table.
     First write everything defined and then all declarations.
     This is necessary to handle cases where we have duplicated symbols.  */
  for (lsei = lsei_start (encoder);
       !lsei_end_p (lsei); lsei_next (&lsei))
    {
      symtab_node *node = lsei_node (lsei);

      if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
	continue;
      write_symbol (cache, node->decl, &seen, false);
    }
  for (lsei = lsei_start (encoder);
       !lsei_end_p (lsei); lsei_next (&lsei))
    {
      symtab_node *node = lsei_node (lsei);

      if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
	continue;
      write_symbol (cache, node->decl, &seen, false);
    }

  lto_end_section ();
}
2673
2674
/* Init the streamer_mode_table for output, where we collect info on what
   machine_mode values have been streamed.  Clears the per-mode flags so
   lto_write_mode_table only emits modes actually used.  */
void
lto_output_init_mode_table (void)
{
  memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
}
2682
2683
/* Write the mode table: a bitpacked record for every machine_mode that
   was streamed, so an offload target with different mode numbering can
   remap them.  Terminated by a VOIDmode entry.  */
static void
lto_write_mode_table (void)
{
  struct output_block *ob;
  ob = create_output_block (LTO_section_mode_table);
  bitpack_d bp = bitpack_create (ob->main_stream);

  /* Ensure that for GET_MODE_INNER (m) != VOIDmode we have
     also the inner mode marked.  */
  for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
    if (streamer_mode_table[i])
      {
	machine_mode m = (machine_mode) i;
	if (GET_MODE_INNER (m) != VOIDmode)
	  streamer_mode_table[(int) GET_MODE_INNER (m)] = 1;
      }
  /* First stream modes that have GET_MODE_INNER (m) == VOIDmode,
     so that we can refer to them afterwards.  */
  for (int pass = 0; pass < 2; pass++)
    for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
      if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
	{
	  machine_mode m = (machine_mode) i;
	  /* Pass 0 handles scalar modes, pass 1 the composite ones.  */
	  if ((GET_MODE_INNER (m) == VOIDmode) ^ (pass == 0))
	    continue;
	  /* Per-mode record: id, class, size, precision, inner mode,
	     nunits, class-specific extras, then the mode name.  */
	  bp_pack_value (&bp, m, 8);
	  bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
	  bp_pack_value (&bp, GET_MODE_SIZE (m), 8);
	  bp_pack_value (&bp, GET_MODE_PRECISION (m), 16);
	  bp_pack_value (&bp, GET_MODE_INNER (m), 8);
	  bp_pack_value (&bp, GET_MODE_NUNITS (m), 8);
	  switch (GET_MODE_CLASS (m))
	    {
	    case MODE_FRACT:
	    case MODE_UFRACT:
	    case MODE_ACCUM:
	    case MODE_UACCUM:
	      bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
	      bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
	      break;
	    case MODE_FLOAT:
	    case MODE_DECIMAL_FLOAT:
	      bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
	      break;
	    default:
	      break;
	    }
	  bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
	}
  /* VOIDmode terminates the table.  */
  bp_pack_value (&bp, VOIDmode, 8);

  streamer_write_bitpack (&bp);

  char *section_name
    = lto_get_section_name (LTO_section_mode_table, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header stream is computed here.  */
  struct lto_simple_header_with_strings header;
  memset (&header, 0, sizeof (header));

  /* Write the header.  */
  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;
  lto_write_data (&header, sizeof header);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();
  destroy_output_block (ob);
}
2763
2764
/* This pass is run after all of the functions are serialized and all
   of the IPA passes have written their serialized forms.  This pass
   causes the vector of all of the global decls and types used from
   this file to be written in to a section that can then be read in to
   recover these on other side.  */

void
produce_asm_for_decls (void)
{
  struct lto_out_decl_state *out_state;
  struct lto_out_decl_state *fn_out_state;
  struct lto_decl_header header;
  char *section_name;
  struct output_block *ob;
  unsigned idx, num_fns;
  size_t decl_state_size;
  int32_t num_decl_states;

  ob = create_output_block (LTO_section_decls);

  memset (&header, 0, sizeof (struct lto_decl_header));

  section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  gcc_assert (!alias_pairs);

  /* Get rid of the global decl state hash tables to save some memory.  */
  out_state = lto_get_out_decl_state ();
  for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
    if (out_state->streams[i].tree_hash_table)
      {
	delete out_state->streams[i].tree_hash_table;
	out_state->streams[i].tree_hash_table = NULL;
      }

  /* Write the global symbols.  The global state is streamed first,
     followed by the per-function states, so the reader sees globals
     before function-local references to them.  */
  lto_output_decl_state_streams (ob, out_state);
  num_fns = lto_function_decl_states.length ();
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_streams (ob, fn_out_state);
    }

  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  /* Currently not used.  This field would allow us to preallocate
     the globals vector, so that it need not be resized as it is extended.  */
  header.num_nodes = -1;

  /* Compute the total size of all decl out states.  */
  decl_state_size = sizeof (int32_t);
  decl_state_size += lto_out_decl_state_written_size (out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      decl_state_size += lto_out_decl_state_written_size (fn_out_state);
    }
  header.decl_state_size = decl_state_size;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  lto_write_data (&header, sizeof header);

  /* Write the main out-decl state, followed by out-decl states of
     functions.  */
  num_decl_states = num_fns + 1;
  lto_write_data (&num_decl_states, sizeof (num_decl_states));
  lto_output_decl_state_refs (ob, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state = lto_function_decl_states[idx];
      lto_output_decl_state_refs (ob, fn_out_state);
    }

  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  /* Write the symbol table.  It is used by linker to determine dependencies
     and thus we can skip it for WPA.  */
  if (!flag_wpa)
    produce_symtab (ob);

  /* Write command line opts.  */
  lto_write_options ();

  /* Deallocate memory and clean up.  */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_delete_out_decl_state (fn_out_state);
    }
  lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
  lto_function_decl_states.release ();
  destroy_output_block (ob);
  /* The mode table is only needed when streaming for offload targets,
     which may number machine modes differently.  */
  if (lto_stream_offload_p)
    lto_write_mode_table ();
}