re PR target/65697 (__atomic memory barriers not strong enough for __sync builtins)
[gcc.git] / gcc / lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2015 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "alias.h"
28 #include "symtab.h"
29 #include "tree.h"
30 #include "fold-const.h"
31 #include "stor-layout.h"
32 #include "stringpool.h"
33 #include "hard-reg-set.h"
34 #include "function.h"
35 #include "rtl.h"
36 #include "flags.h"
37 #include "insn-config.h"
38 #include "expmed.h"
39 #include "dojump.h"
40 #include "explow.h"
41 #include "calls.h"
42 #include "emit-rtl.h"
43 #include "varasm.h"
44 #include "stmt.h"
45 #include "expr.h"
46 #include "params.h"
47 #include "predict.h"
48 #include "dominance.h"
49 #include "cfg.h"
50 #include "basic-block.h"
51 #include "tree-ssa-alias.h"
52 #include "internal-fn.h"
53 #include "gimple-expr.h"
54 #include "gimple.h"
55 #include "gimple-iterator.h"
56 #include "gimple-ssa.h"
57 #include "tree-ssanames.h"
58 #include "tree-pass.h"
59 #include "diagnostic-core.h"
60 #include "except.h"
61 #include "lto-symtab.h"
62 #include "cgraph.h"
63 #include "lto-streamer.h"
64 #include "data-streamer.h"
65 #include "gimple-streamer.h"
66 #include "tree-streamer.h"
67 #include "streamer-hooks.h"
68 #include "cfgloop.h"
69 #include "builtins.h"
70 #include "gomp-constants.h"
71
72
73 static void lto_write_tree (struct output_block*, tree, bool);
74
/* Clear the current-location info stored in OB, so the next streamed
   location is emitted in full (file, line and column).  */

static void
clear_line_info (struct output_block *ob)
{
  ob->current_file = NULL;
  ob->current_line = 0;
  ob->current_col = 0;
}
84
85
86 /* Create the output block and return it. SECTION_TYPE is
87 LTO_section_function_body or LTO_static_initializer. */
88
89 struct output_block *
90 create_output_block (enum lto_section_type section_type)
91 {
92 struct output_block *ob = XCNEW (struct output_block);
93
94 ob->section_type = section_type;
95 ob->decl_state = lto_get_out_decl_state ();
96 ob->main_stream = XCNEW (struct lto_output_stream);
97 ob->string_stream = XCNEW (struct lto_output_stream);
98 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
99
100 if (section_type == LTO_section_function_body)
101 ob->cfg_stream = XCNEW (struct lto_output_stream);
102
103 clear_line_info (ob);
104
105 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
106 gcc_obstack_init (&ob->obstack);
107
108 return ob;
109 }
110
111
112 /* Destroy the output block OB. */
113
114 void
115 destroy_output_block (struct output_block *ob)
116 {
117 enum lto_section_type section_type = ob->section_type;
118
119 delete ob->string_hash_table;
120 ob->string_hash_table = NULL;
121
122 free (ob->main_stream);
123 free (ob->string_stream);
124 if (section_type == LTO_section_function_body)
125 free (ob->cfg_stream);
126
127 streamer_tree_cache_delete (ob->writer_cache);
128 obstack_free (&ob->obstack, NULL);
129
130 free (ob);
131 }
132
133
/* Look up NODE in the type table and write the index for it to OB.
   Emits an LTO_type_ref record tag followed by NODE's position in the
   writer's type index.  */

static void
output_type_ref (struct output_block *ob, tree node)
{
  streamer_write_record_start (ob, LTO_type_ref);
  lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
}
142
143
144 /* Return true if tree node T is written to various tables. For these
145 nodes, we sometimes want to write their phyiscal representation
146 (via lto_output_tree), and sometimes we need to emit an index
147 reference into a table (via lto_output_tree_ref). */
148
149 static bool
150 tree_is_indexable (tree t)
151 {
152 /* Parameters and return values of functions of variably modified types
153 must go to global stream, because they may be used in the type
154 definition. */
155 if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
156 && DECL_CONTEXT (t))
157 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
158 /* IMPORTED_DECL is put into BLOCK and thus it never can be shared. */
159 else if (TREE_CODE (t) == IMPORTED_DECL)
160 return false;
161 else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
162 || TREE_CODE (t) == TYPE_DECL
163 || TREE_CODE (t) == CONST_DECL
164 || TREE_CODE (t) == NAMELIST_DECL)
165 && decl_function_context (t))
166 return false;
167 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
168 return false;
169 /* Variably modified types need to be streamed alongside function
170 bodies because they can refer to local entities. Together with
171 them we have to localize their members as well.
172 ??? In theory that includes non-FIELD_DECLs as well. */
173 else if (TYPE_P (t)
174 && variably_modified_type_p (t, NULL_TREE))
175 return false;
176 else if (TREE_CODE (t) == FIELD_DECL
177 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
178 return false;
179 else
180 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
181 }
182
183
184 /* Output info about new location into bitpack BP.
185 After outputting bitpack, lto_output_location_data has
186 to be done to output actual data. */
187
188 void
189 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
190 location_t loc)
191 {
192 expanded_location xloc;
193
194 loc = LOCATION_LOCUS (loc);
195 bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT,
196 loc < RESERVED_LOCATION_COUNT
197 ? loc : RESERVED_LOCATION_COUNT);
198 if (loc < RESERVED_LOCATION_COUNT)
199 return;
200
201 xloc = expand_location (loc);
202
203 bp_pack_value (bp, ob->current_file != xloc.file, 1);
204 bp_pack_value (bp, ob->current_line != xloc.line, 1);
205 bp_pack_value (bp, ob->current_col != xloc.column, 1);
206
207 if (ob->current_file != xloc.file)
208 bp_pack_string (ob, bp, xloc.file, true);
209 ob->current_file = xloc.file;
210
211 if (ob->current_line != xloc.line)
212 bp_pack_var_len_unsigned (bp, xloc.line);
213 ob->current_line = xloc.line;
214
215 if (ob->current_col != xloc.column)
216 bp_pack_var_len_unsigned (bp, xloc.column);
217 ob->current_col = xloc.column;
218 }
219
220
/* If EXPR is an indexable tree node, output a reference to it to
   output block OB.  Otherwise, output the physical representation of
   EXPR to OB.  Each indexable kind gets its own record tag so the
   reader knows which table to look the index up in.  */

static void
lto_output_tree_ref (struct output_block *ob, tree expr)
{
  enum tree_code code;

  /* Types are always referenced through the type table.  */
  if (TYPE_P (expr))
    {
      output_type_ref (ob, expr);
      return;
    }

  code = TREE_CODE (expr);
  switch (code)
    {
    case SSA_NAME:
      /* SSA names are referenced by version number only.  */
      streamer_write_record_start (ob, LTO_ssa_name_ref);
      streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
      break;

    case FIELD_DECL:
      streamer_write_record_start (ob, LTO_field_decl_ref);
      lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case FUNCTION_DECL:
      streamer_write_record_start (ob, LTO_function_decl_ref);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case VAR_DECL:
    case DEBUG_EXPR_DECL:
      /* Only file-scope or static variables may be referenced globally.  */
      gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
      /* FALLTHRU */
    case PARM_DECL:
      streamer_write_record_start (ob, LTO_global_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case CONST_DECL:
      streamer_write_record_start (ob, LTO_const_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case IMPORTED_DECL:
      gcc_assert (decl_function_context (expr) == NULL);
      streamer_write_record_start (ob, LTO_imported_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TYPE_DECL:
      streamer_write_record_start (ob, LTO_type_decl_ref);
      lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMELIST_DECL:
      streamer_write_record_start (ob, LTO_namelist_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMESPACE_DECL:
      streamer_write_record_start (ob, LTO_namespace_decl_ref);
      lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case LABEL_DECL:
      streamer_write_record_start (ob, LTO_label_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case RESULT_DECL:
      streamer_write_record_start (ob, LTO_result_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TRANSLATION_UNIT_DECL:
      streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    default:
      /* No other node is indexable, so it should have been handled by
	 lto_output_tree.  */
      gcc_unreachable ();
    }
}
309
310
311 /* Return true if EXPR is a tree node that can be written to disk. */
312
313 static inline bool
314 lto_is_streamable (tree expr)
315 {
316 enum tree_code code = TREE_CODE (expr);
317
318 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
319 name version in lto_output_tree_ref (see output_ssa_names). */
320 return !is_lang_specific (expr)
321 && code != SSA_NAME
322 && code != CALL_EXPR
323 && code != LANG_TYPE
324 && code != MODIFY_EXPR
325 && code != INIT_EXPR
326 && code != TARGET_EXPR
327 && code != BIND_EXPR
328 && code != WITH_CLEANUP_EXPR
329 && code != STATEMENT_LIST
330 && (code == CASE_LABEL_EXPR
331 || code == DECL_EXPR
332 || TREE_CODE_CLASS (code) != tcc_statement);
333 }
334
335
336 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
337
338 static tree
339 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
340 {
341 gcc_checking_assert (DECL_P (expr)
342 && TREE_CODE (expr) != FUNCTION_DECL
343 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
344
345 /* Handle DECL_INITIAL for symbols. */
346 tree initial = DECL_INITIAL (expr);
347 if (TREE_CODE (expr) == VAR_DECL
348 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
349 && !DECL_IN_CONSTANT_POOL (expr)
350 && initial)
351 {
352 varpool_node *vnode;
353 /* Extra section needs about 30 bytes; do not produce it for simple
354 scalar values. */
355 if (TREE_CODE (DECL_INITIAL (expr)) == CONSTRUCTOR
356 || !(vnode = varpool_node::get (expr))
357 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
358 initial = error_mark_node;
359 }
360
361 return initial;
362 }
363
364
365 /* Write a physical representation of tree node EXPR to output block
366 OB. If REF_P is true, the leaves of EXPR are emitted as references
367 via lto_output_tree_ref. IX is the index into the streamer cache
368 where EXPR is stored. */
369
370 static void
371 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
372 {
373 /* Pack all the non-pointer fields in EXPR into a bitpack and write
374 the resulting bitpack. */
375 streamer_write_tree_bitfields (ob, expr);
376
377 /* Write all the pointer fields in EXPR. */
378 streamer_write_tree_body (ob, expr, ref_p);
379
380 /* Write any LTO-specific data to OB. */
381 if (DECL_P (expr)
382 && TREE_CODE (expr) != FUNCTION_DECL
383 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
384 {
385 /* Handle DECL_INITIAL for symbols. */
386 tree initial = get_symbol_initial_value
387 (ob->decl_state->symtab_node_encoder, expr);
388 stream_write_tree (ob, initial, ref_p);
389 }
390 }
391
392 /* Write a physical representation of tree node EXPR to output block
393 OB. If REF_P is true, the leaves of EXPR are emitted as references
394 via lto_output_tree_ref. IX is the index into the streamer cache
395 where EXPR is stored. */
396
397 static void
398 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
399 {
400 if (!lto_is_streamable (expr))
401 internal_error ("tree code %qs is not supported in LTO streams",
402 get_tree_code_name (TREE_CODE (expr)));
403
404 /* Write the header, containing everything needed to materialize
405 EXPR on the reading side. */
406 streamer_write_tree_header (ob, expr);
407
408 lto_write_tree_1 (ob, expr, ref_p);
409
410 /* Mark the end of EXPR. */
411 streamer_write_zero (ob);
412 }
413
414 /* Emit the physical representation of tree node EXPR to output block
415 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
416 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
417
418 static void
419 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
420 bool ref_p, bool this_ref_p)
421 {
422 unsigned ix;
423
424 gcc_checking_assert (expr != NULL_TREE
425 && !(this_ref_p && tree_is_indexable (expr)));
426
427 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
428 expr, hash, &ix);
429 gcc_assert (!exists_p);
430 if (streamer_handle_as_builtin_p (expr))
431 {
432 /* MD and NORMAL builtins do not need to be written out
433 completely as they are always instantiated by the
434 compiler on startup. The only builtins that need to
435 be written out are BUILT_IN_FRONTEND. For all other
436 builtins, we simply write the class and code. */
437 streamer_write_builtin (ob, expr);
438 }
439 else if (TREE_CODE (expr) == INTEGER_CST
440 && !TREE_OVERFLOW (expr))
441 {
442 /* Shared INTEGER_CST nodes are special because they need their
443 original type to be materialized by the reader (to implement
444 TYPE_CACHED_VALUES). */
445 streamer_write_integer_cst (ob, expr, ref_p);
446 }
447 else
448 {
449 /* This is the first time we see EXPR, write its fields
450 to OB. */
451 lto_write_tree (ob, expr, ref_p);
452 }
453 }
454
/* Depth-first walker that groups the trees reachable from an
   expression into SCCs.  The constructor performs the whole walk and
   streams the SCCs; discovered entries are accumulated on SCCSTACK.  */

class DFS
{
public:
  DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
       bool single_p);
  ~DFS ();

  /* A tree paired with the hash value computed for it during the walk.  */
  struct scc_entry
  {
    tree t;
    hashval_t hash;
  };
  /* Stack of entries of the currently open SCCs, in discovery order.  */
  vec<scc_entry> sccstack;

private:
  /* Per-node state for the low-link SCC computation.  */
  struct sccs
  {
    unsigned int dfsnum;
    unsigned int low;
  };
  /* One pending item of the explicit worklist used in place of
     recursion.  CSTATE is NULL until the node is first visited.  */
  struct worklist
  {
    tree expr;
    sccs *from_state;
    sccs *cstate;
    bool ref_p;
    bool this_ref_p;
  };

  static int scc_entry_compare (const void *, const void *);

  void DFS_write_tree_body (struct output_block *ob,
			    tree expr, sccs *expr_state, bool ref_p);

  void DFS_write_tree (struct output_block *ob, sccs *from_state,
		       tree expr, bool ref_p, bool this_ref_p);

  hashval_t
  hash_scc (struct output_block *ob, unsigned first, unsigned size);

  /* Map from tree to its walk state; states live on SCCSTATE_OBSTACK.  */
  hash_map<tree, sccs *> sccstate;
  vec<worklist> worklist_vec;
  struct obstack sccstate_obstack;
};
499
/* Walk all trees reachable from EXPR and stream them to OB grouped in
   SCCs, using an explicit worklist instead of recursion.  REF_P and
   THIS_REF_P control whether edges resp. EXPR itself are emitted as
   references; SINGLE_P indicates a re-walk of a single leaf SCC.  */

DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
	  bool single_p)
{
  unsigned int next_dfs_num = 1;
  sccstack.create (0);
  gcc_obstack_init (&sccstate_obstack);
  worklist_vec = vNULL;
  DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
  /* Drain the worklist.  Each item is processed twice: once on first
     visit (CSTATE == NULL, edges get pushed) and once on the way back
     (CSTATE set, low-links are merged and SCCs are detected).  */
  while (!worklist_vec.is_empty ())
    {
      worklist &w = worklist_vec.last ();
      expr = w.expr;
      sccs *from_state = w.from_state;
      sccs *cstate = w.cstate;
      ref_p = w.ref_p;
      this_ref_p = w.this_ref_p;
      if (cstate == NULL)
	{
	  sccs **slot = &sccstate.get_or_insert (expr);
	  cstate = *slot;
	  /* Already visited: just propagate the DFS number into the
	     parent's low-link and drop the item.  */
	  if (cstate)
	    {
	      gcc_checking_assert (from_state);
	      if (cstate->dfsnum < from_state->dfsnum)
		from_state->low = MIN (cstate->dfsnum, from_state->low);
	      worklist_vec.pop ();
	      continue;
	    }

	  scc_entry e = { expr, 0 };
	  /* Not yet visited.  DFS recurse and push it onto the stack.  */
	  *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
	  sccstack.safe_push (e);
	  cstate->dfsnum = next_dfs_num++;
	  cstate->low = cstate->dfsnum;
	  w.cstate = cstate;

	  /* Builtins contribute no edges; INTEGER_CSTs only their type;
	     everything else gets a full body walk.  */
	  if (streamer_handle_as_builtin_p (expr))
	    ;
	  else if (TREE_CODE (expr) == INTEGER_CST
		   && !TREE_OVERFLOW (expr))
	    DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
	  else
	    {
	      DFS_write_tree_body (ob, expr, cstate, ref_p);

	      /* Walk any LTO-specific edges.  */
	      if (DECL_P (expr)
		  && TREE_CODE (expr) != FUNCTION_DECL
		  && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
		{
		  /* Handle DECL_INITIAL for symbols.  */
		  tree initial
		    = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
						expr);
		  DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
		}
	    }
	  continue;
	}

      /* See if we found an SCC.  */
      if (cstate->low == cstate->dfsnum)
	{
	  unsigned first, size;
	  tree x;

	  /* If we are re-walking a single leaf-SCC just pop it,
	     let earlier worklist item access the sccstack.  */
	  if (single_p)
	    {
	      worklist_vec.pop ();
	      continue;
	    }

	  /* Pop the SCC and compute its size.  */
	  first = sccstack.length ();
	  do
	    {
	      x = sccstack[--first].t;
	    }
	  while (x != expr);
	  size = sccstack.length () - first;

	  /* No need to compute hashes for LTRANS units, we don't perform
	     any merging there.  */
	  hashval_t scc_hash = 0;
	  unsigned scc_entry_len = 0;
	  if (!flag_wpa)
	    {
	      scc_hash = hash_scc (ob, first, size);

	      /* Put the entries with the least number of collisions first.  */
	      unsigned entry_start = 0;
	      scc_entry_len = size + 1;
	      for (unsigned i = 0; i < size;)
		{
		  unsigned from = i;
		  /* Skip over the run of entries sharing the same hash.  */
		  for (i = i + 1; i < size
		       && (sccstack[first + i].hash
			   == sccstack[first + from].hash); ++i)
		    ;
		  if (i - from < scc_entry_len)
		    {
		      scc_entry_len = i - from;
		      entry_start = from;
		    }
		}
	      /* Rotate the chosen run of entries to the front.  */
	      for (unsigned i = 0; i < scc_entry_len; ++i)
		std::swap (sccstack[first + i],
			   sccstack[first + entry_start + i]);

	      if (scc_entry_len == 1)
		; /* We already sorted SCC deterministically in hash_scc.  */
	      else
		/* Check that we have only one SCC.
		   Naturally we may have conflicts if hash function is not
		   strong enough.  Lets see how far this gets.
		   NOTE: with checking enabled a hash collision inside an
		   SCC aborts the compiler here.  */
		{
#ifdef ENABLE_CHECKING
		  gcc_unreachable ();
#endif
		}
	    }

	  /* Write LTO_tree_scc.  */
	  streamer_write_record_start (ob, LTO_tree_scc);
	  streamer_write_uhwi (ob, size);
	  streamer_write_uhwi (ob, scc_hash);

	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
	     All INTEGER_CSTs need to be handled this way as we need
	     their type to materialize them.  Also builtins are handled
	     this way.
	     ??? We still wrap these in LTO_tree_scc so at the
	     input side we can properly identify the tree we want
	     to ultimatively return.  */
	  if (size == 1)
	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
	  else
	    {
	      /* Write the size of the SCC entry candidates.  */
	      streamer_write_uhwi (ob, scc_entry_len);

	      /* Write all headers and populate the streamer cache.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  hashval_t hash = sccstack[first+i].hash;
		  tree t = sccstack[first+i].t;
		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
							      t, hash, NULL);
		  gcc_assert (!exists_p);

		  if (!lto_is_streamable (t))
		    internal_error ("tree code %qs is not supported "
				    "in LTO streams",
				    get_tree_code_name (TREE_CODE (t)));

		  gcc_checking_assert (!streamer_handle_as_builtin_p (t));

		  /* Write the header, containing everything needed to
		     materialize EXPR on the reading side.  */
		  streamer_write_tree_header (ob, t);
		}

	      /* Write the bitpacks and tree references.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);

		  /* Mark the end of the tree.  */
		  streamer_write_zero (ob);
		}
	    }

	  /* Finally truncate the vector.  */
	  sccstack.truncate (first);

	  if (from_state)
	    from_state->low = MIN (from_state->low, cstate->low);
	  worklist_vec.pop ();
	  continue;
	}

      /* Not an SCC root: merge low-links into the parent.  */
      gcc_checking_assert (from_state);
      from_state->low = MIN (from_state->low, cstate->low);
      if (cstate->dfsnum < from_state->dfsnum)
	from_state->low = MIN (cstate->dfsnum, from_state->low);
      worklist_vec.pop ();
    }
  worklist_vec.release ();
}
692
/* Release the SCC stack and the obstack holding per-node walk state.  */

DFS::~DFS ()
{
  sccstack.release ();
  obstack_free (&sccstate_obstack, NULL);
}
698
/* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
   DFS recurse for all tree edges originating from it.  The order of
   the CODE_CONTAINS_STRUCT sections below defines the stream layout
   and must match the reader.  */

void
DFS::DFS_write_tree_body (struct output_block *ob,
			  tree expr, sccs *expr_state, bool ref_p)
{
#define DFS_follow_tree_edge(DEST) \
  DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)

  enum tree_code code;

  code = TREE_CODE (expr);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (TREE_CODE (expr) != IDENTIFIER_NODE)
	DFS_follow_tree_edge (TREE_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
	DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      DFS_follow_tree_edge (TREE_REALPART (expr));
      DFS_follow_tree_edge (TREE_IMAGPART (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (expr)
	  && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
	  && anon_aggrname_p (DECL_NAME (expr)))
	;
      else
	DFS_follow_tree_edge (DECL_NAME (expr));
      DFS_follow_tree_edge (DECL_CONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      DFS_follow_tree_edge (DECL_SIZE (expr));
      DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));

      /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
	 special handling in LTO, it must be handled by streamer hooks.  */

      DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));

      /* Do not follow DECL_ABSTRACT_ORIGIN.  We cannot handle debug information
	 for early inlining so drop it on the floor instead of ICEing in
	 dwarf2out.c.  */

      if ((TREE_CODE (expr) == VAR_DECL
	   || TREE_CODE (expr) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (expr))
	DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
      if (TREE_CODE (expr) == VAR_DECL)
	DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (TREE_CODE (expr) == TYPE_DECL)
	DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* Make sure we don't inadvertently set the assembler name.  */
      if (DECL_ASSEMBLER_NAME_SET_P (expr))
	DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
      DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
      DFS_follow_tree_edge (DECL_FCONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      DFS_follow_tree_edge (DECL_VINDEX (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      DFS_follow_tree_edge (TYPE_SIZE (expr));
      DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
      DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
      DFS_follow_tree_edge (TYPE_NAME (expr));
      /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
	 reconstructed during fixup.  */
      /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
	 during fixup.  */
      DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
      DFS_follow_tree_edge (TYPE_CONTEXT (expr));
      /* TYPE_CANONICAL is re-computed during type merging, so no need
	 to follow it here.  */
      DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	DFS_follow_tree_edge (TYPE_VALUES (expr));
      else if (TREE_CODE (expr) == ARRAY_TYPE)
	DFS_follow_tree_edge (TYPE_DOMAIN (expr));
      else if (RECORD_OR_UNION_TYPE_P (expr))
	for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
	  DFS_follow_tree_edge (t);
      else if (TREE_CODE (expr) == FUNCTION_TYPE
	       || TREE_CODE (expr) == METHOD_TYPE)
	DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));

      if (!POINTER_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_MINVAL (expr));
      DFS_follow_tree_edge (TYPE_MAXVAL (expr));
      if (RECORD_OR_UNION_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_BINFO (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      DFS_follow_tree_edge (TREE_PURPOSE (expr));
      DFS_follow_tree_edge (TREE_VALUE (expr));
      DFS_follow_tree_edge (TREE_CHAIN (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    {
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_OPERAND (expr, i));
      DFS_follow_tree_edge (TREE_BLOCK (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    {
      for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
	if (VAR_OR_FUNCTION_DECL_P (t)
	    && DECL_EXTERNAL (t))
	  /* We have to stream externals in the block chain as
	     non-references.  See also
	     tree-streamer-out.c:streamer_write_chain.  */
	  DFS_write_tree (ob, expr_state, t, ref_p, false);
	else
	  DFS_follow_tree_edge (t);

      DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));

      /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
	 handle - those that represent inlined function scopes.
	 For the rest, drop them on the floor instead of ICEing
	 in dwarf2out.c.  */
      if (inlined_function_outer_scope_p (expr))
	{
	  tree ultimate_origin = block_ultimate_origin (expr);
	  DFS_follow_tree_edge (ultimate_origin);
	}
      /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
	 information for early inlined BLOCKs so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
	 streaming time.  */

      /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
	 list is re-constructed from BLOCK_SUPERCONTEXT.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree t;

      /* Note that the number of BINFO slots has already been emitted in
	 EXPR's header (see streamer_write_tree_header) because this length
	 is needed to build the empty BINFO node on the reader side.  */
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
	DFS_follow_tree_edge (t);
      DFS_follow_tree_edge (BINFO_OFFSET (expr));
      DFS_follow_tree_edge (BINFO_VTABLE (expr));
      DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));

      /* The number of BINFO_BASE_ACCESSES has already been emitted in
	 EXPR's bitfield section.  */
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
	DFS_follow_tree_edge (t);

      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
	{
	  DFS_follow_tree_edge (index);
	  DFS_follow_tree_edge (value);
	}
    }

  if (code == OMP_CLAUSE)
    {
      int i;
      for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
	DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
      DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
    }

#undef DFS_follow_tree_edge
}
931
/* Return a hash value for the tree T.
   CACHE holds hash values of trees outside current SCC.  MAP, if non-NULL,
   may hold hash values of trees inside current SCC.  */
935
936 static hashval_t
937 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
938 {
939 inchash::hash hstate;
940
941 #define visit(SIBLING) \
942 do { \
943 unsigned ix; \
944 if (!SIBLING) \
945 hstate.add_int (0); \
946 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
947 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
948 else if (map) \
949 hstate.add_int (*map->get (SIBLING)); \
950 else \
951 hstate.add_int (1); \
952 } while (0)
953
954 /* Hash TS_BASE. */
955 enum tree_code code = TREE_CODE (t);
956 hstate.add_int (code);
957 if (!TYPE_P (t))
958 {
959 hstate.add_flag (TREE_SIDE_EFFECTS (t));
960 hstate.add_flag (TREE_CONSTANT (t));
961 hstate.add_flag (TREE_READONLY (t));
962 hstate.add_flag (TREE_PUBLIC (t));
963 }
964 hstate.add_flag (TREE_ADDRESSABLE (t));
965 hstate.add_flag (TREE_THIS_VOLATILE (t));
966 if (DECL_P (t))
967 hstate.add_flag (DECL_UNSIGNED (t));
968 else if (TYPE_P (t))
969 hstate.add_flag (TYPE_UNSIGNED (t));
970 if (TYPE_P (t))
971 hstate.add_flag (TYPE_ARTIFICIAL (t));
972 else
973 hstate.add_flag (TREE_NO_WARNING (t));
974 hstate.add_flag (TREE_NOTHROW (t));
975 hstate.add_flag (TREE_STATIC (t));
976 hstate.add_flag (TREE_PROTECTED (t));
977 hstate.add_flag (TREE_DEPRECATED (t));
978 if (code != TREE_BINFO)
979 hstate.add_flag (TREE_PRIVATE (t));
980 if (TYPE_P (t))
981 {
982 hstate.add_flag (TYPE_SATURATING (t));
983 hstate.add_flag (TYPE_ADDR_SPACE (t));
984 }
985 else if (code == SSA_NAME)
986 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
987 hstate.commit_flag ();
988
989 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
990 {
991 int i;
992 hstate.add_wide_int (TREE_INT_CST_NUNITS (t));
993 hstate.add_wide_int (TREE_INT_CST_EXT_NUNITS (t));
994 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
995 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
996 }
997
998 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
999 {
1000 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
1001 hstate.add_flag (r.cl);
1002 hstate.add_flag (r.sign);
1003 hstate.add_flag (r.signalling);
1004 hstate.add_flag (r.canonical);
1005 hstate.commit_flag ();
1006 hstate.add_int (r.uexp);
1007 hstate.add (r.sig, sizeof (r.sig));
1008 }
1009
1010 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1011 {
1012 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
1013 hstate.add_int (f.mode);
1014 hstate.add_int (f.data.low);
1015 hstate.add_int (f.data.high);
1016 }
1017
1018 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1019 {
1020 hstate.add_wide_int (DECL_MODE (t));
1021 hstate.add_flag (DECL_NONLOCAL (t));
1022 hstate.add_flag (DECL_VIRTUAL_P (t));
1023 hstate.add_flag (DECL_IGNORED_P (t));
1024 hstate.add_flag (DECL_ABSTRACT_P (t));
1025 hstate.add_flag (DECL_ARTIFICIAL (t));
1026 hstate.add_flag (DECL_USER_ALIGN (t));
1027 hstate.add_flag (DECL_PRESERVE_P (t));
1028 hstate.add_flag (DECL_EXTERNAL (t));
1029 hstate.add_flag (DECL_GIMPLE_REG_P (t));
1030 hstate.commit_flag ();
1031 hstate.add_int (DECL_ALIGN (t));
1032 if (code == LABEL_DECL)
1033 {
1034 hstate.add_int (EH_LANDING_PAD_NR (t));
1035 hstate.add_int (LABEL_DECL_UID (t));
1036 }
1037 else if (code == FIELD_DECL)
1038 {
1039 hstate.add_flag (DECL_PACKED (t));
1040 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
1041 hstate.add_int (DECL_OFFSET_ALIGN (t));
1042 }
1043 else if (code == VAR_DECL)
1044 {
1045 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
1046 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
1047 }
1048 if (code == RESULT_DECL
1049 || code == PARM_DECL
1050 || code == VAR_DECL)
1051 {
1052 hstate.add_flag (DECL_BY_REFERENCE (t));
1053 if (code == VAR_DECL
1054 || code == PARM_DECL)
1055 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
1056 }
1057 hstate.commit_flag ();
1058 }
1059
1060 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1061 hstate.add_int (DECL_REGISTER (t));
1062
1063 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1064 {
1065 hstate.add_flag (DECL_COMMON (t));
1066 hstate.add_flag (DECL_DLLIMPORT_P (t));
1067 hstate.add_flag (DECL_WEAK (t));
1068 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
1069 hstate.add_flag (DECL_COMDAT (t));
1070 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
1071 hstate.add_int (DECL_VISIBILITY (t));
1072 if (code == VAR_DECL)
1073 {
1074 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1075 hstate.add_flag (DECL_HARD_REGISTER (t));
1076 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
1077 }
1078 if (TREE_CODE (t) == FUNCTION_DECL)
1079 {
1080 hstate.add_flag (DECL_FINAL_P (t));
1081 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
1082 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
1083 }
1084 hstate.commit_flag ();
1085 }
1086
1087 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1088 {
1089 hstate.add_int (DECL_BUILT_IN_CLASS (t));
1090 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
1091 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
1092 hstate.add_flag (DECL_UNINLINABLE (t));
1093 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
1094 hstate.add_flag (DECL_IS_NOVOPS (t));
1095 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
1096 hstate.add_flag (DECL_IS_MALLOC (t));
1097 hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
1098 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
1099 hstate.add_flag (DECL_STATIC_CHAIN (t));
1100 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
1101 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
1102 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
1103 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
1104 hstate.add_flag (DECL_PURE_P (t));
1105 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
1106 hstate.commit_flag ();
1107 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
1108 hstate.add_int (DECL_FUNCTION_CODE (t));
1109 }
1110
1111 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1112 {
1113 hstate.add_wide_int (TYPE_MODE (t));
1114 hstate.add_flag (TYPE_STRING_FLAG (t));
1115 /* TYPE_NO_FORCE_BLK is private to stor-layout and need
1116 no streaming. */
1117 hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
1118 hstate.add_flag (TYPE_PACKED (t));
1119 hstate.add_flag (TYPE_RESTRICT (t));
1120 hstate.add_flag (TYPE_USER_ALIGN (t));
1121 hstate.add_flag (TYPE_READONLY (t));
1122 if (RECORD_OR_UNION_TYPE_P (t))
1123 {
1124 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
1125 hstate.add_flag (TYPE_FINAL_P (t));
1126 }
1127 else if (code == ARRAY_TYPE)
1128 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
1129 hstate.commit_flag ();
1130 hstate.add_int (TYPE_PRECISION (t));
1131 hstate.add_int (TYPE_ALIGN (t));
1132 hstate.add_int ((TYPE_ALIAS_SET (t) == 0
1133 || (!in_lto_p
1134 && get_alias_set (t) == 0))
1135 ? 0 : -1);
1136 }
1137
1138 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1139 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
1140 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
1141
1142 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
1143 /* We don't stream these when passing things to a different target. */
1144 && !lto_stream_offload_p)
1145 hstate.add_wide_int (cl_target_option_hash (TREE_TARGET_OPTION (t)));
1146
1147 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1148 hstate.add_wide_int (cl_optimization_hash (TREE_OPTIMIZATION (t)));
1149
1150 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1151 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
1152
1153 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1154 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
1155
1156 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1157 {
1158 if (code != IDENTIFIER_NODE)
1159 visit (TREE_TYPE (t));
1160 }
1161
1162 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1163 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
1164 visit (VECTOR_CST_ELT (t, i));
1165
1166 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1167 {
1168 visit (TREE_REALPART (t));
1169 visit (TREE_IMAGPART (t));
1170 }
1171
1172 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1173 {
1174 /* Drop names that were created for anonymous entities. */
1175 if (DECL_NAME (t)
1176 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
1177 && anon_aggrname_p (DECL_NAME (t)))
1178 ;
1179 else
1180 visit (DECL_NAME (t));
1181 if (DECL_FILE_SCOPE_P (t))
1182 ;
1183 else
1184 visit (DECL_CONTEXT (t));
1185 }
1186
1187 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1188 {
1189 visit (DECL_SIZE (t));
1190 visit (DECL_SIZE_UNIT (t));
1191 visit (DECL_ATTRIBUTES (t));
1192 if ((code == VAR_DECL
1193 || code == PARM_DECL)
1194 && DECL_HAS_VALUE_EXPR_P (t))
1195 visit (DECL_VALUE_EXPR (t));
1196 if (code == VAR_DECL
1197 && DECL_HAS_DEBUG_EXPR_P (t))
1198 visit (DECL_DEBUG_EXPR (t));
1199 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
1200 be able to call get_symbol_initial_value. */
1201 }
1202
1203 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1204 {
1205 if (code == TYPE_DECL)
1206 visit (DECL_ORIGINAL_TYPE (t));
1207 }
1208
1209 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1210 {
1211 if (DECL_ASSEMBLER_NAME_SET_P (t))
1212 visit (DECL_ASSEMBLER_NAME (t));
1213 }
1214
1215 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1216 {
1217 visit (DECL_FIELD_OFFSET (t));
1218 visit (DECL_BIT_FIELD_TYPE (t));
1219 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1220 visit (DECL_FIELD_BIT_OFFSET (t));
1221 visit (DECL_FCONTEXT (t));
1222 }
1223
1224 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1225 {
1226 visit (DECL_VINDEX (t));
1227 visit (DECL_FUNCTION_PERSONALITY (t));
1228 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1229 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1230 }
1231
1232 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1233 {
1234 visit (TYPE_SIZE (t));
1235 visit (TYPE_SIZE_UNIT (t));
1236 visit (TYPE_ATTRIBUTES (t));
1237 visit (TYPE_NAME (t));
1238 visit (TYPE_MAIN_VARIANT (t));
1239 if (TYPE_FILE_SCOPE_P (t))
1240 ;
1241 else
1242 visit (TYPE_CONTEXT (t));
1243 visit (TYPE_STUB_DECL (t));
1244 }
1245
1246 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1247 {
1248 if (code == ENUMERAL_TYPE)
1249 visit (TYPE_VALUES (t));
1250 else if (code == ARRAY_TYPE)
1251 visit (TYPE_DOMAIN (t));
1252 else if (RECORD_OR_UNION_TYPE_P (t))
1253 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1254 visit (f);
1255 else if (code == FUNCTION_TYPE
1256 || code == METHOD_TYPE)
1257 visit (TYPE_ARG_TYPES (t));
1258 if (!POINTER_TYPE_P (t))
1259 visit (TYPE_MINVAL (t));
1260 visit (TYPE_MAXVAL (t));
1261 if (RECORD_OR_UNION_TYPE_P (t))
1262 visit (TYPE_BINFO (t));
1263 }
1264
1265 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1266 {
1267 visit (TREE_PURPOSE (t));
1268 visit (TREE_VALUE (t));
1269 visit (TREE_CHAIN (t));
1270 }
1271
1272 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1273 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1274 visit (TREE_VEC_ELT (t, i));
1275
1276 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1277 {
1278 hstate.add_wide_int (TREE_OPERAND_LENGTH (t));
1279 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1280 visit (TREE_OPERAND (t, i));
1281 }
1282
1283 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1284 {
1285 unsigned i;
1286 tree b;
1287 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1288 visit (b);
1289 visit (BINFO_OFFSET (t));
1290 visit (BINFO_VTABLE (t));
1291 visit (BINFO_VPTR_FIELD (t));
1292 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1293 visit (b);
1294 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1295 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1296 }
1297
1298 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1299 {
1300 unsigned i;
1301 tree index, value;
1302 hstate.add_wide_int (CONSTRUCTOR_NELTS (t));
1303 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1304 {
1305 visit (index);
1306 visit (value);
1307 }
1308 }
1309
1310 if (code == OMP_CLAUSE)
1311 {
1312 int i;
1313 HOST_WIDE_INT val;
1314
1315 hstate.add_wide_int (OMP_CLAUSE_CODE (t));
1316 switch (OMP_CLAUSE_CODE (t))
1317 {
1318 case OMP_CLAUSE_DEFAULT:
1319 val = OMP_CLAUSE_DEFAULT_KIND (t);
1320 break;
1321 case OMP_CLAUSE_SCHEDULE:
1322 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1323 break;
1324 case OMP_CLAUSE_DEPEND:
1325 val = OMP_CLAUSE_DEPEND_KIND (t);
1326 break;
1327 case OMP_CLAUSE_MAP:
1328 val = OMP_CLAUSE_MAP_KIND (t);
1329 break;
1330 case OMP_CLAUSE_PROC_BIND:
1331 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1332 break;
1333 case OMP_CLAUSE_REDUCTION:
1334 val = OMP_CLAUSE_REDUCTION_CODE (t);
1335 break;
1336 default:
1337 val = 0;
1338 break;
1339 }
1340 hstate.add_wide_int (val);
1341 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1342 visit (OMP_CLAUSE_OPERAND (t, i));
1343 visit (OMP_CLAUSE_CHAIN (t));
1344 }
1345
1346 return hstate.end ();
1347
1348 #undef visit
1349 }
1350
1351 /* Compare two SCC entries by their hash value for qsorting them. */
1352
1353 int
1354 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1355 {
1356 const scc_entry *p1 = (const scc_entry *) p1_;
1357 const scc_entry *p2 = (const scc_entry *) p2_;
1358 if (p1->hash < p2->hash)
1359 return -1;
1360 else if (p1->hash > p2->hash)
1361 return 1;
1362 return 0;
1363 }
1364
/* Return a hash value for the SCC on the SCC stack from FIRST with SIZE.
   The stack is sorted in place and the per-element hashes are made
   unique as a side effect; callers rely on that.  */

hashval_t
DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size)
{
  unsigned int last_classes = 0, iterations = 0;

  /* Compute hash values for the SCC members.  */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash
      = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);

  /* A singleton SCC needs no order canonicalization.  */
  if (size == 1)
    return sccstack[first].hash;

  /* We aim to get unique hash for every tree within SCC and compute hash value
     of the whole SCC by combining all values together in a stable (entry-point
     independent) order.  This guarantees that the same SCC regions within
     different translation units will get the same hash values and therefore
     will be merged at WPA time.

     Often the hashes are already unique.  In that case we compute the SCC hash
     by combining individual hash values in an increasing order.

     If there are duplicates, we seek at least one tree with unique hash (and
     pick one with minimal hash and this property).  Then we obtain a stable
     order by DFS walk starting from this unique tree and then use the index
     within this order to make individual hash values unique.

     If there is no tree with unique hash, we iteratively propagate the hash
     values across the internal edges of SCC.  This usually quickly leads
     to unique hashes.  Consider, for example, an SCC containing two pointers
     that are identical except for the types they point to and assume that
     these types are also part of the SCC.  The propagation will add the
     points-to type information into their hash values.  */
  do
    {
      /* Sort the SCC so we can easily check for uniqueness.  */
      qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);

      unsigned int classes = 1;
      int firstunique = -1;

      /* Find the tree with lowest unique hash (if it exists) and compute
	 the number of equivalence classes.  */
      if (sccstack[first].hash != sccstack[first+1].hash)
	firstunique = 0;
      for (unsigned i = 1; i < size; ++i)
	if (sccstack[first+i-1].hash != sccstack[first+i].hash)
	  {
	    classes++;
	    /* An element is unique when it differs from both its
	       predecessor (checked above) and its successor.  */
	    if (firstunique == -1
		&& (i == size - 1
		    || sccstack[first+i+1].hash != sccstack[first+i].hash))
	      firstunique = i;
	  }

      /* If we found a tree with unique hash, stop the iteration.  */
      if (firstunique != -1
	  /* Also terminate if we run out of iterations or if the number of
	     equivalence classes is no longer increasing.
	     For example a cyclic list of trees that are all equivalent will
	     never have unique entry point; we however do not build such SCCs
	     in our IL.  */
	  || classes <= last_classes || iterations > 16)
	{
	  hashval_t scc_hash;

	  /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
	     starting from FIRSTUNIQUE to obtain a stable order.  */
	  if (classes != size && firstunique != -1)
	    {
	      hash_map <tree, hashval_t> map(size*2);

	      /* Store hash values into a map, so we can associate them with
		 the reordered SCC.  */
	      for (unsigned i = 0; i < size; ++i)
		map.put (sccstack[first+i].t, sccstack[first+i].hash);

	      /* Re-walk the SCC from the unique entry point; the walk
	         order is entry-point independent by construction.  */
	      DFS again (ob, sccstack[first+firstunique].t, false, false, true);
	      gcc_assert (again.sccstack.length () == size);

	      /* Replace our slice of the stack with the re-ordered one.  */
	      memcpy (sccstack.address () + first,
		      again.sccstack.address (),
		      sizeof (scc_entry) * size);

	      /* Update hash values of individual members by hashing in the
		 index within the stable order.  This ensures uniqueness.
		 Also compute the SCC hash by mixing in all hash values in
		 the stable order we obtained.  */
	      sccstack[first].hash = *map.get (sccstack[first].t);
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		{
		  sccstack[first+i].hash
		    = iterative_hash_hashval_t (i,
						*map.get (sccstack[first+i].t));
		  scc_hash
		    = iterative_hash_hashval_t (scc_hash,
						sccstack[first+i].hash);
		}
	    }
	  /* If we got a unique hash value for each tree, then sort already
	     ensured entry-point independent order.  Only compute the final
	     SCC hash.

	     If we failed to find the unique entry point, we go by the same
	     route.  We will eventually introduce unwanted hash conflicts.  */
	  else
	    {
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		scc_hash
		  = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);

	      /* We cannot 100% guarantee that the hash won't conflict so as
		 to make it impossible to find a unique hash.  This however
		 should be an extremely rare case.  ICE for now so possible
		 issues are found and evaluated.  */
	      gcc_checking_assert (classes == size);
	    }

	  /* To avoid conflicts across SCCs, iteratively hash the whole SCC
	     hash into the hash of each element.  */
	  for (unsigned i = 0; i < size; ++i)
	    sccstack[first+i].hash
	      = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
	  return scc_hash;
	}

      last_classes = classes;
      iterations++;

      /* We failed to identify the entry point; propagate hash values across
	 the edges.  */
      hash_map <tree, hashval_t> map(size*2);

      for (unsigned i = 0; i < size; ++i)
	map.put (sccstack[first+i].t, sccstack[first+i].hash);

      /* Re-hash every member, this time mixing in the current hashes of
	 the trees it references (via MAP) so information flows along
	 the internal SCC edges.  */
      for (unsigned i = 0; i < size; i++)
	sccstack[first+i].hash
	  = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
    }
  while (true);
}
1511
1512 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1513 already in the streamer cache. Main routine called for
1514 each visit of EXPR. */
1515
1516 void
1517 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1518 tree expr, bool ref_p, bool this_ref_p)
1519 {
1520 /* Handle special cases. */
1521 if (expr == NULL_TREE)
1522 return;
1523
1524 /* Do not DFS walk into indexable trees. */
1525 if (this_ref_p && tree_is_indexable (expr))
1526 return;
1527
1528 /* Check if we already streamed EXPR. */
1529 if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1530 return;
1531
1532 worklist w;
1533 w.expr = expr;
1534 w.from_state = from_state;
1535 w.cstate = NULL;
1536 w.ref_p = ref_p;
1537 w.this_ref_p = this_ref_p;
1538 worklist_vec.safe_push (w);
1539 }
1540
1541
/* Emit the physical representation of tree node EXPR to output block
   OB.  If THIS_REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.  */

void
lto_output_tree (struct output_block *ob, tree expr,
		 bool ref_p, bool this_ref_p)
{
  unsigned ix;
  bool existed_p;

  /* NULL trees are encoded as a bare LTO_null record.  */
  if (expr == NULL_TREE)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  /* Indexable trees are emitted as references into their own
     sections rather than pickled inline.  */
  if (this_ref_p && tree_is_indexable (expr))
    {
      lto_output_tree_ref (ob, expr);
      return;
    }

  existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
  if (existed_p)
    {
      /* If a node has already been streamed out, make sure that
	 we don't write it more than once.  Otherwise, the reader
	 will instantiate two different nodes for the same object.  */
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
  else
    {
      /* This is the first time we see EXPR, write all reachable
	 trees to OB.  */
      static bool in_dfs_walk;

      /* Protect against recursion which means disconnect between
	 what tree edges we walk in the DFS walk and what edges
	 we stream out.  */
      gcc_assert (!in_dfs_walk);

      /* Start the DFS walk; the DFS constructor performs the walk
	 and streams out every SCC of trees reachable from EXPR,
	 populating OB's writer cache as it goes.  */
      in_dfs_walk = true;
      DFS (ob, expr, ref_p, this_ref_p, false);
      in_dfs_walk = false;

      /* Finally append a reference to the tree we were writing.
	 ??? If expr ended up as a singleton we could have
	 inlined it here and avoid outputting a reference.  */
      existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
      gcc_assert (existed_p);
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
}
1607
1608
1609 /* Output to OB a list of try/catch handlers starting with FIRST. */
1610
1611 static void
1612 output_eh_try_list (struct output_block *ob, eh_catch first)
1613 {
1614 eh_catch n;
1615
1616 for (n = first; n; n = n->next_catch)
1617 {
1618 streamer_write_record_start (ob, LTO_eh_catch);
1619 stream_write_tree (ob, n->type_list, true);
1620 stream_write_tree (ob, n->filter_list, true);
1621 stream_write_tree (ob, n->label, true);
1622 }
1623
1624 streamer_write_record_start (ob, LTO_null);
1625 }
1626
1627
1628 /* Output EH region R in function FN to OB. CURR_RN is the slot index
1629 that is being emitted in FN->EH->REGION_ARRAY. This is used to
1630 detect EH region sharing. */
1631
1632 static void
1633 output_eh_region (struct output_block *ob, eh_region r)
1634 {
1635 enum LTO_tags tag;
1636
1637 if (r == NULL)
1638 {
1639 streamer_write_record_start (ob, LTO_null);
1640 return;
1641 }
1642
1643 if (r->type == ERT_CLEANUP)
1644 tag = LTO_ert_cleanup;
1645 else if (r->type == ERT_TRY)
1646 tag = LTO_ert_try;
1647 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1648 tag = LTO_ert_allowed_exceptions;
1649 else if (r->type == ERT_MUST_NOT_THROW)
1650 tag = LTO_ert_must_not_throw;
1651 else
1652 gcc_unreachable ();
1653
1654 streamer_write_record_start (ob, tag);
1655 streamer_write_hwi (ob, r->index);
1656
1657 if (r->outer)
1658 streamer_write_hwi (ob, r->outer->index);
1659 else
1660 streamer_write_zero (ob);
1661
1662 if (r->inner)
1663 streamer_write_hwi (ob, r->inner->index);
1664 else
1665 streamer_write_zero (ob);
1666
1667 if (r->next_peer)
1668 streamer_write_hwi (ob, r->next_peer->index);
1669 else
1670 streamer_write_zero (ob);
1671
1672 if (r->type == ERT_TRY)
1673 {
1674 output_eh_try_list (ob, r->u.eh_try.first_catch);
1675 }
1676 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1677 {
1678 stream_write_tree (ob, r->u.allowed.type_list, true);
1679 stream_write_tree (ob, r->u.allowed.label, true);
1680 streamer_write_uhwi (ob, r->u.allowed.filter);
1681 }
1682 else if (r->type == ERT_MUST_NOT_THROW)
1683 {
1684 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1685 bitpack_d bp = bitpack_create (ob->main_stream);
1686 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1687 streamer_write_bitpack (&bp);
1688 }
1689
1690 if (r->landing_pads)
1691 streamer_write_hwi (ob, r->landing_pads->index);
1692 else
1693 streamer_write_zero (ob);
1694 }
1695
1696
1697 /* Output landing pad LP to OB. */
1698
1699 static void
1700 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1701 {
1702 if (lp == NULL)
1703 {
1704 streamer_write_record_start (ob, LTO_null);
1705 return;
1706 }
1707
1708 streamer_write_record_start (ob, LTO_eh_landing_pad);
1709 streamer_write_hwi (ob, lp->index);
1710 if (lp->next_lp)
1711 streamer_write_hwi (ob, lp->next_lp->index);
1712 else
1713 streamer_write_zero (ob);
1714
1715 if (lp->region)
1716 streamer_write_hwi (ob, lp->region->index);
1717 else
1718 streamer_write_zero (ob);
1719
1720 stream_write_tree (ob, lp->post_landing_pad, true);
1721 }
1722
1723
/* Output the existing eh_table of FN to OB.  When FN has no EH region
   tree, only a terminating LTO_null record is emitted.  */

static void
output_eh_regions (struct output_block *ob, struct function *fn)
{
  if (fn->eh && fn->eh->region_tree)
    {
      unsigned i;
      eh_region eh;
      eh_landing_pad lp;
      tree ttype;

      streamer_write_record_start (ob, LTO_eh_table);

      /* Emit the index of the root of the EH region tree.  */
      streamer_write_hwi (ob, fn->eh->region_tree->index);

      /* Emit all the EH regions in the region array, preceded by
	 the array length.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
	output_eh_region (ob, eh);

      /* Emit all landing pads.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
	output_eh_lp (ob, lp);

      /* Emit all the runtime type data.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
	stream_write_tree (ob, ttype, true);

      /* Emit the table of action chains.  The ARM EABI unwinder keeps
	 exception-specification data as trees; other targets keep it
	 as raw bytes.  */
      if (targetm.arm_eabi_unwinder)
	{
	  tree t;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
	    stream_write_tree (ob, t, true);
	}
      else
	{
	  uchar c;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
	    streamer_write_char_stream (ob->main_stream, c);
	}
    }

  /* The LTO_null either terminates the record or indicates that there
     are no eh_records at all.  */
  streamer_write_record_start (ob, LTO_null);
}
1777
1778
1779 /* Output all of the active ssa names to the ssa_names stream. */
1780
1781 static void
1782 output_ssa_names (struct output_block *ob, struct function *fn)
1783 {
1784 unsigned int i, len;
1785
1786 len = vec_safe_length (SSANAMES (fn));
1787 streamer_write_uhwi (ob, len);
1788
1789 for (i = 1; i < len; i++)
1790 {
1791 tree ptr = (*SSANAMES (fn))[i];
1792
1793 if (ptr == NULL_TREE
1794 || SSA_NAME_IN_FREE_LIST (ptr)
1795 || virtual_operand_p (ptr))
1796 continue;
1797
1798 streamer_write_uhwi (ob, i);
1799 streamer_write_char_stream (ob->main_stream,
1800 SSA_NAME_IS_DEFAULT_DEF (ptr));
1801 if (SSA_NAME_VAR (ptr))
1802 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1803 else
1804 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1805 stream_write_tree (ob, TREE_TYPE (ptr), true);
1806 }
1807
1808 streamer_write_zero (ob);
1809 }
1810
1811
1812 /* Output a wide-int. */
1813
1814 static void
1815 streamer_write_wi (struct output_block *ob,
1816 const widest_int &w)
1817 {
1818 int len = w.get_len ();
1819
1820 streamer_write_uhwi (ob, w.get_precision ());
1821 streamer_write_uhwi (ob, len);
1822 for (int i = 0; i < len; i++)
1823 streamer_write_hwi (ob, w.elt (i));
1824 }
1825
1826
1827 /* Output the cfg. */
1828
1829 static void
1830 output_cfg (struct output_block *ob, struct function *fn)
1831 {
1832 struct lto_output_stream *tmp_stream = ob->main_stream;
1833 basic_block bb;
1834
1835 ob->main_stream = ob->cfg_stream;
1836
1837 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1838 profile_status_for_fn (fn));
1839
1840 /* Output the number of the highest basic block. */
1841 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1842
1843 FOR_ALL_BB_FN (bb, fn)
1844 {
1845 edge_iterator ei;
1846 edge e;
1847
1848 streamer_write_hwi (ob, bb->index);
1849
1850 /* Output the successors and the edge flags. */
1851 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1852 FOR_EACH_EDGE (e, ei, bb->succs)
1853 {
1854 streamer_write_uhwi (ob, e->dest->index);
1855 streamer_write_hwi (ob, e->probability);
1856 streamer_write_gcov_count (ob, e->count);
1857 streamer_write_uhwi (ob, e->flags);
1858 }
1859 }
1860
1861 streamer_write_hwi (ob, -1);
1862
1863 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1864 while (bb->next_bb)
1865 {
1866 streamer_write_hwi (ob, bb->next_bb->index);
1867 bb = bb->next_bb;
1868 }
1869
1870 streamer_write_hwi (ob, -1);
1871
1872 /* ??? The cfgloop interface is tied to cfun. */
1873 gcc_assert (cfun == fn);
1874
1875 /* Output the number of loops. */
1876 streamer_write_uhwi (ob, number_of_loops (fn));
1877
1878 /* Output each loop, skipping the tree root which has number zero. */
1879 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1880 {
1881 struct loop *loop = get_loop (fn, i);
1882
1883 /* Write the index of the loop header. That's enough to rebuild
1884 the loop tree on the reader side. Stream -1 for an unused
1885 loop entry. */
1886 if (!loop)
1887 {
1888 streamer_write_hwi (ob, -1);
1889 continue;
1890 }
1891 else
1892 streamer_write_hwi (ob, loop->header->index);
1893
1894 /* Write everything copy_loop_info copies. */
1895 streamer_write_enum (ob->main_stream,
1896 loop_estimation, EST_LAST, loop->estimate_state);
1897 streamer_write_hwi (ob, loop->any_upper_bound);
1898 if (loop->any_upper_bound)
1899 streamer_write_wi (ob, loop->nb_iterations_upper_bound);
1900 streamer_write_hwi (ob, loop->any_estimate);
1901 if (loop->any_estimate)
1902 streamer_write_wi (ob, loop->nb_iterations_estimate);
1903
1904 /* Write OMP SIMD related info. */
1905 streamer_write_hwi (ob, loop->safelen);
1906 streamer_write_hwi (ob, loop->dont_vectorize);
1907 streamer_write_hwi (ob, loop->force_vectorize);
1908 stream_write_tree (ob, loop->simduid, true);
1909 }
1910
1911 ob->main_stream = tmp_stream;
1912 }
1913
1914
/* Create the header in the file using OB.  If the section type is for
   a function, set FN to the decl for that function.  */

void
produce_asm (struct output_block *ob, tree fn)
{
  enum lto_section_type section_type = ob->section_type;
  struct lto_function_header header;
  char *section_name;

  /* Function-body sections are named after the function's assembler
     name; other section types get a generic name.  */
  if (section_type == LTO_section_function_body)
    {
      const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
      section_name = lto_get_section_name (section_type, name, NULL);
    }
  else
    section_name = lto_get_section_name (section_type, NULL, NULL);

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header is stream computed here.  */
  memset (&header, 0, sizeof (struct lto_function_header));

  /* Write the header.  */
  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  /* Only function bodies carry a CFG stream; cfg_size stays zero
     for all other section types.  */
  if (section_type == LTO_section_function_body)
    header.cfg_size = ob->cfg_stream->total_size;
  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;
  lto_write_data (&header, sizeof header);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  The order (cfg, main, strings) must match the
     sizes recorded in the header above.  */
  if (section_type == LTO_section_function_body)
    lto_write_stream (ob->cfg_stream);
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();
}
1958
1959
/* Output the base body of struct function FN using output block OB.
   NOTE(review): the order of the bit-packed fields below presumably
   must match the reader side exactly — verify against the
   corresponding input routine before reordering anything.  */

static void
output_struct_function_base (struct output_block *ob, struct function *fn)
{
  struct bitpack_d bp;
  unsigned i;
  tree t;

  /* Output the static chain and non-local goto save area.  */
  stream_write_tree (ob, fn->static_chain_decl, true);
  stream_write_tree (ob, fn->nonlocal_goto_save_area, true);

  /* Output all the local variables in the function, preceded by
     their count.  */
  streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
  FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
    stream_write_tree (ob, t, true);

  /* Output current IL state of the function.  */
  streamer_write_uhwi (ob, fn->curr_properties);

  /* Write all the attributes for FN as a single bit-pack.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, fn->is_thunk, 1);
  bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
  bp_pack_value (&bp, fn->returns_pcc_struct, 1);
  bp_pack_value (&bp, fn->returns_struct, 1);
  bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
  bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
  bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
  bp_pack_value (&bp, fn->after_inlining, 1);
  bp_pack_value (&bp, fn->stdarg, 1);
  bp_pack_value (&bp, fn->has_nonlocal_label, 1);
  bp_pack_value (&bp, fn->calls_alloca, 1);
  bp_pack_value (&bp, fn->calls_setjmp, 1);
  bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
  bp_pack_value (&bp, fn->has_simduid_loops, 1);
  bp_pack_value (&bp, fn->va_list_fpr_size, 8);
  bp_pack_value (&bp, fn->va_list_gpr_size, 8);
  bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);

  /* Output the function start and end loci.  */
  stream_output_location (ob, &bp, fn->function_start_locus);
  stream_output_location (ob, &bp, fn->function_end_locus);

  streamer_write_bitpack (&bp);
}
2007
2008
2009 /* Output the body of function NODE->DECL. */
2010
2011 static void
2012 output_function (struct cgraph_node *node)
2013 {
2014 tree function;
2015 struct function *fn;
2016 basic_block bb;
2017 struct output_block *ob;
2018
2019 function = node->decl;
2020 fn = DECL_STRUCT_FUNCTION (function);
2021 ob = create_output_block (LTO_section_function_body);
2022
2023 clear_line_info (ob);
2024 ob->symbol = node;
2025
2026 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
2027
2028 /* Set current_function_decl and cfun. */
2029 push_cfun (fn);
2030
2031 /* Make string 0 be a NULL string. */
2032 streamer_write_char_stream (ob->string_stream, 0);
2033
2034 streamer_write_record_start (ob, LTO_function);
2035
2036 /* Output decls for parameters and args. */
2037 stream_write_tree (ob, DECL_RESULT (function), true);
2038 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
2039
2040 /* Output DECL_INITIAL for the function, which contains the tree of
2041 lexical scopes. */
2042 stream_write_tree (ob, DECL_INITIAL (function), true);
2043
2044 /* We also stream abstract functions where we stream only stuff needed for
2045 debug info. */
2046 if (gimple_has_body_p (function))
2047 {
2048 streamer_write_uhwi (ob, 1);
2049 output_struct_function_base (ob, fn);
2050
2051 /* Output all the SSA names used in the function. */
2052 output_ssa_names (ob, fn);
2053
2054 /* Output any exception handling regions. */
2055 output_eh_regions (ob, fn);
2056
2057
2058 /* We will renumber the statements. The code that does this uses
2059 the same ordering that we use for serializing them so we can use
2060 the same code on the other end and not have to write out the
2061 statement numbers. We do not assign UIDs to PHIs here because
2062 virtual PHIs get re-computed on-the-fly which would make numbers
2063 inconsistent. */
2064 set_gimple_stmt_max_uid (cfun, 0);
2065 FOR_ALL_BB_FN (bb, cfun)
2066 {
2067 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2068 gsi_next (&gsi))
2069 {
2070 gphi *stmt = gsi.phi ();
2071
2072 /* Virtual PHIs are not going to be streamed. */
2073 if (!virtual_operand_p (gimple_phi_result (stmt)))
2074 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2075 }
2076 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
2077 gsi_next (&gsi))
2078 {
2079 gimple stmt = gsi_stmt (gsi);
2080 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2081 }
2082 }
2083 /* To avoid keeping duplicate gimple IDs in the statements, renumber
2084 virtual phis now. */
2085 FOR_ALL_BB_FN (bb, cfun)
2086 {
2087 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2088 gsi_next (&gsi))
2089 {
2090 gphi *stmt = gsi.phi ();
2091 if (virtual_operand_p (gimple_phi_result (stmt)))
2092 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2093 }
2094 }
2095
2096 /* Output the code for the function. */
2097 FOR_ALL_BB_FN (bb, fn)
2098 output_bb (ob, bb, fn);
2099
2100 /* The terminator for this function. */
2101 streamer_write_record_start (ob, LTO_null);
2102
2103 output_cfg (ob, fn);
2104
2105 pop_cfun ();
2106 }
2107 else
2108 streamer_write_uhwi (ob, 0);
2109
2110 /* Create a section to hold the pickled output of this function. */
2111 produce_asm (ob, function);
2112
2113 destroy_output_block (ob);
2114 }
2115
2116 /* Output the body of function NODE->DECL. */
2117
2118 static void
2119 output_constructor (struct varpool_node *node)
2120 {
2121 tree var = node->decl;
2122 struct output_block *ob;
2123
2124 ob = create_output_block (LTO_section_function_body);
2125
2126 clear_line_info (ob);
2127 ob->symbol = node;
2128
2129 /* Make string 0 be a NULL string. */
2130 streamer_write_char_stream (ob->string_stream, 0);
2131
2132 /* Output DECL_INITIAL for the function, which contains the tree of
2133 lexical scopes. */
2134 stream_write_tree (ob, DECL_INITIAL (var), true);
2135
2136 /* Create a section to hold the pickled output of this function. */
2137 produce_asm (ob, var);
2138
2139 destroy_output_block (ob);
2140 }
2141
2142
2143 /* Emit toplevel asms. */
2144
2145 void
2146 lto_output_toplevel_asms (void)
2147 {
2148 struct output_block *ob;
2149 struct asm_node *can;
2150 char *section_name;
2151 struct lto_simple_header_with_strings header;
2152
2153 if (!symtab->first_asm_symbol ())
2154 return;
2155
2156 ob = create_output_block (LTO_section_asm);
2157
2158 /* Make string 0 be a NULL string. */
2159 streamer_write_char_stream (ob->string_stream, 0);
2160
2161 for (can = symtab->first_asm_symbol (); can; can = can->next)
2162 {
2163 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2164 streamer_write_hwi (ob, can->order);
2165 }
2166
2167 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2168
2169 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
2170 lto_begin_section (section_name, !flag_wpa);
2171 free (section_name);
2172
2173 /* The entire header stream is computed here. */
2174 memset (&header, 0, sizeof (header));
2175
2176 /* Write the header. */
2177 header.major_version = LTO_major_version;
2178 header.minor_version = LTO_minor_version;
2179
2180 header.main_size = ob->main_stream->total_size;
2181 header.string_size = ob->string_stream->total_size;
2182 lto_write_data (&header, sizeof header);
2183
2184 /* Put all of the gimple and the string table out the asm file as a
2185 block of text. */
2186 lto_write_stream (ob->main_stream);
2187 lto_write_stream (ob->string_stream);
2188
2189 lto_end_section ();
2190
2191 destroy_output_block (ob);
2192 }
2193
2194
/* Copy the function body or variable constructor of NODE without
   deserializing: the pickled section written when NODE was originally
   compiled is re-emitted byte-for-byte, and its in-decl-state tree
   vectors are copied into the current out-decl-state so the reader's
   indices keep resolving to the same trees.  */

static void
copy_function_or_variable (struct symtab_node *node)
{
  tree function = node->decl;
  struct lto_file_decl_data *file_data = node->lto_file_data;
  const char *data;
  size_t len;
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
  char *section_name =
    lto_get_section_name (LTO_section_function_body, name, NULL);
  size_t i, j;
  struct lto_in_decl_state *in_state;
  struct lto_out_decl_state *out_state = lto_get_out_decl_state ();

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* We may have renamed the declaration, e.g., a static function.  */
  name = lto_get_decl_name_mapping (file_data, name);

  /* Fetch the previously pickled body from the input file.  */
  data = lto_get_section_data (file_data, LTO_section_function_body,
                               name, &len);
  gcc_assert (data);

  /* Do a bit copy of the function body.  */
  lto_write_data (data, len);

  /* Copy decls.  */
  in_state =
    lto_get_function_in_decl_state (node->lto_file_data, function);
  gcc_assert (in_state);

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      size_t n = vec_safe_length (in_state->streams[i]);
      vec<tree, va_gc> *trees = in_state->streams[i];
      struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);

      /* The out state must have the same indices as the in state.
	 So just copy the vector.  All the encoders in the out state
	 must be empty where we reach here.  */
      gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
      encoder->trees.reserve_exact (n);
      for (j = 0; j < n; j++)
	encoder->trees.safe_push ((*trees)[j]);
    }

  /* Release the section data obtained above and close the section.  */
  lto_free_section_data (file_data, LTO_section_function_body, name,
			 data, len);
  lto_end_section ();
}
2248
2249 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2250
2251 static tree
2252 wrap_refs (tree *tp, int *ws, void *)
2253 {
2254 tree t = *tp;
2255 if (handled_component_p (t)
2256 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL)
2257 {
2258 tree decl = TREE_OPERAND (t, 0);
2259 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2260 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2261 build1 (ADDR_EXPR, ptrtype, decl),
2262 build_int_cst (ptrtype, 0));
2263 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2264 *ws = 0;
2265 }
2266 else if (TREE_CODE (t) == CONSTRUCTOR)
2267 ;
2268 else if (!EXPR_P (t))
2269 *ws = 0;
2270 return NULL_TREE;
2271 }
2272
/* Main entry point from the pass manager.  Streams out the bodies of
   all encoded functions and variable initializers, each under its own
   freshly pushed out-decl-state, then emits the symbol table and
   offload tables.  */

void
lto_output (void)
{
  struct lto_out_decl_state *decl_state;
#ifdef ENABLE_CHECKING
  /* Bitmap of DECL_UIDs already written, to catch double output.  */
  bitmap output = lto_bitmap_alloc ();
#endif
  int i, n_nodes;
  lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* Initialize the streamer.  */
  lto_streamer_init ();

  n_nodes = lto_symtab_encoder_size (encoder);
  /* Process only the functions with bodies.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
	{
	  if (lto_symtab_encoder_encode_body_p (encoder, node)
	      && !node->alias)
	    {
#ifdef ENABLE_CHECKING
	      gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
	      bitmap_set_bit (output, DECL_UID (node->decl));
#endif
	      /* Each body gets its own decl state, recorded afterwards
		 so produce_asm_for_decls can stream it.  */
	      decl_state = lto_new_out_decl_state ();
	      lto_push_out_decl_state (decl_state);
	      if (gimple_has_body_p (node->decl) || !flag_wpa
		  /* Thunks have no body but they may be synthetized
		     at WPA time.  */
		  || DECL_ARGUMENTS (node->decl))
		output_function (node);
	      else
		copy_function_or_variable (node);
	      gcc_assert (lto_get_out_decl_state () == decl_state);
	      lto_pop_out_decl_state ();
	      lto_record_function_out_decl_state (node->decl, decl_state);
	    }
	}
      else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
	{
	  /* Wrap symbol references inside the ctor in a type
	     preserving MEM_REF.  */
	  tree ctor = DECL_INITIAL (node->decl);
	  if (ctor && !in_lto_p)
	    walk_tree (&ctor, wrap_refs, NULL, NULL);
	  /* Only stream initializers not already captured as symbol
	     initial values.  */
	  if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
	      && lto_symtab_encoder_encode_initializer_p (encoder, node)
	      && !node->alias)
	    {
	      timevar_push (TV_IPA_LTO_CTORS_OUT);
#ifdef ENABLE_CHECKING
	      gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
	      bitmap_set_bit (output, DECL_UID (node->decl));
#endif
	      decl_state = lto_new_out_decl_state ();
	      lto_push_out_decl_state (decl_state);
	      if (DECL_INITIAL (node->decl) != error_mark_node
		  || !flag_wpa)
		output_constructor (node);
	      else
		copy_function_or_variable (node);
	      gcc_assert (lto_get_out_decl_state () == decl_state);
	      lto_pop_out_decl_state ();
	      lto_record_function_out_decl_state (node->decl, decl_state);
	      timevar_pop (TV_IPA_LTO_CTORS_OUT);
	    }
	}
    }

  /* Emit the callgraph after emitting function bodies.  This needs to
     be done now to make sure that all the statements in every function
     have been renumbered so that edges can be associated with call
     statements using the statement UIDs.  */
  output_symtab ();

  output_offload_tables ();

#ifdef ENABLE_CHECKING
  lto_bitmap_free (output);
#endif
}
2359
2360 /* Write each node in encoded by ENCODER to OB, as well as those reachable
2361 from it and required for correct representation of its semantics.
2362 Each node in ENCODER must be a global declaration or a type. A node
2363 is written only once, even if it appears multiple times in the
2364 vector. Certain transitively-reachable nodes, such as those
2365 representing expressions, may be duplicated, but such nodes
2366 must not appear in ENCODER itself. */
2367
2368 static void
2369 write_global_stream (struct output_block *ob,
2370 struct lto_tree_ref_encoder *encoder)
2371 {
2372 tree t;
2373 size_t index;
2374 const size_t size = lto_tree_ref_encoder_size (encoder);
2375
2376 for (index = 0; index < size; index++)
2377 {
2378 t = lto_tree_ref_encoder_get_tree (encoder, index);
2379 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2380 stream_write_tree (ob, t, false);
2381 }
2382 }
2383
2384
2385 /* Write a sequence of indices into the globals vector corresponding
2386 to the trees in ENCODER. These are used by the reader to map the
2387 indices used to refer to global entities within function bodies to
2388 their referents. */
2389
2390 static void
2391 write_global_references (struct output_block *ob,
2392 struct lto_tree_ref_encoder *encoder)
2393 {
2394 tree t;
2395 uint32_t index;
2396 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2397
2398 /* Write size and slot indexes as 32-bit unsigned numbers. */
2399 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2400 data[0] = size;
2401
2402 for (index = 0; index < size; index++)
2403 {
2404 uint32_t slot_num;
2405
2406 t = lto_tree_ref_encoder_get_tree (encoder, index);
2407 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2408 gcc_assert (slot_num != (unsigned)-1);
2409 data[index + 1] = slot_num;
2410 }
2411
2412 lto_write_data (data, sizeof (int32_t) * (size + 1));
2413 free (data);
2414 }
2415
2416
2417 /* Write all the streams in an lto_out_decl_state STATE using
2418 output block OB and output stream OUT_STREAM. */
2419
2420 void
2421 lto_output_decl_state_streams (struct output_block *ob,
2422 struct lto_out_decl_state *state)
2423 {
2424 int i;
2425
2426 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2427 write_global_stream (ob, &state->streams[i]);
2428 }
2429
2430
2431 /* Write all the references in an lto_out_decl_state STATE using
2432 output block OB and output stream OUT_STREAM. */
2433
2434 void
2435 lto_output_decl_state_refs (struct output_block *ob,
2436 struct lto_out_decl_state *state)
2437 {
2438 unsigned i;
2439 uint32_t ref;
2440 tree decl;
2441
2442 /* Write reference to FUNCTION_DECL. If there is not function,
2443 write reference to void_type_node. */
2444 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2445 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2446 gcc_assert (ref != (unsigned)-1);
2447 lto_write_data (&ref, sizeof (uint32_t));
2448
2449 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2450 write_global_references (ob, &state->streams[i]);
2451 }
2452
2453
2454 /* Return the written size of STATE. */
2455
2456 static size_t
2457 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2458 {
2459 int i;
2460 size_t size;
2461
2462 size = sizeof (int32_t); /* fn_ref. */
2463 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2464 {
2465 size += sizeof (int32_t); /* vector size. */
2466 size += (lto_tree_ref_encoder_size (&state->streams[i])
2467 * sizeof (int32_t));
2468 }
2469 return size;
2470 }
2471
2472
/* Write symbol T into STREAM in CACHE.  SEEN specifies symbols we wrote
   so far; a symbol is emitted at most once.  ALIAS indicates T is
   being written on behalf of an alias, which relaxes the "defined
   symbols must have a symtab node" checking asserts.

   The record layout consumed by the linker plugin is: NUL-terminated
   assembler name, NUL-terminated comdat group, 1-byte kind, 1-byte
   visibility, 8-byte size, 4-byte writer-cache slot number.  */

static void
write_symbol (struct streamer_tree_cache_d *cache,
	      tree t, hash_set<const char *> *seen, bool alias)
{
  const char *name;
  enum gcc_plugin_symbol_kind kind;
  enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
  unsigned slot_num;
  uint64_t size;
  const char *comdat;
  unsigned char c;

  /* None of the following kinds of symbols are needed in the
     symbol table.  */
  if (!TREE_PUBLIC (t)
      || is_builtin_fn (t)
      || DECL_ABSTRACT_P (t)
      || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
    return;
  gcc_assert (TREE_CODE (t) != RESULT_DECL);

  /* Only variables and functions reach the plugin symbol table.  */
  gcc_assert (TREE_CODE (t) == VAR_DECL
	      || TREE_CODE (t) == FUNCTION_DECL);

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));

  /* This behaves like assemble_name_raw in varasm.c, performing the
     same name manipulations that ASM_OUTPUT_LABELREF does.  */
  name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));

  /* hash_set::add returns true when NAME was already present; emit
     each name only once.  */
  if (seen->add (name))
    return;

  streamer_tree_cache_lookup (cache, t, &slot_num);
  gcc_assert (slot_num != (unsigned)-1);

  /* Classify the symbol for the plugin: undefined vs defined, with
     weak/common refinements.  */
  if (DECL_EXTERNAL (t))
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKUNDEF;
      else
	kind = GCCPK_UNDEF;
    }
  else
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKDEF;
      else if (DECL_COMMON (t))
	kind = GCCPK_COMMON;
      else
	kind = GCCPK_DEF;

      /* When something is defined, it should have node attached.  */
      gcc_assert (alias || TREE_CODE (t) != VAR_DECL
		  || varpool_node::get (t)->definition);
      gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
		  || (cgraph_node::get (t)
		      && cgraph_node::get (t)->definition));
    }

  /* Imitate what default_elf_asm_output_external do.
     When symbol is external, we need to output it with DEFAULT visibility
     when compiling with -fvisibility=default, while with HIDDEN visibility
     when symbol has attribute (visibility("hidden")) specified.
     targetm.binds_local_p check DECL_VISIBILITY_SPECIFIED and gets this
     right.  */

  if (DECL_EXTERNAL (t)
      && !targetm.binds_local_p (t))
    visibility = GCCPV_DEFAULT;
  else
    switch (DECL_VISIBILITY (t))
      {
      case VISIBILITY_DEFAULT:
	visibility = GCCPV_DEFAULT;
	break;
      case VISIBILITY_PROTECTED:
	visibility = GCCPV_PROTECTED;
	break;
      case VISIBILITY_HIDDEN:
	visibility = GCCPV_HIDDEN;
	break;
      case VISIBILITY_INTERNAL:
	visibility = GCCPV_INTERNAL;
	break;
      }

  /* Only common symbols report a size (when it is a known constant);
     everything else writes 0.  */
  if (kind == GCCPK_COMMON
      && DECL_SIZE_UNIT (t)
      && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
    size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
  else
    size = 0;

  if (DECL_ONE_ONLY (t))
    comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
  else
    comdat = "";

  /* Emit the fixed record layout described in the header comment.  */
  lto_write_data (name, strlen (name) + 1);
  lto_write_data (comdat, strlen (comdat) + 1);
  c = (unsigned char) kind;
  lto_write_data (&c, 1);
  c = (unsigned char) visibility;
  lto_write_data (&c, 1);
  lto_write_data (&size, 8);
  lto_write_data (&slot_num, 4);
}
2584
2585 /* Return true if NODE should appear in the plugin symbol table. */
2586
2587 bool
2588 output_symbol_p (symtab_node *node)
2589 {
2590 struct cgraph_node *cnode;
2591 if (!node->real_symbol_p ())
2592 return false;
2593 /* We keep external functions in symtab for sake of inlining
2594 and devirtualization. We do not want to see them in symbol table as
2595 references unless they are really used. */
2596 cnode = dyn_cast <cgraph_node *> (node);
2597 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2598 && cnode->callers)
2599 return true;
2600
2601 /* Ignore all references from external vars initializers - they are not really
2602 part of the compilation unit until they are used by folding. Some symbols,
2603 like references to external construction vtables can not be referred to at all.
2604 We decide this at can_refer_decl_in_current_unit_p. */
2605 if (!node->definition || DECL_EXTERNAL (node->decl))
2606 {
2607 int i;
2608 struct ipa_ref *ref;
2609 for (i = 0; node->iterate_referring (i, ref); i++)
2610 {
2611 if (ref->use == IPA_REF_ALIAS)
2612 continue;
2613 if (is_a <cgraph_node *> (ref->referring))
2614 return true;
2615 if (!DECL_EXTERNAL (ref->referring->decl))
2616 return true;
2617 }
2618 return false;
2619 }
2620 return true;
2621 }
2622
2623
2624 /* Write an IL symbol table to OB.
2625 SET and VSET are cgraph/varpool node sets we are outputting. */
2626
2627 static void
2628 produce_symtab (struct output_block *ob)
2629 {
2630 struct streamer_tree_cache_d *cache = ob->writer_cache;
2631 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2632 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2633 lto_symtab_encoder_iterator lsei;
2634
2635 lto_begin_section (section_name, false);
2636 free (section_name);
2637
2638 hash_set<const char *> seen;
2639
2640 /* Write the symbol table.
2641 First write everything defined and then all declarations.
2642 This is necessary to handle cases where we have duplicated symbols. */
2643 for (lsei = lsei_start (encoder);
2644 !lsei_end_p (lsei); lsei_next (&lsei))
2645 {
2646 symtab_node *node = lsei_node (lsei);
2647
2648 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2649 continue;
2650 write_symbol (cache, node->decl, &seen, false);
2651 }
2652 for (lsei = lsei_start (encoder);
2653 !lsei_end_p (lsei); lsei_next (&lsei))
2654 {
2655 symtab_node *node = lsei_node (lsei);
2656
2657 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2658 continue;
2659 write_symbol (cache, node->decl, &seen, false);
2660 }
2661
2662 lto_end_section ();
2663 }
2664
2665
2666 /* Init the streamer_mode_table for output, where we collect info on what
2667 machine_mode values have been streamed. */
2668 void
2669 lto_output_init_mode_table (void)
2670 {
2671 memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
2672 }
2673
2674
/* Write the mode table: for every machine_mode marked in
   streamer_mode_table, bit-pack its class, size, precision, inner
   mode, nunits and (class-dependent) extra data, terminated by a
   VOIDmode entry, into the LTO_section_mode_table section.  */
static void
lto_write_mode_table (void)
{
  struct output_block *ob;
  ob = create_output_block (LTO_section_mode_table);
  bitpack_d bp = bitpack_create (ob->main_stream);

  /* Ensure that for GET_MODE_INNER (m) != VOIDmode we have
     also the inner mode marked.  */
  for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
    if (streamer_mode_table[i])
      {
	machine_mode m = (machine_mode) i;
	if (GET_MODE_INNER (m) != VOIDmode)
	  streamer_mode_table[(int) GET_MODE_INNER (m)] = 1;
      }
  /* First stream modes that have GET_MODE_INNER (m) == VOIDmode,
     so that we can refer to them afterwards.  */
  for (int pass = 0; pass < 2; pass++)
    for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
      if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
	{
	  machine_mode m = (machine_mode) i;
	  /* Pass 0 handles scalar modes (inner == VOIDmode), pass 1
	     the composite ones that refer to them.  */
	  if ((GET_MODE_INNER (m) == VOIDmode) ^ (pass == 0))
	    continue;
	  bp_pack_value (&bp, m, 8);
	  bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
	  bp_pack_value (&bp, GET_MODE_SIZE (m), 8);
	  bp_pack_value (&bp, GET_MODE_PRECISION (m), 16);
	  bp_pack_value (&bp, GET_MODE_INNER (m), 8);
	  bp_pack_value (&bp, GET_MODE_NUNITS (m), 8);
	  /* Class-specific payload: fixed-point modes carry IBIT/FBIT,
	     float modes carry the real format's name.  */
	  switch (GET_MODE_CLASS (m))
	    {
	    case MODE_FRACT:
	    case MODE_UFRACT:
	    case MODE_ACCUM:
	    case MODE_UACCUM:
	      bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
	      bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
	      break;
	    case MODE_FLOAT:
	    case MODE_DECIMAL_FLOAT:
	      bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
	      break;
	    default:
	      break;
	    }
	  bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
	}
  /* VOIDmode terminates the table.  */
  bp_pack_value (&bp, VOIDmode, 8);

  streamer_write_bitpack (&bp);

  char *section_name
    = lto_get_section_name (LTO_section_mode_table, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header stream is computed here.  */
  struct lto_simple_header_with_strings header;
  memset (&header, 0, sizeof (header));

  /* Write the header.  */
  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;
  lto_write_data (&header, sizeof header);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();
  destroy_output_block (ob);
}
2754
2755
/* This pass is run after all of the functions are serialized and all
   of the IPA passes have written their serialized forms.  This pass
   causes the vector of all of the global decls and types used from
   this file to be written in to a section that can then be read in to
   recover these on other side.

   Layout: header, then (num_fns + 1) decl states -- the global one
   first, then one per recorded function -- whose total size must
   match the decl_state_size announced in the header.  */

void
produce_asm_for_decls (void)
{
  struct lto_out_decl_state *out_state;
  struct lto_out_decl_state *fn_out_state;
  struct lto_decl_header header;
  char *section_name;
  struct output_block *ob;
  unsigned idx, num_fns;
  size_t decl_state_size;
  int32_t num_decl_states;

  ob = create_output_block (LTO_section_decls);

  memset (&header, 0, sizeof (struct lto_decl_header));

  section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* All aliases must have been resolved by now.  */
  gcc_assert (!alias_pairs);

  /* Get rid of the global decl state hash tables to save some memory.  */
  out_state = lto_get_out_decl_state ();
  for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
    if (out_state->streams[i].tree_hash_table)
      {
	delete out_state->streams[i].tree_hash_table;
	out_state->streams[i].tree_hash_table = NULL;
      }

  /* Write the global symbols.  */
  lto_output_decl_state_streams (ob, out_state);
  num_fns = lto_function_decl_states.length ();
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_streams (ob, fn_out_state);
    }

  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  /* Currently not used.  This field would allow us to preallocate
     the globals vector, so that it need not be resized as it is extended.  */
  header.num_nodes = -1;

  /* Compute the total size of all decl out states.  Must agree with
     lto_out_decl_state_written_size for each state, plus the
     num_decl_states count written below.  */
  decl_state_size = sizeof (int32_t);
  decl_state_size += lto_out_decl_state_written_size (out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      decl_state_size += lto_out_decl_state_written_size (fn_out_state);
    }
  header.decl_state_size = decl_state_size;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  lto_write_data (&header, sizeof header);

  /* Write the main out-decl state, followed by out-decl states of
     functions.  */
  num_decl_states = num_fns + 1;
  lto_write_data (&num_decl_states, sizeof (num_decl_states));
  lto_output_decl_state_refs (ob, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state = lto_function_decl_states[idx];
      lto_output_decl_state_refs (ob, fn_out_state);
    }

  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  /* Write the symbol table.  It is used by linker to determine dependencies
     and thus we can skip it for WPA.  */
  if (!flag_wpa)
    produce_symtab (ob);

  /* Write command line opts.  */
  lto_write_options ();

  /* Deallocate memory and clean up.  */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_delete_out_decl_state (fn_out_state);
    }
  lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
  lto_function_decl_states.release ();
  destroy_output_block (ob);
  /* The mode table is only needed when streaming for offload targets.  */
  if (lto_stream_offload_p)
    lto_write_mode_table ();
}