decl.c (value_annotation_hasher::handle_cache_entry): Delete.
[gcc.git] / gcc / lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2015 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "alias.h"
28 #include "symtab.h"
29 #include "tree.h"
30 #include "fold-const.h"
31 #include "stor-layout.h"
32 #include "stringpool.h"
33 #include "hard-reg-set.h"
34 #include "function.h"
35 #include "rtl.h"
36 #include "flags.h"
37 #include "insn-config.h"
38 #include "expmed.h"
39 #include "dojump.h"
40 #include "explow.h"
41 #include "calls.h"
42 #include "emit-rtl.h"
43 #include "varasm.h"
44 #include "stmt.h"
45 #include "expr.h"
46 #include "params.h"
47 #include "predict.h"
48 #include "dominance.h"
49 #include "cfg.h"
50 #include "basic-block.h"
51 #include "tree-ssa-alias.h"
52 #include "internal-fn.h"
53 #include "gimple-expr.h"
54 #include "gimple.h"
55 #include "gimple-iterator.h"
56 #include "gimple-ssa.h"
57 #include "tree-ssanames.h"
58 #include "tree-pass.h"
59 #include "diagnostic-core.h"
60 #include "except.h"
61 #include "lto-symtab.h"
62 #include "plugin-api.h"
63 #include "ipa-ref.h"
64 #include "cgraph.h"
65 #include "lto-streamer.h"
66 #include "data-streamer.h"
67 #include "gimple-streamer.h"
68 #include "tree-streamer.h"
69 #include "streamer-hooks.h"
70 #include "cfgloop.h"
71 #include "builtins.h"
72 #include "gomp-constants.h"
73
74
75 static void lto_write_tree (struct output_block*, tree, bool);
76
77 /* Clear the line info stored in DATA_IN. */
78
79 static void
80 clear_line_info (struct output_block *ob)
81 {
82 ob->current_file = NULL;
83 ob->current_line = 0;
84 ob->current_col = 0;
85 }
86
87
88 /* Create the output block and return it. SECTION_TYPE is
89 LTO_section_function_body or LTO_static_initializer. */
90
91 struct output_block *
92 create_output_block (enum lto_section_type section_type)
93 {
94 struct output_block *ob = XCNEW (struct output_block);
95
96 ob->section_type = section_type;
97 ob->decl_state = lto_get_out_decl_state ();
98 ob->main_stream = XCNEW (struct lto_output_stream);
99 ob->string_stream = XCNEW (struct lto_output_stream);
100 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
101
102 if (section_type == LTO_section_function_body)
103 ob->cfg_stream = XCNEW (struct lto_output_stream);
104
105 clear_line_info (ob);
106
107 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
108 gcc_obstack_init (&ob->obstack);
109
110 return ob;
111 }
112
113
114 /* Destroy the output block OB. */
115
116 void
117 destroy_output_block (struct output_block *ob)
118 {
119 enum lto_section_type section_type = ob->section_type;
120
121 delete ob->string_hash_table;
122 ob->string_hash_table = NULL;
123
124 free (ob->main_stream);
125 free (ob->string_stream);
126 if (section_type == LTO_section_function_body)
127 free (ob->cfg_stream);
128
129 streamer_tree_cache_delete (ob->writer_cache);
130 obstack_free (&ob->obstack, NULL);
131
132 free (ob);
133 }
134
135
/* Look up NODE in the type table and write the index for it to OB.  */

static void
output_type_ref (struct output_block *ob, tree node)
{
  /* Emit the record tag first so the reader knows a type-table index
     follows, then NODE's index in OB's out-decl-state type table.  */
  streamer_write_record_start (ob, LTO_type_ref);
  lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
}
144
145
146 /* Return true if tree node T is written to various tables. For these
147 nodes, we sometimes want to write their phyiscal representation
148 (via lto_output_tree), and sometimes we need to emit an index
149 reference into a table (via lto_output_tree_ref). */
150
151 static bool
152 tree_is_indexable (tree t)
153 {
154 /* Parameters and return values of functions of variably modified types
155 must go to global stream, because they may be used in the type
156 definition. */
157 if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
158 && DECL_CONTEXT (t))
159 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
160 /* IMPORTED_DECL is put into BLOCK and thus it never can be shared. */
161 else if (TREE_CODE (t) == IMPORTED_DECL)
162 return false;
163 else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
164 || TREE_CODE (t) == TYPE_DECL
165 || TREE_CODE (t) == CONST_DECL
166 || TREE_CODE (t) == NAMELIST_DECL)
167 && decl_function_context (t))
168 return false;
169 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
170 return false;
171 /* Variably modified types need to be streamed alongside function
172 bodies because they can refer to local entities. Together with
173 them we have to localize their members as well.
174 ??? In theory that includes non-FIELD_DECLs as well. */
175 else if (TYPE_P (t)
176 && variably_modified_type_p (t, NULL_TREE))
177 return false;
178 else if (TREE_CODE (t) == FIELD_DECL
179 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
180 return false;
181 else
182 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
183 }
184
185
186 /* Output info about new location into bitpack BP.
187 After outputting bitpack, lto_output_location_data has
188 to be done to output actual data. */
189
190 void
191 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
192 location_t loc)
193 {
194 expanded_location xloc;
195
196 loc = LOCATION_LOCUS (loc);
197 bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT,
198 loc < RESERVED_LOCATION_COUNT
199 ? loc : RESERVED_LOCATION_COUNT);
200 if (loc < RESERVED_LOCATION_COUNT)
201 return;
202
203 xloc = expand_location (loc);
204
205 bp_pack_value (bp, ob->current_file != xloc.file, 1);
206 bp_pack_value (bp, ob->current_line != xloc.line, 1);
207 bp_pack_value (bp, ob->current_col != xloc.column, 1);
208
209 if (ob->current_file != xloc.file)
210 bp_pack_string (ob, bp, xloc.file, true);
211 ob->current_file = xloc.file;
212
213 if (ob->current_line != xloc.line)
214 bp_pack_var_len_unsigned (bp, xloc.line);
215 ob->current_line = xloc.line;
216
217 if (ob->current_col != xloc.column)
218 bp_pack_var_len_unsigned (bp, xloc.column);
219 ob->current_col = xloc.column;
220 }
221
222
/* If EXPR is an indexable tree node, output a reference to it to
   output block OB.  Otherwise, output the physical representation of
   EXPR to OB.  */

static void
lto_output_tree_ref (struct output_block *ob, tree expr)
{
  enum tree_code code;

  /* All types are referenced through the type table.  */
  if (TYPE_P (expr))
    {
      output_type_ref (ob, expr);
      return;
    }

  code = TREE_CODE (expr);
  switch (code)
    {
    case SSA_NAME:
      /* SSA names are referenced by version number only; the body is
	 reconstructed on the reading side (see lto_is_streamable).  */
      streamer_write_record_start (ob, LTO_ssa_name_ref);
      streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
      break;

    case FIELD_DECL:
      streamer_write_record_start (ob, LTO_field_decl_ref);
      lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case FUNCTION_DECL:
      streamer_write_record_start (ob, LTO_function_decl_ref);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case VAR_DECL:
    case DEBUG_EXPR_DECL:
      /* Only global or static variables may be referenced here; locals
	 are streamed with the function body.  */
      gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
      /* Fallthrough: these share the global decl reference encoding
	 with PARM_DECL below.  */
    case PARM_DECL:
      streamer_write_record_start (ob, LTO_global_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case CONST_DECL:
      streamer_write_record_start (ob, LTO_const_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case IMPORTED_DECL:
      gcc_assert (decl_function_context (expr) == NULL);
      streamer_write_record_start (ob, LTO_imported_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TYPE_DECL:
      streamer_write_record_start (ob, LTO_type_decl_ref);
      lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMELIST_DECL:
      streamer_write_record_start (ob, LTO_namelist_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMESPACE_DECL:
      streamer_write_record_start (ob, LTO_namespace_decl_ref);
      lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case LABEL_DECL:
      streamer_write_record_start (ob, LTO_label_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case RESULT_DECL:
      streamer_write_record_start (ob, LTO_result_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TRANSLATION_UNIT_DECL:
      streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    default:
      /* No other node is indexable, so it should have been handled by
	 lto_output_tree.  */
      gcc_unreachable ();
    }
}
311
312
313 /* Return true if EXPR is a tree node that can be written to disk. */
314
315 static inline bool
316 lto_is_streamable (tree expr)
317 {
318 enum tree_code code = TREE_CODE (expr);
319
320 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
321 name version in lto_output_tree_ref (see output_ssa_names). */
322 return !is_lang_specific (expr)
323 && code != SSA_NAME
324 && code != CALL_EXPR
325 && code != LANG_TYPE
326 && code != MODIFY_EXPR
327 && code != INIT_EXPR
328 && code != TARGET_EXPR
329 && code != BIND_EXPR
330 && code != WITH_CLEANUP_EXPR
331 && code != STATEMENT_LIST
332 && (code == CASE_LABEL_EXPR
333 || code == DECL_EXPR
334 || TREE_CODE_CLASS (code) != tcc_statement);
335 }
336
337
338 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
339
340 static tree
341 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
342 {
343 gcc_checking_assert (DECL_P (expr)
344 && TREE_CODE (expr) != FUNCTION_DECL
345 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
346
347 /* Handle DECL_INITIAL for symbols. */
348 tree initial = DECL_INITIAL (expr);
349 if (TREE_CODE (expr) == VAR_DECL
350 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
351 && !DECL_IN_CONSTANT_POOL (expr)
352 && initial)
353 {
354 varpool_node *vnode;
355 /* Extra section needs about 30 bytes; do not produce it for simple
356 scalar values. */
357 if (TREE_CODE (DECL_INITIAL (expr)) == CONSTRUCTOR
358 || !(vnode = varpool_node::get (expr))
359 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
360 initial = error_mark_node;
361 }
362
363 return initial;
364 }
365
366
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  EXPR is assumed to already be present in
   OB's writer cache.  */

static void
lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
{
  /* Pack all the non-pointer fields in EXPR into a bitpack and write
     the resulting bitpack.  */
  streamer_write_tree_bitfields (ob, expr);

  /* Write all the pointer fields in EXPR.  */
  streamer_write_tree_body (ob, expr, ref_p);

  /* Write any LTO-specific data to OB.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    {
      /* Handle DECL_INITIAL for symbols; it may be replaced with
	 error_mark_node when the initializer is not streamed
	 (see get_symbol_initial_value).  */
      tree initial = get_symbol_initial_value
			 (ob->decl_state->symtab_node_encoder, expr);
      stream_write_tree (ob, initial, ref_p);
    }
}
393
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  Aborts with an internal error when EXPR
   is not streamable.  */

static void
lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
{
  if (!lto_is_streamable (expr))
    internal_error ("tree code %qs is not supported in LTO streams",
		    get_tree_code_name (TREE_CODE (expr)));

  /* Write the header, containing everything needed to materialize
     EXPR on the reading side.  */
  streamer_write_tree_header (ob, expr);

  /* Body: bitfields, pointer fields and LTO-specific data.  */
  lto_write_tree_1 (ob, expr, ref_p);

  /* Mark the end of EXPR.  */
  streamer_write_zero (ob);
}
415
/* Emit the physical representation of tree node EXPR to output block
   OB.  HASH is the streaming hash under which EXPR is registered in the
   writer cache.  If THIS_REF_P is true, the leaves of EXPR are emitted
   as references via lto_output_tree_ref.  REF_P is used for streaming
   siblings of EXPR.  EXPR must not have been cached before.  */

static void
lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
		   bool ref_p, bool this_ref_p)
{
  unsigned ix;

  /* An indexable EXPR must never be asked to stream itself by reference
     here; that is lto_output_tree_ref's job.  */
  gcc_checking_assert (expr != NULL_TREE
		       && !(this_ref_p && tree_is_indexable (expr)));

  /* Register EXPR in the writer cache; it must be the first time.  */
  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
					      expr, hash, &ix);
  gcc_assert (!exists_p);
  if (streamer_handle_as_builtin_p (expr))
    {
      /* MD and NORMAL builtins do not need to be written out
	 completely as they are always instantiated by the
	 compiler on startup.  The only builtins that need to
	 be written out are BUILT_IN_FRONTEND.  For all other
	 builtins, we simply write the class and code.  */
      streamer_write_builtin (ob, expr);
    }
  else if (TREE_CODE (expr) == INTEGER_CST
	   && !TREE_OVERFLOW (expr))
    {
      /* Shared INTEGER_CST nodes are special because they need their
	 original type to be materialized by the reader (to implement
	 TYPE_CACHED_VALUES).  */
      streamer_write_integer_cst (ob, expr, ref_p);
    }
  else
    {
      /* This is the first time we see EXPR, write its fields
	 to OB.  */
      lto_write_tree (ob, expr, ref_p);
    }
}
456
/* Worker class implementing the iterative depth-first SCC discovery
   used to stream trees in strongly-connected-component order.  */

class DFS
{
public:
  DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
       bool single_p);
  ~DFS ();

  /* An entry on the SCC stack: a tree together with its streaming hash.  */
  struct scc_entry
  {
    tree t;
    hashval_t hash;
  };
  /* Trees discovered but not yet assigned to a completed SCC,
     in DFS discovery order.  */
  vec<scc_entry> sccstack;

private:
  /* Per-tree DFS state: discovery number and the smallest discovery
     number reachable from it (Tarjan's "low" value).  */
  struct sccs
  {
    unsigned int dfsnum;
    unsigned int low;
  };
  /* A pending edge on the explicit worklist used instead of recursion.  */
  struct worklist
  {
    tree expr;
    sccs *from_state;
    sccs *cstate;
    bool ref_p;
    bool this_ref_p;
  };

  static int scc_entry_compare (const void *, const void *);

  /* Walk all tree edges originating from EXPR.  */
  void DFS_write_tree_body (struct output_block *ob,
			    tree expr, sccs *expr_state, bool ref_p);

  /* Push the edge FROM_STATE -> EXPR onto the worklist.  */
  void DFS_write_tree (struct output_block *ob, sccs *from_state,
		       tree expr, bool ref_p, bool this_ref_p);

  /* Compute a hash over the SCC occupying sccstack[first..first+size).  */
  hashval_t
  hash_scc (struct output_block *ob, unsigned first, unsigned size);

  /* Map from tree to its DFS state; state objects live on
     sccstate_obstack.  */
  hash_map<tree, sccs *> sccstate;
  vec<worklist> worklist_vec;
  struct obstack sccstate_obstack;
};
501
/* Perform the iterative DFS (Tarjan-style SCC discovery, driven by an
   explicit worklist instead of recursion) over all tree edges reachable
   from EXPR, streaming each completed SCC to OB as an LTO_tree_scc
   record.  REF_P and THIS_REF_P control reference streaming of leaves
   and of EXPR itself; SINGLE_P means we are re-walking a single leaf
   SCC, which is popped without being re-emitted.  */

DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
	  bool single_p)
{
  unsigned int next_dfs_num = 1;
  sccstack.create (0);
  gcc_obstack_init (&sccstate_obstack);
  worklist_vec = vNULL;
  DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
  while (!worklist_vec.is_empty ())
    {
      worklist &w = worklist_vec.last ();
      expr = w.expr;
      sccs *from_state = w.from_state;
      sccs *cstate = w.cstate;
      ref_p = w.ref_p;
      this_ref_p = w.this_ref_p;
      if (cstate == NULL)
	{
	  /* First visit of this worklist entry.  */
	  sccs **slot = &sccstate.get_or_insert (expr);
	  cstate = *slot;
	  if (cstate)
	    {
	      /* Already visited: only propagate the low value back.  */
	      gcc_checking_assert (from_state);
	      if (cstate->dfsnum < from_state->dfsnum)
		from_state->low = MIN (cstate->dfsnum, from_state->low);
	      worklist_vec.pop ();
	      continue;
	    }

	  scc_entry e = { expr, 0 };
	  /* Not yet visited.  DFS recurse and push it onto the stack.  */
	  *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
	  sccstack.safe_push (e);
	  cstate->dfsnum = next_dfs_num++;
	  cstate->low = cstate->dfsnum;
	  w.cstate = cstate;

	  /* Builtins have no edges to walk; INTEGER_CSTs only need their
	     type; everything else gets a full edge walk.  */
	  if (streamer_handle_as_builtin_p (expr))
	    ;
	  else if (TREE_CODE (expr) == INTEGER_CST
		   && !TREE_OVERFLOW (expr))
	    DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
	  else
	    {
	      DFS_write_tree_body (ob, expr, cstate, ref_p);

	      /* Walk any LTO-specific edges.  */
	      if (DECL_P (expr)
		  && TREE_CODE (expr) != FUNCTION_DECL
		  && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
		{
		  /* Handle DECL_INITIAL for symbols.  */
		  tree initial
		    = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
						expr);
		  DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
		}
	    }
	  /* Leave the entry on the worklist; it is revisited (with
	     cstate set) after all its children have been processed.  */
	  continue;
	}

      /* See if we found an SCC.  */
      if (cstate->low == cstate->dfsnum)
	{
	  unsigned first, size;
	  tree x;

	  /* If we are re-walking a single leaf-SCC just pop it,
	     let earlier worklist item access the sccstack.  */
	  if (single_p)
	    {
	      worklist_vec.pop ();
	      continue;
	    }

	  /* Pop the SCC and compute its size.  */
	  first = sccstack.length ();
	  do
	    {
	      x = sccstack[--first].t;
	    }
	  while (x != expr);
	  size = sccstack.length () - first;

	  /* No need to compute hashes for LTRANS units, we don't perform
	     any merging there.  */
	  hashval_t scc_hash = 0;
	  unsigned scc_entry_len = 0;
	  if (!flag_wpa)
	    {
	      scc_hash = hash_scc (ob, first, size);

	      /* Put the entries with the least number of collisions first.  */
	      unsigned entry_start = 0;
	      scc_entry_len = size + 1;
	      for (unsigned i = 0; i < size;)
		{
		  unsigned from = i;
		  for (i = i + 1; i < size
		       && (sccstack[first + i].hash
			   == sccstack[first + from].hash); ++i)
		    ;
		  if (i - from < scc_entry_len)
		    {
		      scc_entry_len = i - from;
		      entry_start = from;
		    }
		}
	      for (unsigned i = 0; i < scc_entry_len; ++i)
		std::swap (sccstack[first + i],
			   sccstack[first + entry_start + i]);

	      if (scc_entry_len == 1)
		; /* We already sorted SCC deterministically in hash_scc.  */
	      else
		/* Check that we have only one SCC.
		   Naturally we may have conflicts if hash function is not
		   strong enough.  Lets see how far this gets.  */
		{
		  /* NOTE(review): with checking enabled this aborts
		     whenever more than one minimal hash-equal run exists;
		     presumably collisions are expected never to occur
		     here -- confirm against upstream history.  */
#ifdef ENABLE_CHECKING
		  gcc_unreachable ();
#endif
		}
	    }

	  /* Write LTO_tree_scc.  */
	  streamer_write_record_start (ob, LTO_tree_scc);
	  streamer_write_uhwi (ob, size);
	  streamer_write_uhwi (ob, scc_hash);

	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
	     All INTEGER_CSTs need to be handled this way as we need
	     their type to materialize them.  Also builtins are handled
	     this way.
	     ??? We still wrap these in LTO_tree_scc so at the
	     input side we can properly identify the tree we want
	     to ultimatively return.  */
	  if (size == 1)
	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
	  else
	    {
	      /* Write the size of the SCC entry candidates.  */
	      streamer_write_uhwi (ob, scc_entry_len);

	      /* Write all headers and populate the streamer cache.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  hashval_t hash = sccstack[first+i].hash;
		  tree t = sccstack[first+i].t;
		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
							      t, hash, NULL);
		  gcc_assert (!exists_p);

		  if (!lto_is_streamable (t))
		    internal_error ("tree code %qs is not supported "
				    "in LTO streams",
				    get_tree_code_name (TREE_CODE (t)));

		  gcc_checking_assert (!streamer_handle_as_builtin_p (t));

		  /* Write the header, containing everything needed to
		     materialize EXPR on the reading side.  */
		  streamer_write_tree_header (ob, t);
		}

	      /* Write the bitpacks and tree references.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);

		  /* Mark the end of the tree.  */
		  streamer_write_zero (ob);
		}
	    }

	  /* Finally truncate the vector.  */
	  sccstack.truncate (first);

	  if (from_state)
	    from_state->low = MIN (from_state->low, cstate->low);
	  worklist_vec.pop ();
	  continue;
	}

      /* Not an SCC root: propagate the low value to the parent.  */
      gcc_checking_assert (from_state);
      from_state->low = MIN (from_state->low, cstate->low);
      if (cstate->dfsnum < from_state->dfsnum)
	from_state->low = MIN (cstate->dfsnum, from_state->low);
      worklist_vec.pop ();
    }
  worklist_vec.release ();
}
694
/* Release the SCC stack and the obstack backing the per-tree DFS
   state (the sccs objects allocated with XOBNEW).  */

DFS::~DFS ()
{
  sccstack.release ();
  obstack_free (&sccstate_obstack, NULL);
}
700
/* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
   DFS recurse for all tree edges originating from it.  Each contained
   tree structure (TS_*) contributes its pointer fields as edges.  */

void
DFS::DFS_write_tree_body (struct output_block *ob,
			  tree expr, sccs *expr_state, bool ref_p)
{
#define DFS_follow_tree_edge(DEST) \
  DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)

  enum tree_code code;

  code = TREE_CODE (expr);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (TREE_CODE (expr) != IDENTIFIER_NODE)
	DFS_follow_tree_edge (TREE_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
	DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      DFS_follow_tree_edge (TREE_REALPART (expr));
      DFS_follow_tree_edge (TREE_IMAGPART (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (expr)
	  && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
	  && anon_aggrname_p (DECL_NAME (expr)))
	;
      else
	DFS_follow_tree_edge (DECL_NAME (expr));
      DFS_follow_tree_edge (DECL_CONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      DFS_follow_tree_edge (DECL_SIZE (expr));
      DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));

      /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
	 special handling in LTO, it must be handled by streamer hooks.  */

      DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));

      /* Do not follow DECL_ABSTRACT_ORIGIN.  We cannot handle debug information
	 for early inlining so drop it on the floor instead of ICEing in
	 dwarf2out.c.  */

      if ((TREE_CODE (expr) == VAR_DECL
	   || TREE_CODE (expr) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (expr))
	DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
      if (TREE_CODE (expr) == VAR_DECL)
	DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (TREE_CODE (expr) == TYPE_DECL)
	DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* Make sure we don't inadvertently set the assembler name.  */
      if (DECL_ASSEMBLER_NAME_SET_P (expr))
	DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
      DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
      DFS_follow_tree_edge (DECL_FCONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      DFS_follow_tree_edge (DECL_VINDEX (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      DFS_follow_tree_edge (TYPE_SIZE (expr));
      DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
      DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
      DFS_follow_tree_edge (TYPE_NAME (expr));
      /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
	 reconstructed during fixup.  */
      /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
	 during fixup.  */
      DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
      DFS_follow_tree_edge (TYPE_CONTEXT (expr));
      /* TYPE_CANONICAL is re-computed during type merging, so no need
	 to follow it here.  */
      DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	DFS_follow_tree_edge (TYPE_VALUES (expr));
      else if (TREE_CODE (expr) == ARRAY_TYPE)
	DFS_follow_tree_edge (TYPE_DOMAIN (expr));
      else if (RECORD_OR_UNION_TYPE_P (expr))
	for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
	  DFS_follow_tree_edge (t);
      else if (TREE_CODE (expr) == FUNCTION_TYPE
	       || TREE_CODE (expr) == METHOD_TYPE)
	DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));

      if (!POINTER_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_MINVAL (expr));
      DFS_follow_tree_edge (TYPE_MAXVAL (expr));
      if (RECORD_OR_UNION_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_BINFO (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      DFS_follow_tree_edge (TREE_PURPOSE (expr));
      DFS_follow_tree_edge (TREE_VALUE (expr));
      DFS_follow_tree_edge (TREE_CHAIN (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    {
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_OPERAND (expr, i));
      DFS_follow_tree_edge (TREE_BLOCK (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    {
      for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
	if (VAR_OR_FUNCTION_DECL_P (t)
	    && DECL_EXTERNAL (t))
	  /* We have to stream externals in the block chain as
	     non-references.  See also
	     tree-streamer-out.c:streamer_write_chain.  */
	  DFS_write_tree (ob, expr_state, t, ref_p, false);
	else
	  DFS_follow_tree_edge (t);

      DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));

      /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
	 handle - those that represent inlined function scopes.
	 For the rest, drop them on the floor instead of ICEing
	 in dwarf2out.c.  */
      if (inlined_function_outer_scope_p (expr))
	{
	  tree ultimate_origin = block_ultimate_origin (expr);
	  DFS_follow_tree_edge (ultimate_origin);
	}
      /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
	 information for early inlined BLOCKs so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
	 streaming time.  */

      /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
	 list is re-constructed from BLOCK_SUPERCONTEXT.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree t;

      /* Note that the number of BINFO slots has already been emitted in
	 EXPR's header (see streamer_write_tree_header) because this length
	 is needed to build the empty BINFO node on the reader side.  */
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
	DFS_follow_tree_edge (t);
      DFS_follow_tree_edge (BINFO_OFFSET (expr));
      DFS_follow_tree_edge (BINFO_VTABLE (expr));
      DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));

      /* The number of BINFO_BASE_ACCESSES has already been emitted in
	 EXPR's bitfield section.  */
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
	DFS_follow_tree_edge (t);

      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
	{
	  DFS_follow_tree_edge (index);
	  DFS_follow_tree_edge (value);
	}
    }

  if (code == OMP_CLAUSE)
    {
      int i;
      for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
	DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
      DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
    }

#undef DFS_follow_tree_edge
}
933
934 /* Return a hash value for the tree T.
   CACHE holds hash values of trees outside current SCC.  MAP, if non-NULL,
   may hold hash values of trees inside current SCC.  */
937
938 static hashval_t
939 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
940 {
941 inchash::hash hstate;
942
943 #define visit(SIBLING) \
944 do { \
945 unsigned ix; \
946 if (!SIBLING) \
947 hstate.add_int (0); \
948 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
949 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
950 else if (map) \
951 hstate.add_int (*map->get (SIBLING)); \
952 else \
953 hstate.add_int (1); \
954 } while (0)
955
956 /* Hash TS_BASE. */
957 enum tree_code code = TREE_CODE (t);
958 hstate.add_int (code);
959 if (!TYPE_P (t))
960 {
961 hstate.add_flag (TREE_SIDE_EFFECTS (t));
962 hstate.add_flag (TREE_CONSTANT (t));
963 hstate.add_flag (TREE_READONLY (t));
964 hstate.add_flag (TREE_PUBLIC (t));
965 }
966 hstate.add_flag (TREE_ADDRESSABLE (t));
967 hstate.add_flag (TREE_THIS_VOLATILE (t));
968 if (DECL_P (t))
969 hstate.add_flag (DECL_UNSIGNED (t));
970 else if (TYPE_P (t))
971 hstate.add_flag (TYPE_UNSIGNED (t));
972 if (TYPE_P (t))
973 hstate.add_flag (TYPE_ARTIFICIAL (t));
974 else
975 hstate.add_flag (TREE_NO_WARNING (t));
976 hstate.add_flag (TREE_NOTHROW (t));
977 hstate.add_flag (TREE_STATIC (t));
978 hstate.add_flag (TREE_PROTECTED (t));
979 hstate.add_flag (TREE_DEPRECATED (t));
980 if (code != TREE_BINFO)
981 hstate.add_flag (TREE_PRIVATE (t));
982 if (TYPE_P (t))
983 {
984 hstate.add_flag (TYPE_SATURATING (t));
985 hstate.add_flag (TYPE_ADDR_SPACE (t));
986 }
987 else if (code == SSA_NAME)
988 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
989 hstate.commit_flag ();
990
991 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
992 {
993 int i;
994 hstate.add_wide_int (TREE_INT_CST_NUNITS (t));
995 hstate.add_wide_int (TREE_INT_CST_EXT_NUNITS (t));
996 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
997 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
998 }
999
1000 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1001 {
1002 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
1003 hstate.add_flag (r.cl);
1004 hstate.add_flag (r.sign);
1005 hstate.add_flag (r.signalling);
1006 hstate.add_flag (r.canonical);
1007 hstate.commit_flag ();
1008 hstate.add_int (r.uexp);
1009 hstate.add (r.sig, sizeof (r.sig));
1010 }
1011
1012 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1013 {
1014 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
1015 hstate.add_int (f.mode);
1016 hstate.add_int (f.data.low);
1017 hstate.add_int (f.data.high);
1018 }
1019
1020 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1021 {
1022 hstate.add_wide_int (DECL_MODE (t));
1023 hstate.add_flag (DECL_NONLOCAL (t));
1024 hstate.add_flag (DECL_VIRTUAL_P (t));
1025 hstate.add_flag (DECL_IGNORED_P (t));
1026 hstate.add_flag (DECL_ABSTRACT_P (t));
1027 hstate.add_flag (DECL_ARTIFICIAL (t));
1028 hstate.add_flag (DECL_USER_ALIGN (t));
1029 hstate.add_flag (DECL_PRESERVE_P (t));
1030 hstate.add_flag (DECL_EXTERNAL (t));
1031 hstate.add_flag (DECL_GIMPLE_REG_P (t));
1032 hstate.commit_flag ();
1033 hstate.add_int (DECL_ALIGN (t));
1034 if (code == LABEL_DECL)
1035 {
1036 hstate.add_int (EH_LANDING_PAD_NR (t));
1037 hstate.add_int (LABEL_DECL_UID (t));
1038 }
1039 else if (code == FIELD_DECL)
1040 {
1041 hstate.add_flag (DECL_PACKED (t));
1042 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
1043 hstate.add_int (DECL_OFFSET_ALIGN (t));
1044 }
1045 else if (code == VAR_DECL)
1046 {
1047 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
1048 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
1049 }
1050 if (code == RESULT_DECL
1051 || code == PARM_DECL
1052 || code == VAR_DECL)
1053 {
1054 hstate.add_flag (DECL_BY_REFERENCE (t));
1055 if (code == VAR_DECL
1056 || code == PARM_DECL)
1057 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
1058 }
1059 hstate.commit_flag ();
1060 }
1061
1062 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1063 hstate.add_int (DECL_REGISTER (t));
1064
1065 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1066 {
1067 hstate.add_flag (DECL_COMMON (t));
1068 hstate.add_flag (DECL_DLLIMPORT_P (t));
1069 hstate.add_flag (DECL_WEAK (t));
1070 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
1071 hstate.add_flag (DECL_COMDAT (t));
1072 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
1073 hstate.add_int (DECL_VISIBILITY (t));
1074 if (code == VAR_DECL)
1075 {
1076 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1077 hstate.add_flag (DECL_HARD_REGISTER (t));
1078 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
1079 }
1080 if (TREE_CODE (t) == FUNCTION_DECL)
1081 {
1082 hstate.add_flag (DECL_FINAL_P (t));
1083 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
1084 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
1085 }
1086 hstate.commit_flag ();
1087 }
1088
1089 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1090 {
1091 hstate.add_int (DECL_BUILT_IN_CLASS (t));
1092 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
1093 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
1094 hstate.add_flag (DECL_UNINLINABLE (t));
1095 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
1096 hstate.add_flag (DECL_IS_NOVOPS (t));
1097 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
1098 hstate.add_flag (DECL_IS_MALLOC (t));
1099 hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
1100 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
1101 hstate.add_flag (DECL_STATIC_CHAIN (t));
1102 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
1103 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
1104 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
1105 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
1106 hstate.add_flag (DECL_PURE_P (t));
1107 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
1108 hstate.commit_flag ();
1109 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
1110 hstate.add_int (DECL_FUNCTION_CODE (t));
1111 }
1112
1113 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1114 {
1115 hstate.add_wide_int (TYPE_MODE (t));
1116 hstate.add_flag (TYPE_STRING_FLAG (t));
1117 /* TYPE_NO_FORCE_BLK is private to stor-layout and need
1118 no streaming. */
1119 hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
1120 hstate.add_flag (TYPE_PACKED (t));
1121 hstate.add_flag (TYPE_RESTRICT (t));
1122 hstate.add_flag (TYPE_USER_ALIGN (t));
1123 hstate.add_flag (TYPE_READONLY (t));
1124 if (RECORD_OR_UNION_TYPE_P (t))
1125 {
1126 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
1127 hstate.add_flag (TYPE_FINAL_P (t));
1128 }
1129 else if (code == ARRAY_TYPE)
1130 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
1131 hstate.commit_flag ();
1132 hstate.add_int (TYPE_PRECISION (t));
1133 hstate.add_int (TYPE_ALIGN (t));
1134 hstate.add_int ((TYPE_ALIAS_SET (t) == 0
1135 || (!in_lto_p
1136 && get_alias_set (t) == 0))
1137 ? 0 : -1);
1138 }
1139
1140 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1141 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
1142 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
1143
1144 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
1145 /* We don't stream these when passing things to a different target. */
1146 && !lto_stream_offload_p)
1147 hstate.add_wide_int (cl_target_option_hash (TREE_TARGET_OPTION (t)));
1148
1149 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1150 hstate.add_wide_int (cl_optimization_hash (TREE_OPTIMIZATION (t)));
1151
1152 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1153 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
1154
1155 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1156 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
1157
1158 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1159 {
1160 if (code != IDENTIFIER_NODE)
1161 visit (TREE_TYPE (t));
1162 }
1163
1164 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1165 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
1166 visit (VECTOR_CST_ELT (t, i));
1167
1168 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1169 {
1170 visit (TREE_REALPART (t));
1171 visit (TREE_IMAGPART (t));
1172 }
1173
1174 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1175 {
1176 /* Drop names that were created for anonymous entities. */
1177 if (DECL_NAME (t)
1178 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
1179 && anon_aggrname_p (DECL_NAME (t)))
1180 ;
1181 else
1182 visit (DECL_NAME (t));
1183 if (DECL_FILE_SCOPE_P (t))
1184 ;
1185 else
1186 visit (DECL_CONTEXT (t));
1187 }
1188
1189 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1190 {
1191 visit (DECL_SIZE (t));
1192 visit (DECL_SIZE_UNIT (t));
1193 visit (DECL_ATTRIBUTES (t));
1194 if ((code == VAR_DECL
1195 || code == PARM_DECL)
1196 && DECL_HAS_VALUE_EXPR_P (t))
1197 visit (DECL_VALUE_EXPR (t));
1198 if (code == VAR_DECL
1199 && DECL_HAS_DEBUG_EXPR_P (t))
1200 visit (DECL_DEBUG_EXPR (t));
1201 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
1202 be able to call get_symbol_initial_value. */
1203 }
1204
1205 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1206 {
1207 if (code == TYPE_DECL)
1208 visit (DECL_ORIGINAL_TYPE (t));
1209 }
1210
1211 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1212 {
1213 if (DECL_ASSEMBLER_NAME_SET_P (t))
1214 visit (DECL_ASSEMBLER_NAME (t));
1215 }
1216
1217 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1218 {
1219 visit (DECL_FIELD_OFFSET (t));
1220 visit (DECL_BIT_FIELD_TYPE (t));
1221 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1222 visit (DECL_FIELD_BIT_OFFSET (t));
1223 visit (DECL_FCONTEXT (t));
1224 }
1225
1226 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1227 {
1228 visit (DECL_VINDEX (t));
1229 visit (DECL_FUNCTION_PERSONALITY (t));
1230 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1231 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1232 }
1233
1234 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1235 {
1236 visit (TYPE_SIZE (t));
1237 visit (TYPE_SIZE_UNIT (t));
1238 visit (TYPE_ATTRIBUTES (t));
1239 visit (TYPE_NAME (t));
1240 visit (TYPE_MAIN_VARIANT (t));
1241 if (TYPE_FILE_SCOPE_P (t))
1242 ;
1243 else
1244 visit (TYPE_CONTEXT (t));
1245 visit (TYPE_STUB_DECL (t));
1246 }
1247
1248 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1249 {
1250 if (code == ENUMERAL_TYPE)
1251 visit (TYPE_VALUES (t));
1252 else if (code == ARRAY_TYPE)
1253 visit (TYPE_DOMAIN (t));
1254 else if (RECORD_OR_UNION_TYPE_P (t))
1255 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1256 visit (f);
1257 else if (code == FUNCTION_TYPE
1258 || code == METHOD_TYPE)
1259 visit (TYPE_ARG_TYPES (t));
1260 if (!POINTER_TYPE_P (t))
1261 visit (TYPE_MINVAL (t));
1262 visit (TYPE_MAXVAL (t));
1263 if (RECORD_OR_UNION_TYPE_P (t))
1264 visit (TYPE_BINFO (t));
1265 }
1266
1267 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1268 {
1269 visit (TREE_PURPOSE (t));
1270 visit (TREE_VALUE (t));
1271 visit (TREE_CHAIN (t));
1272 }
1273
1274 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1275 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1276 visit (TREE_VEC_ELT (t, i));
1277
1278 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1279 {
1280 hstate.add_wide_int (TREE_OPERAND_LENGTH (t));
1281 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1282 visit (TREE_OPERAND (t, i));
1283 }
1284
1285 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1286 {
1287 unsigned i;
1288 tree b;
1289 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1290 visit (b);
1291 visit (BINFO_OFFSET (t));
1292 visit (BINFO_VTABLE (t));
1293 visit (BINFO_VPTR_FIELD (t));
1294 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1295 visit (b);
1296 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1297 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1298 }
1299
1300 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1301 {
1302 unsigned i;
1303 tree index, value;
1304 hstate.add_wide_int (CONSTRUCTOR_NELTS (t));
1305 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1306 {
1307 visit (index);
1308 visit (value);
1309 }
1310 }
1311
1312 if (code == OMP_CLAUSE)
1313 {
1314 int i;
1315 HOST_WIDE_INT val;
1316
1317 hstate.add_wide_int (OMP_CLAUSE_CODE (t));
1318 switch (OMP_CLAUSE_CODE (t))
1319 {
1320 case OMP_CLAUSE_DEFAULT:
1321 val = OMP_CLAUSE_DEFAULT_KIND (t);
1322 break;
1323 case OMP_CLAUSE_SCHEDULE:
1324 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1325 break;
1326 case OMP_CLAUSE_DEPEND:
1327 val = OMP_CLAUSE_DEPEND_KIND (t);
1328 break;
1329 case OMP_CLAUSE_MAP:
1330 val = OMP_CLAUSE_MAP_KIND (t);
1331 break;
1332 case OMP_CLAUSE_PROC_BIND:
1333 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1334 break;
1335 case OMP_CLAUSE_REDUCTION:
1336 val = OMP_CLAUSE_REDUCTION_CODE (t);
1337 break;
1338 default:
1339 val = 0;
1340 break;
1341 }
1342 hstate.add_wide_int (val);
1343 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1344 visit (OMP_CLAUSE_OPERAND (t, i));
1345 visit (OMP_CLAUSE_CHAIN (t));
1346 }
1347
1348 return hstate.end ();
1349
1350 #undef visit
1351 }
1352
1353 /* Compare two SCC entries by their hash value for qsorting them. */
1354
1355 int
1356 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1357 {
1358 const scc_entry *p1 = (const scc_entry *) p1_;
1359 const scc_entry *p2 = (const scc_entry *) p2_;
1360 if (p1->hash < p2->hash)
1361 return -1;
1362 else if (p1->hash > p2->hash)
1363 return 1;
1364 return 0;
1365 }
1366
/* Return a hash value for the SCC occupying SCCSTACK slots
   [FIRST, FIRST + SIZE).  OB supplies the writer cache used by
   hash_tree.  As a side effect the per-entry hash values in
   SCCSTACK are updated so that they are unique within the SCC
   (mixed with the overall SCC hash at the end).  */

hashval_t
DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size)
{
  unsigned int last_classes = 0, iterations = 0;

  /* Compute hash values for the SCC members.  */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash
      = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);

  /* A singleton SCC needs no disambiguation work.  */
  if (size == 1)
    return sccstack[first].hash;

  /* We aim to get unique hash for every tree within SCC and compute hash value
     of the whole SCC by combining all values together in a stable (entry-point
     independent) order.  This guarantees that the same SCC regions within
     different translation units will get the same hash values and therefore
     will be merged at WPA time.

     Often the hashes are already unique.  In that case we compute the SCC hash
     by combining individual hash values in an increasing order.

     If there are duplicates, we seek at least one tree with unique hash (and
     pick one with minimal hash and this property).  Then we obtain a stable
     order by DFS walk starting from this unique tree and then use the index
     within this order to make individual hash values unique.

     If there is no tree with unique hash, we iteratively propagate the hash
     values across the internal edges of SCC.  This usually quickly leads
     to unique hashes.  Consider, for example, an SCC containing two pointers
     that are identical except for the types they point to and assume that
     these types are also part of the SCC.  The propagation will add the
     points-to type information into their hash values.  */
  do
    {
      /* Sort the SCC so we can easily check for uniqueness.  */
      qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);

      unsigned int classes = 1;
      int firstunique = -1;

      /* Find the tree with lowest unique hash (if it exists) and compute
	 the number of equivalence classes.  */
      if (sccstack[first].hash != sccstack[first+1].hash)
	firstunique = 0;
      for (unsigned i = 1; i < size; ++i)
	if (sccstack[first+i-1].hash != sccstack[first+i].hash)
	  {
	    classes++;
	    /* Entry I is unique if it also differs from its successor
	       (or is the last entry).  */
	    if (firstunique == -1
		&& (i == size - 1
		    || sccstack[first+i+1].hash != sccstack[first+i].hash))
	      firstunique = i;
	  }

      /* If we found a tree with unique hash, stop the iteration.  */
      if (firstunique != -1
	  /* Also terminate if we run out of iterations or if the number of
	     equivalence classes is no longer increasing.
	     For example a cyclic list of trees that are all equivalent will
	     never have unique entry point; we however do not build such SCCs
	     in our IL.  */
	  || classes <= last_classes || iterations > 16)
	{
	  hashval_t scc_hash;

	  /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
	     starting from FIRSTUNIQUE to obtain a stable order.  */
	  if (classes != size && firstunique != -1)
	    {
	      hash_map <tree, hashval_t> map(size*2);

	      /* Store hash values into a map, so we can associate them with
		 the reordered SCC.  */
	      for (unsigned i = 0; i < size; ++i)
		map.put (sccstack[first+i].t, sccstack[first+i].hash);

	      /* Re-run the DFS from the unique entry point; this yields
		 an entry-point-independent order of the SCC members.  */
	      DFS again (ob, sccstack[first+firstunique].t, false, false, true);
	      gcc_assert (again.sccstack.length () == size);

	      memcpy (sccstack.address () + first,
		      again.sccstack.address (),
		      sizeof (scc_entry) * size);

	      /* Update hash values of individual members by hashing in the
		 index within the stable order.  This ensures uniqueness.
		 Also compute the SCC hash by mixing in all hash values in
		 the stable order we obtained.  */
	      sccstack[first].hash = *map.get (sccstack[first].t);
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		{
		  sccstack[first+i].hash
		    = iterative_hash_hashval_t (i,
						*map.get (sccstack[first+i].t));
		  scc_hash
		    = iterative_hash_hashval_t (scc_hash,
						sccstack[first+i].hash);
		}
	    }
	  /* If we got a unique hash value for each tree, then sort already
	     ensured entry-point independent order.  Only compute the final
	     SCC hash.

	     If we failed to find the unique entry point, we go by the same
	     route.  We will eventually introduce unwanted hash conflicts.  */
	  else
	    {
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		scc_hash
		  = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);

	      /* We cannot 100% guarantee that the hash won't conflict so as
		 to make it impossible to find a unique hash.  This however
		 should be an extremely rare case.  ICE for now so possible
		 issues are found and evaluated.  */
	      gcc_checking_assert (classes == size);
	    }

	  /* To avoid conflicts across SCCs, iteratively hash the whole SCC
	     hash into the hash of each element.  */
	  for (unsigned i = 0; i < size; ++i)
	    sccstack[first+i].hash
	      = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
	  return scc_hash;
	}

      last_classes = classes;
      iterations++;

      /* We failed to identify the entry point; propagate hash values across
	 the edges.  */
      hash_map <tree, hashval_t> map(size*2);

      for (unsigned i = 0; i < size; ++i)
	map.put (sccstack[first+i].t, sccstack[first+i].hash);

      /* Re-hash each member with the neighbor hashes visible via MAP.  */
      for (unsigned i = 0; i < size; i++)
	sccstack[first+i].hash
	  = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
    }
  while (true);
}
1513
1514 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1515 already in the streamer cache. Main routine called for
1516 each visit of EXPR. */
1517
1518 void
1519 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1520 tree expr, bool ref_p, bool this_ref_p)
1521 {
1522 /* Handle special cases. */
1523 if (expr == NULL_TREE)
1524 return;
1525
1526 /* Do not DFS walk into indexable trees. */
1527 if (this_ref_p && tree_is_indexable (expr))
1528 return;
1529
1530 /* Check if we already streamed EXPR. */
1531 if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1532 return;
1533
1534 worklist w;
1535 w.expr = expr;
1536 w.from_state = from_state;
1537 w.cstate = NULL;
1538 w.ref_p = ref_p;
1539 w.this_ref_p = this_ref_p;
1540 worklist_vec.safe_push (w);
1541 }
1542
1543
/* Emit the physical representation of tree node EXPR to output block
   OB.  If THIS_REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.  */

void
lto_output_tree (struct output_block *ob, tree expr,
		 bool ref_p, bool this_ref_p)
{
  unsigned ix;
  bool existed_p;

  /* NULL trees get their own record tag.  */
  if (expr == NULL_TREE)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  /* Indexable trees are emitted as references into their own
     sections rather than pickled inline.  */
  if (this_ref_p && tree_is_indexable (expr))
    {
      lto_output_tree_ref (ob, expr);
      return;
    }

  existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
  if (existed_p)
    {
      /* If a node has already been streamed out, make sure that
	 we don't write it more than once.  Otherwise, the reader
	 will instantiate two different nodes for the same object.  */
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
  else
    {
      /* This is the first time we see EXPR, write all reachable
	 trees to OB.  */
      static bool in_dfs_walk;

      /* Protect against recursion which means disconnect between
	 what tree edges we walk in the DFS walk and what edges
	 we stream out.  */
      gcc_assert (!in_dfs_walk);

      /* Start the DFS walk.  The DFS constructor performs the walk
	 and streams out every SCC of trees reachable from EXPR that
	 is not yet in the writer cache.  */
      in_dfs_walk = true;
      DFS (ob, expr, ref_p, this_ref_p, false);
      in_dfs_walk = false;

      /* Finally append a reference to the tree we were writing.
	 ??? If expr ended up as a singleton we could have
	 inlined it here and avoid outputting a reference.  */
      existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
      gcc_assert (existed_p);
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
}
1609
1610
1611 /* Output to OB a list of try/catch handlers starting with FIRST. */
1612
1613 static void
1614 output_eh_try_list (struct output_block *ob, eh_catch first)
1615 {
1616 eh_catch n;
1617
1618 for (n = first; n; n = n->next_catch)
1619 {
1620 streamer_write_record_start (ob, LTO_eh_catch);
1621 stream_write_tree (ob, n->type_list, true);
1622 stream_write_tree (ob, n->filter_list, true);
1623 stream_write_tree (ob, n->label, true);
1624 }
1625
1626 streamer_write_record_start (ob, LTO_null);
1627 }
1628
1629
/* Output EH region R to OB.  If R is NULL, an LTO_null record is
   emitted instead.  Neighboring regions and landing pads are
   referenced by their indices so the reader can rebuild the region
   tree and detect EH region sharing.  */

static void
output_eh_region (struct output_block *ob, eh_region r)
{
  enum LTO_tags tag;

  if (r == NULL)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  /* Select the record tag from the region type.  */
  if (r->type == ERT_CLEANUP)
    tag = LTO_ert_cleanup;
  else if (r->type == ERT_TRY)
    tag = LTO_ert_try;
  else if (r->type == ERT_ALLOWED_EXCEPTIONS)
    tag = LTO_ert_allowed_exceptions;
  else if (r->type == ERT_MUST_NOT_THROW)
    tag = LTO_ert_must_not_throw;
  else
    gcc_unreachable ();

  streamer_write_record_start (ob, tag);
  streamer_write_hwi (ob, r->index);

  /* Emit the indices of the neighboring regions; zero stands for
     "no such region".  */
  if (r->outer)
    streamer_write_hwi (ob, r->outer->index);
  else
    streamer_write_zero (ob);

  if (r->inner)
    streamer_write_hwi (ob, r->inner->index);
  else
    streamer_write_zero (ob);

  if (r->next_peer)
    streamer_write_hwi (ob, r->next_peer->index);
  else
    streamer_write_zero (ob);

  /* Emit the type-specific payload.  ERT_CLEANUP has none.  */
  if (r->type == ERT_TRY)
    {
      output_eh_try_list (ob, r->u.eh_try.first_catch);
    }
  else if (r->type == ERT_ALLOWED_EXCEPTIONS)
    {
      stream_write_tree (ob, r->u.allowed.type_list, true);
      stream_write_tree (ob, r->u.allowed.label, true);
      streamer_write_uhwi (ob, r->u.allowed.filter);
    }
  else if (r->type == ERT_MUST_NOT_THROW)
    {
      stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
      bitpack_d bp = bitpack_create (ob->main_stream);
      stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
      streamer_write_bitpack (&bp);
    }

  /* Reference the first landing pad by index, or zero if none.  */
  if (r->landing_pads)
    streamer_write_hwi (ob, r->landing_pads->index);
  else
    streamer_write_zero (ob);
}
1697
1698
1699 /* Output landing pad LP to OB. */
1700
1701 static void
1702 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1703 {
1704 if (lp == NULL)
1705 {
1706 streamer_write_record_start (ob, LTO_null);
1707 return;
1708 }
1709
1710 streamer_write_record_start (ob, LTO_eh_landing_pad);
1711 streamer_write_hwi (ob, lp->index);
1712 if (lp->next_lp)
1713 streamer_write_hwi (ob, lp->next_lp->index);
1714 else
1715 streamer_write_zero (ob);
1716
1717 if (lp->region)
1718 streamer_write_hwi (ob, lp->region->index);
1719 else
1720 streamer_write_zero (ob);
1721
1722 stream_write_tree (ob, lp->post_landing_pad, true);
1723 }
1724
1725
1726 /* Output the existing eh_table to OB. */
1727
1728 static void
1729 output_eh_regions (struct output_block *ob, struct function *fn)
1730 {
1731 if (fn->eh && fn->eh->region_tree)
1732 {
1733 unsigned i;
1734 eh_region eh;
1735 eh_landing_pad lp;
1736 tree ttype;
1737
1738 streamer_write_record_start (ob, LTO_eh_table);
1739
1740 /* Emit the index of the root of the EH region tree. */
1741 streamer_write_hwi (ob, fn->eh->region_tree->index);
1742
1743 /* Emit all the EH regions in the region array. */
1744 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1745 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1746 output_eh_region (ob, eh);
1747
1748 /* Emit all landing pads. */
1749 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1750 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1751 output_eh_lp (ob, lp);
1752
1753 /* Emit all the runtime type data. */
1754 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1755 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1756 stream_write_tree (ob, ttype, true);
1757
1758 /* Emit the table of action chains. */
1759 if (targetm.arm_eabi_unwinder)
1760 {
1761 tree t;
1762 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1763 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1764 stream_write_tree (ob, t, true);
1765 }
1766 else
1767 {
1768 uchar c;
1769 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1770 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1771 streamer_write_char_stream (ob->main_stream, c);
1772 }
1773 }
1774
1775 /* The LTO_null either terminates the record or indicates that there
1776 are no eh_records at all. */
1777 streamer_write_record_start (ob, LTO_null);
1778 }
1779
1780
1781 /* Output all of the active ssa names to the ssa_names stream. */
1782
1783 static void
1784 output_ssa_names (struct output_block *ob, struct function *fn)
1785 {
1786 unsigned int i, len;
1787
1788 len = vec_safe_length (SSANAMES (fn));
1789 streamer_write_uhwi (ob, len);
1790
1791 for (i = 1; i < len; i++)
1792 {
1793 tree ptr = (*SSANAMES (fn))[i];
1794
1795 if (ptr == NULL_TREE
1796 || SSA_NAME_IN_FREE_LIST (ptr)
1797 || virtual_operand_p (ptr))
1798 continue;
1799
1800 streamer_write_uhwi (ob, i);
1801 streamer_write_char_stream (ob->main_stream,
1802 SSA_NAME_IS_DEFAULT_DEF (ptr));
1803 if (SSA_NAME_VAR (ptr))
1804 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1805 else
1806 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1807 stream_write_tree (ob, TREE_TYPE (ptr), true);
1808 }
1809
1810 streamer_write_zero (ob);
1811 }
1812
1813
1814 /* Output a wide-int. */
1815
1816 static void
1817 streamer_write_wi (struct output_block *ob,
1818 const widest_int &w)
1819 {
1820 int len = w.get_len ();
1821
1822 streamer_write_uhwi (ob, w.get_precision ());
1823 streamer_write_uhwi (ob, len);
1824 for (int i = 0; i < len; i++)
1825 streamer_write_hwi (ob, w.elt (i));
1826 }
1827
1828
1829 /* Output the cfg. */
1830
1831 static void
1832 output_cfg (struct output_block *ob, struct function *fn)
1833 {
1834 struct lto_output_stream *tmp_stream = ob->main_stream;
1835 basic_block bb;
1836
1837 ob->main_stream = ob->cfg_stream;
1838
1839 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1840 profile_status_for_fn (fn));
1841
1842 /* Output the number of the highest basic block. */
1843 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1844
1845 FOR_ALL_BB_FN (bb, fn)
1846 {
1847 edge_iterator ei;
1848 edge e;
1849
1850 streamer_write_hwi (ob, bb->index);
1851
1852 /* Output the successors and the edge flags. */
1853 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1854 FOR_EACH_EDGE (e, ei, bb->succs)
1855 {
1856 streamer_write_uhwi (ob, e->dest->index);
1857 streamer_write_hwi (ob, e->probability);
1858 streamer_write_gcov_count (ob, e->count);
1859 streamer_write_uhwi (ob, e->flags);
1860 }
1861 }
1862
1863 streamer_write_hwi (ob, -1);
1864
1865 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1866 while (bb->next_bb)
1867 {
1868 streamer_write_hwi (ob, bb->next_bb->index);
1869 bb = bb->next_bb;
1870 }
1871
1872 streamer_write_hwi (ob, -1);
1873
1874 /* ??? The cfgloop interface is tied to cfun. */
1875 gcc_assert (cfun == fn);
1876
1877 /* Output the number of loops. */
1878 streamer_write_uhwi (ob, number_of_loops (fn));
1879
1880 /* Output each loop, skipping the tree root which has number zero. */
1881 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1882 {
1883 struct loop *loop = get_loop (fn, i);
1884
1885 /* Write the index of the loop header. That's enough to rebuild
1886 the loop tree on the reader side. Stream -1 for an unused
1887 loop entry. */
1888 if (!loop)
1889 {
1890 streamer_write_hwi (ob, -1);
1891 continue;
1892 }
1893 else
1894 streamer_write_hwi (ob, loop->header->index);
1895
1896 /* Write everything copy_loop_info copies. */
1897 streamer_write_enum (ob->main_stream,
1898 loop_estimation, EST_LAST, loop->estimate_state);
1899 streamer_write_hwi (ob, loop->any_upper_bound);
1900 if (loop->any_upper_bound)
1901 streamer_write_wi (ob, loop->nb_iterations_upper_bound);
1902 streamer_write_hwi (ob, loop->any_estimate);
1903 if (loop->any_estimate)
1904 streamer_write_wi (ob, loop->nb_iterations_estimate);
1905
1906 /* Write OMP SIMD related info. */
1907 streamer_write_hwi (ob, loop->safelen);
1908 streamer_write_hwi (ob, loop->dont_vectorize);
1909 streamer_write_hwi (ob, loop->force_vectorize);
1910 stream_write_tree (ob, loop->simduid, true);
1911 }
1912
1913 ob->main_stream = tmp_stream;
1914 }
1915
1916
1917 /* Create the header in the file using OB. If the section type is for
1918 a function, set FN to the decl for that function. */
1919
1920 void
1921 produce_asm (struct output_block *ob, tree fn)
1922 {
1923 enum lto_section_type section_type = ob->section_type;
1924 struct lto_function_header header;
1925 char *section_name;
1926
1927 if (section_type == LTO_section_function_body)
1928 {
1929 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1930 section_name = lto_get_section_name (section_type, name, NULL);
1931 }
1932 else
1933 section_name = lto_get_section_name (section_type, NULL, NULL);
1934
1935 lto_begin_section (section_name, !flag_wpa);
1936 free (section_name);
1937
1938 /* The entire header is stream computed here. */
1939 memset (&header, 0, sizeof (struct lto_function_header));
1940
1941 /* Write the header. */
1942 header.major_version = LTO_major_version;
1943 header.minor_version = LTO_minor_version;
1944
1945 if (section_type == LTO_section_function_body)
1946 header.cfg_size = ob->cfg_stream->total_size;
1947 header.main_size = ob->main_stream->total_size;
1948 header.string_size = ob->string_stream->total_size;
1949 lto_write_data (&header, sizeof header);
1950
1951 /* Put all of the gimple and the string table out the asm file as a
1952 block of text. */
1953 if (section_type == LTO_section_function_body)
1954 lto_write_stream (ob->cfg_stream);
1955 lto_write_stream (ob->main_stream);
1956 lto_write_stream (ob->string_stream);
1957
1958 lto_end_section ();
1959 }
1960
1961
/* Output the base body of struct function FN using output block OB.
   NOTE: the order of the streamed fields and of the bp_pack_value
   calls below is part of the on-disk format; it must stay in sync
   with the corresponding reader routine.  */

static void
output_struct_function_base (struct output_block *ob, struct function *fn)
{
  struct bitpack_d bp;
  unsigned i;
  tree t;

  /* Output the static chain and non-local goto save area.  */
  stream_write_tree (ob, fn->static_chain_decl, true);
  stream_write_tree (ob, fn->nonlocal_goto_save_area, true);

  /* Output all the local variables in the function.  */
  streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
  FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
    stream_write_tree (ob, t, true);

  /* Output current IL state of the function.  */
  streamer_write_uhwi (ob, fn->curr_properties);

  /* Write all the attributes for FN as a single bitpack.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, fn->is_thunk, 1);
  bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
  bp_pack_value (&bp, fn->returns_pcc_struct, 1);
  bp_pack_value (&bp, fn->returns_struct, 1);
  bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
  bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
  bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
  bp_pack_value (&bp, fn->after_inlining, 1);
  bp_pack_value (&bp, fn->stdarg, 1);
  bp_pack_value (&bp, fn->has_nonlocal_label, 1);
  bp_pack_value (&bp, fn->calls_alloca, 1);
  bp_pack_value (&bp, fn->calls_setjmp, 1);
  bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
  bp_pack_value (&bp, fn->has_simduid_loops, 1);
  bp_pack_value (&bp, fn->va_list_fpr_size, 8);
  bp_pack_value (&bp, fn->va_list_gpr_size, 8);
  bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);

  /* Output the function start and end loci.  */
  stream_output_location (ob, &bp, fn->function_start_locus);
  stream_output_location (ob, &bp, fn->function_end_locus);

  streamer_write_bitpack (&bp);
}
2009
2010
/* Output the body of function NODE->DECL to a new LTO_section_function_body
   section.  Streams the result/argument decls, the lexical scope tree, and
   (when a gimple body exists) the function base, SSA names, EH regions,
   renumbered statements and the CFG.  */

static void
output_function (struct cgraph_node *node)
{
  tree function;
  struct function *fn;
  basic_block bb;
  struct output_block *ob;

  function = node->decl;
  fn = DECL_STRUCT_FUNCTION (function);
  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->symbol = node;

  /* No other function may be set up while we stream this one.  */
  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  /* Set current_function_decl and cfun.  */
  push_cfun (fn);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  streamer_write_record_start (ob, LTO_function);

  /* Output decls for parameters and args.  */
  stream_write_tree (ob, DECL_RESULT (function), true);
  streamer_write_chain (ob, DECL_ARGUMENTS (function), true);

  /* Output DECL_INITIAL for the function, which contains the tree of
     lexical scopes.  */
  stream_write_tree (ob, DECL_INITIAL (function), true);

  /* We also stream abstract functions where we stream only stuff needed for
     debug info.  */
  if (gimple_has_body_p (function))
    {
      /* Flag for the reader: 1 = a full body follows.  */
      streamer_write_uhwi (ob, 1);
      output_struct_function_base (ob, fn);

      /* Output all the SSA names used in the function.  */
      output_ssa_names (ob, fn);

      /* Output any exception handling regions.  */
      output_eh_regions (ob, fn);


      /* We will renumber the statements.  The code that does this uses
	 the same ordering that we use for serializing them so we can use
	 the same code on the other end and not have to write out the
	 statement numbers.  We do not assign UIDs to PHIs here because
	 virtual PHIs get re-computed on-the-fly which would make numbers
	 inconsistent.  */
      set_gimple_stmt_max_uid (cfun, 0);
      FOR_ALL_BB_FN (bb, cfun)
	{
	  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gphi *stmt = gsi.phi ();

	      /* Virtual PHIs are not going to be streamed.  */
	      if (!virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}
      /* To avoid keeping duplicate gimple IDs in the statements, renumber
	 virtual phis now.  */
      FOR_ALL_BB_FN (bb, cfun)
	{
	  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gphi *stmt = gsi.phi ();
	      if (virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}

      /* Output the code for the function.  */
      FOR_ALL_BB_FN (bb, fn)
	output_bb (ob, bb, fn);

      /* The terminator for this function.  */
      streamer_write_record_start (ob, LTO_null);

      output_cfg (ob, fn);

      pop_cfun ();
   }
  else
    /* Flag for the reader: 0 = abstract function, no body streamed.  */
    streamer_write_uhwi (ob, 0);

  /* Create a section to hold the pickled output of this function.  */
  produce_asm (ob, function);

  destroy_output_block (ob);
}
2117
2118 /* Output the body of function NODE->DECL. */
2119
2120 static void
2121 output_constructor (struct varpool_node *node)
2122 {
2123 tree var = node->decl;
2124 struct output_block *ob;
2125
2126 ob = create_output_block (LTO_section_function_body);
2127
2128 clear_line_info (ob);
2129 ob->symbol = node;
2130
2131 /* Make string 0 be a NULL string. */
2132 streamer_write_char_stream (ob->string_stream, 0);
2133
2134 /* Output DECL_INITIAL for the function, which contains the tree of
2135 lexical scopes. */
2136 stream_write_tree (ob, DECL_INITIAL (var), true);
2137
2138 /* Create a section to hold the pickled output of this function. */
2139 produce_asm (ob, var);
2140
2141 destroy_output_block (ob);
2142 }
2143
2144
/* Emit toplevel asms.  Streams each asm statement together with its
   symbol-table order into an LTO_section_asm section, preceded by a
   simple header with string table.  */

void
lto_output_toplevel_asms (void)
{
  struct output_block *ob;
  struct asm_node *can;
  char *section_name;
  struct lto_simple_header_with_strings header;

  /* Nothing to do when the unit has no toplevel asm statements.  */
  if (!symtab->first_asm_symbol ())
    return;

  ob = create_output_block (LTO_section_asm);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* Each record is the asm text plus its order so the reader can
     reinsert it at the right place.  */
  for (can = symtab->first_asm_symbol (); can; can = can->next)
    {
      streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
      streamer_write_hwi (ob, can->order);
    }

  /* A NULL string terminates the list of asm records.  */
  streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);

  section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header stream is computed here.  */
  memset (&header, 0, sizeof (header));

  /* Write the header.  */
  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;
  lto_write_data (&header, sizeof header);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  destroy_output_block (ob);
}
2195
2196
/* Copy the function body or variable constructor of NODE without
   deserializing: the still-pickled input section is forwarded verbatim
   and the in-decl-state reference vectors are copied into the out
   state.  (Only reached when flag_wpa is set; see lto_output.)  */

static void
copy_function_or_variable (struct symtab_node *node)
{
  tree function = node->decl;
  struct lto_file_decl_data *file_data = node->lto_file_data;
  const char *data;
  size_t len;
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
  char *section_name =
    lto_get_section_name (LTO_section_function_body, name, NULL);
  size_t i, j;
  struct lto_in_decl_state *in_state;
  struct lto_out_decl_state *out_state = lto_get_out_decl_state ();

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* We may have renamed the declaration, e.g., a static function.  */
  name = lto_get_decl_name_mapping (file_data, name);

  data = lto_get_section_data (file_data, LTO_section_function_body,
                               name, &len);
  gcc_assert (data);

  /* Do a bit copy of the function body.  */
  lto_write_data (data, len);

  /* Copy decls.  */
  in_state =
    lto_get_function_in_decl_state (node->lto_file_data, function);
  gcc_assert (in_state);

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      size_t n = vec_safe_length (in_state->streams[i]);
      vec<tree, va_gc> *trees = in_state->streams[i];
      struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);

      /* The out state must have the same indices and the in state.
	 So just copy the vector.  All the encoders in the in state
	 must be empty where we reach here.  */
      gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
      encoder->trees.reserve_exact (n);
      for (j = 0; j < n; j++)
	encoder->trees.safe_push ((*trees)[j]);
    }

  lto_free_section_data (file_data, LTO_section_function_body, name,
			 data, len);
  lto_end_section ();
}
2250
2251 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2252
2253 static tree
2254 wrap_refs (tree *tp, int *ws, void *)
2255 {
2256 tree t = *tp;
2257 if (handled_component_p (t)
2258 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL)
2259 {
2260 tree decl = TREE_OPERAND (t, 0);
2261 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2262 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2263 build1 (ADDR_EXPR, ptrtype, decl),
2264 build_int_cst (ptrtype, 0));
2265 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2266 *ws = 0;
2267 }
2268 else if (TREE_CODE (t) == CONSTRUCTOR)
2269 ;
2270 else if (!EXPR_P (t))
2271 *ws = 0;
2272 return NULL_TREE;
2273 }
2274
/* Main entry point from the pass manager.  Stream out (or forward
   still-pickled) bodies of all functions and variable initializers in
   the symtab encoder, then emit the symtab and offload tables.  */

void
lto_output (void)
{
  struct lto_out_decl_state *decl_state;
#ifdef ENABLE_CHECKING
  /* DECL_UIDs already written, to assert each symbol is output once.  */
  bitmap output = lto_bitmap_alloc ();
#endif
  int i, n_nodes;
  lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* Initialize the streamer.  */
  lto_streamer_init ();

  n_nodes = lto_symtab_encoder_size (encoder);
  /* Process only the functions with bodies.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
	{
	  if (lto_symtab_encoder_encode_body_p (encoder, node)
	      && !node->alias)
	    {
#ifdef ENABLE_CHECKING
	      gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
	      bitmap_set_bit (output, DECL_UID (node->decl));
#endif
	      /* Each body gets its own out-decl-state so that its decl
		 references are numbered independently.  */
	      decl_state = lto_new_out_decl_state ();
	      lto_push_out_decl_state (decl_state);
	      if (gimple_has_body_p (node->decl) || !flag_wpa
		  /* Thunks have no body but they may be synthetized
		     at WPA time.  */
		  || DECL_ARGUMENTS (node->decl))
		output_function (node);
	      else
		/* At WPA time a still-pickled body can be copied
		   verbatim instead of re-serialized.  */
		copy_function_or_variable (node);
	      gcc_assert (lto_get_out_decl_state () == decl_state);
	      lto_pop_out_decl_state ();
	      lto_record_function_out_decl_state (node->decl, decl_state);
	    }
	}
      else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
	{
	  /* Wrap symbol references inside the ctor in a type
	     preserving MEM_REF.  */
	  tree ctor = DECL_INITIAL (node->decl);
	  if (ctor && !in_lto_p)
	    walk_tree (&ctor, wrap_refs, NULL, NULL);
	  if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
	      && lto_symtab_encoder_encode_initializer_p (encoder, node)
	      && !node->alias)
	    {
	      timevar_push (TV_IPA_LTO_CTORS_OUT);
#ifdef ENABLE_CHECKING
	      gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
	      bitmap_set_bit (output, DECL_UID (node->decl));
#endif
	      decl_state = lto_new_out_decl_state ();
	      lto_push_out_decl_state (decl_state);
	      if (DECL_INITIAL (node->decl) != error_mark_node
		  || !flag_wpa)
		output_constructor (node);
	      else
		copy_function_or_variable (node);
	      gcc_assert (lto_get_out_decl_state () == decl_state);
	      lto_pop_out_decl_state ();
	      lto_record_function_out_decl_state (node->decl, decl_state);
	      timevar_pop (TV_IPA_LTO_CTORS_OUT);
	    }
	}
    }

  /* Emit the callgraph after emitting function bodies.  This needs to
     be done now to make sure that all the statements in every function
     have been renumbered so that edges can be associated with call
     statements using the statement UIDs.  */
  output_symtab ();

  output_offload_tables ();

#ifdef ENABLE_CHECKING
  lto_bitmap_free (output);
#endif
}
2361
2362 /* Write each node in encoded by ENCODER to OB, as well as those reachable
2363 from it and required for correct representation of its semantics.
2364 Each node in ENCODER must be a global declaration or a type. A node
2365 is written only once, even if it appears multiple times in the
2366 vector. Certain transitively-reachable nodes, such as those
2367 representing expressions, may be duplicated, but such nodes
2368 must not appear in ENCODER itself. */
2369
2370 static void
2371 write_global_stream (struct output_block *ob,
2372 struct lto_tree_ref_encoder *encoder)
2373 {
2374 tree t;
2375 size_t index;
2376 const size_t size = lto_tree_ref_encoder_size (encoder);
2377
2378 for (index = 0; index < size; index++)
2379 {
2380 t = lto_tree_ref_encoder_get_tree (encoder, index);
2381 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2382 stream_write_tree (ob, t, false);
2383 }
2384 }
2385
2386
2387 /* Write a sequence of indices into the globals vector corresponding
2388 to the trees in ENCODER. These are used by the reader to map the
2389 indices used to refer to global entities within function bodies to
2390 their referents. */
2391
2392 static void
2393 write_global_references (struct output_block *ob,
2394 struct lto_tree_ref_encoder *encoder)
2395 {
2396 tree t;
2397 uint32_t index;
2398 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2399
2400 /* Write size and slot indexes as 32-bit unsigned numbers. */
2401 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2402 data[0] = size;
2403
2404 for (index = 0; index < size; index++)
2405 {
2406 uint32_t slot_num;
2407
2408 t = lto_tree_ref_encoder_get_tree (encoder, index);
2409 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2410 gcc_assert (slot_num != (unsigned)-1);
2411 data[index + 1] = slot_num;
2412 }
2413
2414 lto_write_data (data, sizeof (int32_t) * (size + 1));
2415 free (data);
2416 }
2417
2418
2419 /* Write all the streams in an lto_out_decl_state STATE using
2420 output block OB and output stream OUT_STREAM. */
2421
2422 void
2423 lto_output_decl_state_streams (struct output_block *ob,
2424 struct lto_out_decl_state *state)
2425 {
2426 int i;
2427
2428 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2429 write_global_stream (ob, &state->streams[i]);
2430 }
2431
2432
2433 /* Write all the references in an lto_out_decl_state STATE using
2434 output block OB and output stream OUT_STREAM. */
2435
2436 void
2437 lto_output_decl_state_refs (struct output_block *ob,
2438 struct lto_out_decl_state *state)
2439 {
2440 unsigned i;
2441 uint32_t ref;
2442 tree decl;
2443
2444 /* Write reference to FUNCTION_DECL. If there is not function,
2445 write reference to void_type_node. */
2446 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2447 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2448 gcc_assert (ref != (unsigned)-1);
2449 lto_write_data (&ref, sizeof (uint32_t));
2450
2451 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2452 write_global_references (ob, &state->streams[i]);
2453 }
2454
2455
2456 /* Return the written size of STATE. */
2457
2458 static size_t
2459 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2460 {
2461 int i;
2462 size_t size;
2463
2464 size = sizeof (int32_t); /* fn_ref. */
2465 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2466 {
2467 size += sizeof (int32_t); /* vector size. */
2468 size += (lto_tree_ref_encoder_size (&state->streams[i])
2469 * sizeof (int32_t));
2470 }
2471 return size;
2472 }
2473
2474
/* Write symbol T into STREAM in CACHE.  SEEN specifies symbols we wrote
   so far.  ALIAS relaxes the "definition has a symtab node" assertions
   below (the symbol is written on behalf of an alias).

   Record layout: NUL-terminated assembler name, NUL-terminated comdat
   group (possibly empty), one byte each for kind and visibility, the
   64-bit common-symbol size and the 32-bit tree cache slot.  */

static void
write_symbol (struct streamer_tree_cache_d *cache,
	      tree t, hash_set<const char *> *seen, bool alias)
{
  const char *name;
  enum gcc_plugin_symbol_kind kind;
  enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
  unsigned slot_num;
  uint64_t size;
  const char *comdat;
  unsigned char c;

  /* None of the following kinds of symbols are needed in the
     symbol table.  */
  if (!TREE_PUBLIC (t)
      || is_builtin_fn (t)
      || DECL_ABSTRACT_P (t)
      || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
    return;
  gcc_assert (TREE_CODE (t) != RESULT_DECL);

  gcc_assert (TREE_CODE (t) == VAR_DECL
	      || TREE_CODE (t) == FUNCTION_DECL);

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));

  /* This behaves like assemble_name_raw in varasm.c, performing the
     same name manipulations that ASM_OUTPUT_LABELREF does.  */
  name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));

  /* Emit each assembler name at most once.  */
  if (seen->add (name))
    return;

  streamer_tree_cache_lookup (cache, t, &slot_num);
  gcc_assert (slot_num != (unsigned)-1);

  /* Classify the symbol: defined/undefined, weak, or common.  */
  if (DECL_EXTERNAL (t))
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKUNDEF;
      else
	kind = GCCPK_UNDEF;
    }
  else
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKDEF;
      else if (DECL_COMMON (t))
	kind = GCCPK_COMMON;
      else
	kind = GCCPK_DEF;

      /* When something is defined, it should have node attached.  */
      gcc_assert (alias || TREE_CODE (t) != VAR_DECL
		  || varpool_node::get (t)->definition);
      gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
		  || (cgraph_node::get (t)
		      && cgraph_node::get (t)->definition));
    }

  /* Imitate what default_elf_asm_output_external do.
     When symbol is external, we need to output it with DEFAULT visibility
     when compiling with -fvisibility=default, while with HIDDEN visibility
     when symbol has attribute (visibility("hidden")) specified.
     targetm.binds_local_p check DECL_VISIBILITY_SPECIFIED and gets this
     right.  */

  if (DECL_EXTERNAL (t)
      && !targetm.binds_local_p (t))
    visibility = GCCPV_DEFAULT;
  else
    switch (DECL_VISIBILITY (t))
      {
      case VISIBILITY_DEFAULT:
	visibility = GCCPV_DEFAULT;
	break;
      case VISIBILITY_PROTECTED:
	visibility = GCCPV_PROTECTED;
	break;
      case VISIBILITY_HIDDEN:
	visibility = GCCPV_HIDDEN;
	break;
      case VISIBILITY_INTERNAL:
	visibility = GCCPV_INTERNAL;
	break;
      }

  /* The size is only meaningful for common symbols.  */
  if (kind == GCCPK_COMMON
      && DECL_SIZE_UNIT (t)
      && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
    size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
  else
    size = 0;

  if (DECL_ONE_ONLY (t))
    comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
  else
    comdat = "";

  /* Emit the record in the layout described above.  */
  lto_write_data (name, strlen (name) + 1);
  lto_write_data (comdat, strlen (comdat) + 1);
  c = (unsigned char) kind;
  lto_write_data (&c, 1);
  c = (unsigned char) visibility;
  lto_write_data (&c, 1);
  lto_write_data (&size, 8);
  lto_write_data (&slot_num, 4);
}
2586
2587 /* Return true if NODE should appear in the plugin symbol table. */
2588
2589 bool
2590 output_symbol_p (symtab_node *node)
2591 {
2592 struct cgraph_node *cnode;
2593 if (!node->real_symbol_p ())
2594 return false;
2595 /* We keep external functions in symtab for sake of inlining
2596 and devirtualization. We do not want to see them in symbol table as
2597 references unless they are really used. */
2598 cnode = dyn_cast <cgraph_node *> (node);
2599 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2600 && cnode->callers)
2601 return true;
2602
2603 /* Ignore all references from external vars initializers - they are not really
2604 part of the compilation unit until they are used by folding. Some symbols,
2605 like references to external construction vtables can not be referred to at all.
2606 We decide this at can_refer_decl_in_current_unit_p. */
2607 if (!node->definition || DECL_EXTERNAL (node->decl))
2608 {
2609 int i;
2610 struct ipa_ref *ref;
2611 for (i = 0; node->iterate_referring (i, ref); i++)
2612 {
2613 if (ref->use == IPA_REF_ALIAS)
2614 continue;
2615 if (is_a <cgraph_node *> (ref->referring))
2616 return true;
2617 if (!DECL_EXTERNAL (ref->referring->decl))
2618 return true;
2619 }
2620 return false;
2621 }
2622 return true;
2623 }
2624
2625
2626 /* Write an IL symbol table to OB.
2627 SET and VSET are cgraph/varpool node sets we are outputting. */
2628
2629 static void
2630 produce_symtab (struct output_block *ob)
2631 {
2632 struct streamer_tree_cache_d *cache = ob->writer_cache;
2633 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2634 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2635 lto_symtab_encoder_iterator lsei;
2636
2637 lto_begin_section (section_name, false);
2638 free (section_name);
2639
2640 hash_set<const char *> seen;
2641
2642 /* Write the symbol table.
2643 First write everything defined and then all declarations.
2644 This is necessary to handle cases where we have duplicated symbols. */
2645 for (lsei = lsei_start (encoder);
2646 !lsei_end_p (lsei); lsei_next (&lsei))
2647 {
2648 symtab_node *node = lsei_node (lsei);
2649
2650 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2651 continue;
2652 write_symbol (cache, node->decl, &seen, false);
2653 }
2654 for (lsei = lsei_start (encoder);
2655 !lsei_end_p (lsei); lsei_next (&lsei))
2656 {
2657 symtab_node *node = lsei_node (lsei);
2658
2659 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2660 continue;
2661 write_symbol (cache, node->decl, &seen, false);
2662 }
2663
2664 lto_end_section ();
2665 }
2666
2667
2668 /* Init the streamer_mode_table for output, where we collect info on what
2669 machine_mode values have been streamed. */
2670 void
2671 lto_output_init_mode_table (void)
2672 {
2673 memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
2674 }
2675
2676
/* Write the mode table: a bitpacked description of every machine_mode
   that was streamed (per streamer_mode_table), terminated by VOIDmode,
   wrapped in a simple header-with-strings section.  */
static void
lto_write_mode_table (void)
{
  struct output_block *ob;
  ob = create_output_block (LTO_section_mode_table);
  bitpack_d bp = bitpack_create (ob->main_stream);

  /* Ensure that for GET_MODE_INNER (m) != VOIDmode we have
     also the inner mode marked.  */
  for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
    if (streamer_mode_table[i])
      {
	machine_mode m = (machine_mode) i;
	if (GET_MODE_INNER (m) != VOIDmode)
	  streamer_mode_table[(int) GET_MODE_INNER (m)] = 1;
      }
  /* First stream modes that have GET_MODE_INNER (m) == VOIDmode,
     so that we can refer to them afterwards.  */
  for (int pass = 0; pass < 2; pass++)
    for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
      if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
	{
	  machine_mode m = (machine_mode) i;
	  /* Inner-less modes go in pass 0, the rest in pass 1.  */
	  if ((GET_MODE_INNER (m) == VOIDmode) ^ (pass == 0))
	    continue;
	  /* Per-mode record: id, class, size, precision, inner mode,
	     nunits, class-specific extras, then the mode's name.  */
	  bp_pack_value (&bp, m, 8);
	  bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
	  bp_pack_value (&bp, GET_MODE_SIZE (m), 8);
	  bp_pack_value (&bp, GET_MODE_PRECISION (m), 16);
	  bp_pack_value (&bp, GET_MODE_INNER (m), 8);
	  bp_pack_value (&bp, GET_MODE_NUNITS (m), 8);
	  switch (GET_MODE_CLASS (m))
	    {
	    case MODE_FRACT:
	    case MODE_UFRACT:
	    case MODE_ACCUM:
	    case MODE_UACCUM:
	      /* Fixed-point modes carry their integral/fractional bits.  */
	      bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
	      bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
	      break;
	    case MODE_FLOAT:
	    case MODE_DECIMAL_FLOAT:
	      /* Float modes are identified by their real format name.  */
	      bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
	      break;
	    default:
	      break;
	    }
	  bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
	}
  /* VOIDmode terminates the table.  */
  bp_pack_value (&bp, VOIDmode, 8);

  streamer_write_bitpack (&bp);

  char *section_name
    = lto_get_section_name (LTO_section_mode_table, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header stream is computed here.  */
  struct lto_simple_header_with_strings header;
  memset (&header, 0, sizeof (header));

  /* Write the header.  */
  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;
  lto_write_data (&header, sizeof header);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();
  destroy_output_block (ob);
}
2756
2757
/* This pass is run after all of the functions are serialized and all
   of the IPA passes have written their serialized forms.  This pass
   causes the vector of all of the global decls and types used from
   this file to be written in to a section that can then be read in to
   recover these on other side.  Also emits the plugin symbol table
   (non-WPA only) and the saved command line options.  */

void
produce_asm_for_decls (void)
{
  struct lto_out_decl_state *out_state;
  struct lto_out_decl_state *fn_out_state;
  struct lto_decl_header header;
  char *section_name;
  struct output_block *ob;
  unsigned idx, num_fns;
  size_t decl_state_size;
  int32_t num_decl_states;

  ob = create_output_block (LTO_section_decls);

  memset (&header, 0, sizeof (struct lto_decl_header));

  section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* All aliases must have been processed by now.  */
  gcc_assert (!alias_pairs);

  /* Get rid of the global decl state hash tables to save some memory.  */
  out_state = lto_get_out_decl_state ();
  for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
    if (out_state->streams[i].tree_hash_table)
      {
	delete out_state->streams[i].tree_hash_table;
	out_state->streams[i].tree_hash_table = NULL;
      }

  /* Write the global symbols.  The global state first, then each
     per-function out-decl-state recorded during lto_output.  */
  lto_output_decl_state_streams (ob, out_state);
  num_fns = lto_function_decl_states.length ();
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_streams (ob, fn_out_state);
    }

  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  /* Currently not used.  This field would allow us to preallocate
     the globals vector, so that it need not be resized as it is extended.  */
  header.num_nodes = -1;

  /* Compute the total size of all decl out states.  */
  decl_state_size = sizeof (int32_t);
  decl_state_size += lto_out_decl_state_written_size (out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      decl_state_size += lto_out_decl_state_written_size (fn_out_state);
    }
  header.decl_state_size = decl_state_size;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  lto_write_data (&header, sizeof header);

  /* Write the main out-decl state, followed by out-decl states of
     functions.  */
  num_decl_states = num_fns + 1;
  lto_write_data (&num_decl_states, sizeof (num_decl_states));
  lto_output_decl_state_refs (ob, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state = lto_function_decl_states[idx];
      lto_output_decl_state_refs (ob, fn_out_state);
    }

  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  /* Write the symbol table.  It is used by linker to determine dependencies
     and thus we can skip it for WPA.  */
  if (!flag_wpa)
    produce_symtab (ob);

  /* Write command line opts.  */
  lto_write_options ();

  /* Deallocate memory and clean up.  */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_delete_out_decl_state (fn_out_state);
    }
  lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
  lto_function_decl_states.release ();
  destroy_output_block (ob);
  if (lto_stream_offload_p)
    lto_write_mode_table ();
}