alias.c: Reorder #include statements and remove duplicates.
[gcc.git] / gcc / lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2015 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "expmed.h"
34 #include "insn-config.h"
35 #include "emit-rtl.h"
36 #include "gimple-streamer.h"
37 #include "alias.h"
38 #include "fold-const.h"
39 #include "stor-layout.h"
40 #include "flags.h"
41 #include "dojump.h"
42 #include "explow.h"
43 #include "calls.h"
44 #include "varasm.h"
45 #include "stmt.h"
46 #include "expr.h"
47 #include "params.h"
48 #include "internal-fn.h"
49 #include "gimple-iterator.h"
50 #include "except.h"
51 #include "lto-symtab.h"
52 #include "cgraph.h"
53 #include "cfgloop.h"
54 #include "builtins.h"
55 #include "gomp-constants.h"
56
57
58 static void lto_write_tree (struct output_block*, tree, bool);
59
60 /* Clear the line info stored in DATA_IN. */
61
62 static void
63 clear_line_info (struct output_block *ob)
64 {
65 ob->current_file = NULL;
66 ob->current_line = 0;
67 ob->current_col = 0;
68 ob->current_sysp = false;
69 }
70
71
72 /* Create the output block and return it. SECTION_TYPE is
73 LTO_section_function_body or LTO_static_initializer. */
74
75 struct output_block *
76 create_output_block (enum lto_section_type section_type)
77 {
78 struct output_block *ob = XCNEW (struct output_block);
79
80 ob->section_type = section_type;
81 ob->decl_state = lto_get_out_decl_state ();
82 ob->main_stream = XCNEW (struct lto_output_stream);
83 ob->string_stream = XCNEW (struct lto_output_stream);
84 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
85
86 if (section_type == LTO_section_function_body)
87 ob->cfg_stream = XCNEW (struct lto_output_stream);
88
89 clear_line_info (ob);
90
91 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
92 gcc_obstack_init (&ob->obstack);
93
94 return ob;
95 }
96
97
98 /* Destroy the output block OB. */
99
100 void
101 destroy_output_block (struct output_block *ob)
102 {
103 enum lto_section_type section_type = ob->section_type;
104
105 delete ob->string_hash_table;
106 ob->string_hash_table = NULL;
107
108 free (ob->main_stream);
109 free (ob->string_stream);
110 if (section_type == LTO_section_function_body)
111 free (ob->cfg_stream);
112
113 streamer_tree_cache_delete (ob->writer_cache);
114 obstack_free (&ob->obstack, NULL);
115
116 free (ob);
117 }
118
119
120 /* Look up NODE in the type table and write the index for it to OB. */
121
122 static void
123 output_type_ref (struct output_block *ob, tree node)
124 {
125 streamer_write_record_start (ob, LTO_type_ref);
126 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
127 }
128
129
/* Return true if tree node T is written to various tables.  For these
   nodes, we sometimes want to write their physical representation
   (via lto_output_tree), and sometimes we need to emit an index
   reference into a table (via lto_output_tree_ref).  */

static bool
tree_is_indexable (tree t)
{
  /* Parameters and return values of functions of variably modified types
     must go to global stream, because they may be used in the type
     definition.  */
  if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
      && DECL_CONTEXT (t))
    return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
  /* IMPORTED_DECL is put into BLOCK and thus it never can be shared.  */
  else if (TREE_CODE (t) == IMPORTED_DECL)
    return false;
  /* Automatic variables and locally declared TYPE/CONST/NAMELIST decls
     belong to the function body they appear in, not a global table.  */
  else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
	    || TREE_CODE (t) == TYPE_DECL
	    || TREE_CODE (t) == CONST_DECL
	    || TREE_CODE (t) == NAMELIST_DECL)
	   && decl_function_context (t))
    return false;
  else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
    return false;
  /* Variably modified types need to be streamed alongside function
     bodies because they can refer to local entities.  Together with
     them we have to localize their members as well.
     ??? In theory that includes non-FIELD_DECLs as well.  */
  else if (TYPE_P (t)
	   && variably_modified_type_p (t, NULL_TREE))
    return false;
  else if (TREE_CODE (t) == FIELD_DECL
	   && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
    return false;
  else
    /* All remaining types, decls and SSA names go through the tables.  */
    return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
}
168
169
/* Output info about new location into bitpack BP.
   After outputting bitpack, lto_output_location_data has
   to be done to output actual data.  */

void
lto_output_location (struct output_block *ob, struct bitpack_d *bp,
		     location_t loc)
{
  expanded_location xloc;

  loc = LOCATION_LOCUS (loc);
  /* Reserved locations (UNKNOWN_LOCATION etc.) are streamed as just
     their small enumerated value; anything else gets
     RESERVED_LOCATION_COUNT followed by the expanded data below.  */
  bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT,
			loc < RESERVED_LOCATION_COUNT
			? loc : RESERVED_LOCATION_COUNT);
  if (loc < RESERVED_LOCATION_COUNT)
    return;

  xloc = expand_location (loc);

  /* File, line and column are delta-encoded against the location most
     recently streamed through OB: one "changed" bit each, then only
     the components that actually differ.  */
  bp_pack_value (bp, ob->current_file != xloc.file, 1);
  bp_pack_value (bp, ob->current_line != xloc.line, 1);
  bp_pack_value (bp, ob->current_col != xloc.column, 1);

  if (ob->current_file != xloc.file)
    {
      bp_pack_string (ob, bp, xloc.file, true);
      /* The system-header flag travels with the file name.  */
      bp_pack_value (bp, xloc.sysp, 1);
    }
  ob->current_file = xloc.file;
  ob->current_sysp = xloc.sysp;

  if (ob->current_line != xloc.line)
    bp_pack_var_len_unsigned (bp, xloc.line);
  ob->current_line = xloc.line;

  if (ob->current_col != xloc.column)
    bp_pack_var_len_unsigned (bp, xloc.column);
  ob->current_col = xloc.column;
}
209
210
/* If EXPR is an indexable tree node, output a reference to it to
   output block OB.  Otherwise, output the physical representation of
   EXPR to OB.  */

static void
lto_output_tree_ref (struct output_block *ob, tree expr)
{
  enum tree_code code;

  /* Types are always referenced through the type table.  */
  if (TYPE_P (expr))
    {
      output_type_ref (ob, expr);
      return;
    }

  /* Each remaining indexable kind gets its own record tag followed by
     an index into the corresponding decl-state table.  */
  code = TREE_CODE (expr);
  switch (code)
    {
    case SSA_NAME:
      /* SSA names are referenced by version number only.  */
      streamer_write_record_start (ob, LTO_ssa_name_ref);
      streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
      break;

    case FIELD_DECL:
      streamer_write_record_start (ob, LTO_field_decl_ref);
      lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case FUNCTION_DECL:
      streamer_write_record_start (ob, LTO_function_decl_ref);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case VAR_DECL:
    case DEBUG_EXPR_DECL:
      /* Only file-scope or static variables may be indexed; locals are
	 streamed with their function body (see tree_is_indexable).  */
      gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
      /* Fall through.  */
    case PARM_DECL:
      streamer_write_record_start (ob, LTO_global_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case CONST_DECL:
      streamer_write_record_start (ob, LTO_const_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case IMPORTED_DECL:
      gcc_assert (decl_function_context (expr) == NULL);
      streamer_write_record_start (ob, LTO_imported_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TYPE_DECL:
      streamer_write_record_start (ob, LTO_type_decl_ref);
      lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMELIST_DECL:
      streamer_write_record_start (ob, LTO_namelist_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMESPACE_DECL:
      streamer_write_record_start (ob, LTO_namespace_decl_ref);
      lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case LABEL_DECL:
      streamer_write_record_start (ob, LTO_label_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case RESULT_DECL:
      streamer_write_record_start (ob, LTO_result_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TRANSLATION_UNIT_DECL:
      streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    default:
      /* No other node is indexable, so it should have been handled by
	 lto_output_tree.  */
      gcc_unreachable ();
    }
}
299
300
301 /* Return true if EXPR is a tree node that can be written to disk. */
302
303 static inline bool
304 lto_is_streamable (tree expr)
305 {
306 enum tree_code code = TREE_CODE (expr);
307
308 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
309 name version in lto_output_tree_ref (see output_ssa_names). */
310 return !is_lang_specific (expr)
311 && code != SSA_NAME
312 && code != CALL_EXPR
313 && code != LANG_TYPE
314 && code != MODIFY_EXPR
315 && code != INIT_EXPR
316 && code != TARGET_EXPR
317 && code != BIND_EXPR
318 && code != WITH_CLEANUP_EXPR
319 && code != STATEMENT_LIST
320 && (code == CASE_LABEL_EXPR
321 || code == DECL_EXPR
322 || TREE_CODE_CLASS (code) != tcc_statement);
323 }
324
325
326 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
327
328 static tree
329 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
330 {
331 gcc_checking_assert (DECL_P (expr)
332 && TREE_CODE (expr) != FUNCTION_DECL
333 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
334
335 /* Handle DECL_INITIAL for symbols. */
336 tree initial = DECL_INITIAL (expr);
337 if (TREE_CODE (expr) == VAR_DECL
338 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
339 && !DECL_IN_CONSTANT_POOL (expr)
340 && initial)
341 {
342 varpool_node *vnode;
343 /* Extra section needs about 30 bytes; do not produce it for simple
344 scalar values. */
345 if (TREE_CODE (DECL_INITIAL (expr)) == CONSTRUCTOR
346 || !(vnode = varpool_node::get (expr))
347 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
348 initial = error_mark_node;
349 }
350
351 return initial;
352 }
353
354
355 /* Write a physical representation of tree node EXPR to output block
356 OB. If REF_P is true, the leaves of EXPR are emitted as references
357 via lto_output_tree_ref. IX is the index into the streamer cache
358 where EXPR is stored. */
359
360 static void
361 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
362 {
363 /* Pack all the non-pointer fields in EXPR into a bitpack and write
364 the resulting bitpack. */
365 streamer_write_tree_bitfields (ob, expr);
366
367 /* Write all the pointer fields in EXPR. */
368 streamer_write_tree_body (ob, expr, ref_p);
369
370 /* Write any LTO-specific data to OB. */
371 if (DECL_P (expr)
372 && TREE_CODE (expr) != FUNCTION_DECL
373 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
374 {
375 /* Handle DECL_INITIAL for symbols. */
376 tree initial = get_symbol_initial_value
377 (ob->decl_state->symtab_node_encoder, expr);
378 stream_write_tree (ob, initial, ref_p);
379 }
380 }
381
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  */

static void
lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
{
  /* Refuse nodes the LTO format cannot represent; front-end specific
     trees must have been lowered before streaming.  */
  if (!lto_is_streamable (expr))
    internal_error ("tree code %qs is not supported in LTO streams",
		    get_tree_code_name (TREE_CODE (expr)));

  /* Write the header, containing everything needed to materialize
     EXPR on the reading side.  */
  streamer_write_tree_header (ob, expr);

  /* Then the body: bitfields, pointer fields and LTO-specific data.  */
  lto_write_tree_1 (ob, expr, ref_p);

  /* Mark the end of EXPR.  */
  streamer_write_zero (ob);
}
403
/* Emit the physical representation of tree node EXPR to output block OB.
   If THIS_REF_P is true, the leaves of EXPR are emitted as references via
   lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.
   HASH is the value EXPR is recorded under in the writer cache.  */

static void
lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
		   bool ref_p, bool this_ref_p)
{
  unsigned ix;

  gcc_checking_assert (expr != NULL_TREE
		       && !(this_ref_p && tree_is_indexable (expr)));

  /* Record EXPR in the writer cache; it must not have been streamed
     from this block before.  */
  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
					      expr, hash, &ix);
  gcc_assert (!exists_p);
  if (streamer_handle_as_builtin_p (expr))
    {
      /* MD and NORMAL builtins do not need to be written out
	 completely as they are always instantiated by the
	 compiler on startup.  The only builtins that need to
	 be written out are BUILT_IN_FRONTEND.  For all other
	 builtins, we simply write the class and code.  */
      streamer_write_builtin (ob, expr);
    }
  else if (TREE_CODE (expr) == INTEGER_CST
	   && !TREE_OVERFLOW (expr))
    {
      /* Shared INTEGER_CST nodes are special because they need their
	 original type to be materialized by the reader (to implement
	 TYPE_CACHED_VALUES).  */
      streamer_write_integer_cst (ob, expr, ref_p);
    }
  else
    {
      /* This is the first time we see EXPR, write its fields
	 to OB.  */
      lto_write_tree (ob, expr, ref_p);
    }
}
444
/* Worker for streaming trees in SCC order: an iterative depth-first
   walk (Tarjan's SCC algorithm) over the tree graph rooted at a given
   expression.  The constructor performs the whole walk and streams
   each completed SCC.  */

class DFS
{
public:
  DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
       bool single_p);
  ~DFS ();

  /* An element of the SCC stack: a tree together with its hash value
     (zero until filled in by hash_scc).  */
  struct scc_entry
  {
    tree t;
    hashval_t hash;
  };
  /* Tarjan stack of visited nodes whose SCC is not yet complete.  */
  vec<scc_entry> sccstack;

private:
  /* Per-node DFS state: discovery number and low-link.  */
  struct sccs
  {
    unsigned int dfsnum;
    unsigned int low;
  };
  /* A pending edge on the explicit (heap-allocated) DFS worklist;
     CSTATE is NULL until the target node has been entered.  */
  struct worklist
  {
    tree expr;
    sccs *from_state;
    sccs *cstate;
    bool ref_p;
    bool this_ref_p;
  };

  static int scc_entry_compare (const void *, const void *);

  void DFS_write_tree_body (struct output_block *ob,
			    tree expr, sccs *expr_state, bool ref_p);

  void DFS_write_tree (struct output_block *ob, sccs *from_state,
		       tree expr, bool ref_p, bool this_ref_p);

  hashval_t
  hash_scc (struct output_block *ob, unsigned first, unsigned size,
	    bool ref_p, bool this_ref_p);

  /* Map from tree to its DFS state; states live on sccstate_obstack.  */
  hash_map<tree, sccs *> sccstate;
  vec<worklist> worklist_vec;
  struct obstack sccstate_obstack;
};
490
/* Emit the physical representation of tree node EXPR to output block OB,
   using depth-first search on the subgraph.  If THIS_REF_P is true, the
   leaves of EXPR are emitted as references via lto_output_tree_ref.
   REF_P is used for streaming siblings of EXPR.  If SINGLE_P is true,
   this is for a rewalk of a single leaf SCC.  */

DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
	  bool single_p)
{
  unsigned int next_dfs_num = 1;
  sccstack.create (0);
  gcc_obstack_init (&sccstate_obstack);
  worklist_vec = vNULL;
  DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
  /* Iterative Tarjan SCC walk: each worklist entry is an edge; when a
     node's low-link equals its discovery number its SCC is complete
     and gets streamed as one LTO_tree_scc record.  */
  while (!worklist_vec.is_empty ())
    {
      worklist &w = worklist_vec.last ();
      expr = w.expr;
      sccs *from_state = w.from_state;
      sccs *cstate = w.cstate;
      ref_p = w.ref_p;
      this_ref_p = w.this_ref_p;
      if (cstate == NULL)
	{
	  /* First visit of this worklist entry.  */
	  sccs **slot = &sccstate.get_or_insert (expr);
	  cstate = *slot;
	  if (cstate)
	    {
	      /* Already visited node: just update the low-link of the
		 parent along this back/cross edge.  */
	      gcc_checking_assert (from_state);
	      if (cstate->dfsnum < from_state->dfsnum)
		from_state->low = MIN (cstate->dfsnum, from_state->low);
	      worklist_vec.pop ();
	      continue;
	    }

	  scc_entry e = { expr, 0 };
	  /* Not yet visited.  DFS recurse and push it onto the stack.  */
	  *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
	  sccstack.safe_push (e);
	  cstate->dfsnum = next_dfs_num++;
	  cstate->low = cstate->dfsnum;
	  w.cstate = cstate;

	  /* Builtins have no edges to follow; INTEGER_CSTs only need
	     their type; everything else gets a full body walk.  */
	  if (streamer_handle_as_builtin_p (expr))
	    ;
	  else if (TREE_CODE (expr) == INTEGER_CST
		   && !TREE_OVERFLOW (expr))
	    DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
	  else
	    {
	      DFS_write_tree_body (ob, expr, cstate, ref_p);

	      /* Walk any LTO-specific edges.  */
	      if (DECL_P (expr)
		  && TREE_CODE (expr) != FUNCTION_DECL
		  && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
		{
		  /* Handle DECL_INITIAL for symbols.  */
		  tree initial
		    = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
						expr);
		  DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
		}
	    }
	  continue;
	}

      /* Second visit of this entry: all children processed.
	 See if we found an SCC.  */
      if (cstate->low == cstate->dfsnum)
	{
	  unsigned first, size;
	  tree x;

	  /* If we are re-walking a single leaf SCC just pop it,
	     let earlier worklist item access the sccstack.  */
	  if (single_p)
	    {
	      worklist_vec.pop ();
	      continue;
	    }

	  /* Pop the SCC and compute its size.  */
	  first = sccstack.length ();
	  do
	    {
	      x = sccstack[--first].t;
	    }
	  while (x != expr);
	  size = sccstack.length () - first;

	  /* No need to compute hashes for LTRANS units, we don't perform
	     any merging there.  */
	  hashval_t scc_hash = 0;
	  unsigned scc_entry_len = 0;
	  if (!flag_wpa)
	    {
	      scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);

	      /* Put the entries with the least number of collisions first.  */
	      unsigned entry_start = 0;
	      scc_entry_len = size + 1;
	      for (unsigned i = 0; i < size;)
		{
		  unsigned from = i;
		  for (i = i + 1; i < size
		       && (sccstack[first + i].hash
			   == sccstack[first + from].hash); ++i)
		    ;
		  if (i - from < scc_entry_len)
		    {
		      scc_entry_len = i - from;
		      entry_start = from;
		    }
		}
	      for (unsigned i = 0; i < scc_entry_len; ++i)
		std::swap (sccstack[first + i],
			   sccstack[first + entry_start + i]);

	      /* We already sorted SCC deterministically in hash_scc.  */

	      /* Check that we have only one SCC.
		 Naturally we may have conflicts if hash function is not
		 strong enough.  Lets see how far this gets.  */
	      gcc_checking_assert (scc_entry_len == 1);
	    }

	  /* Write LTO_tree_scc.  */
	  streamer_write_record_start (ob, LTO_tree_scc);
	  streamer_write_uhwi (ob, size);
	  streamer_write_uhwi (ob, scc_hash);

	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
	     All INTEGER_CSTs need to be handled this way as we need
	     their type to materialize them.  Also builtins are handled
	     this way.
	     ??? We still wrap these in LTO_tree_scc so at the
	     input side we can properly identify the tree we want
	     to ultimatively return.  */
	  if (size == 1)
	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
	  else
	    {
	      /* Write the size of the SCC entry candidates.  */
	      streamer_write_uhwi (ob, scc_entry_len);

	      /* Write all headers and populate the streamer cache.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  hashval_t hash = sccstack[first+i].hash;
		  tree t = sccstack[first+i].t;
		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
							      t, hash, NULL);
		  gcc_assert (!exists_p);

		  if (!lto_is_streamable (t))
		    internal_error ("tree code %qs is not supported "
				    "in LTO streams",
				    get_tree_code_name (TREE_CODE (t)));

		  gcc_checking_assert (!streamer_handle_as_builtin_p (t));

		  /* Write the header, containing everything needed to
		     materialize EXPR on the reading side.  */
		  streamer_write_tree_header (ob, t);
		}

	      /* Write the bitpacks and tree references.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);

		  /* Mark the end of the tree.  */
		  streamer_write_zero (ob);
		}
	    }

	  /* Finally truncate the vector.  */
	  sccstack.truncate (first);

	  if (from_state)
	    from_state->low = MIN (from_state->low, cstate->low);
	  worklist_vec.pop ();
	  continue;
	}

      /* Not an SCC root: propagate the low-link to the parent.  */
      gcc_checking_assert (from_state);
      from_state->low = MIN (from_state->low, cstate->low);
      if (cstate->dfsnum < from_state->dfsnum)
	from_state->low = MIN (cstate->dfsnum, from_state->low);
      worklist_vec.pop ();
    }
  worklist_vec.release ();
}
684
685 DFS::~DFS ()
686 {
687 sccstack.release ();
688 obstack_free (&sccstate_obstack, NULL);
689 }
690
/* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
   DFS recurse for all tree edges originating from it.  The structure
   checks below mirror the pointer fields streamed by
   streamer_write_tree_body; the two must be kept in sync.  */

void
DFS::DFS_write_tree_body (struct output_block *ob,
			  tree expr, sccs *expr_state, bool ref_p)
{
#define DFS_follow_tree_edge(DEST) \
  DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)

  enum tree_code code;

  code = TREE_CODE (expr);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (TREE_CODE (expr) != IDENTIFIER_NODE)
	DFS_follow_tree_edge (TREE_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
	DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      DFS_follow_tree_edge (TREE_REALPART (expr));
      DFS_follow_tree_edge (TREE_IMAGPART (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (expr)
	  && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
	  && anon_aggrname_p (DECL_NAME (expr)))
	;
      else
	DFS_follow_tree_edge (DECL_NAME (expr));
      DFS_follow_tree_edge (DECL_CONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      DFS_follow_tree_edge (DECL_SIZE (expr));
      DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));

      /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
	 special handling in LTO, it must be handled by streamer hooks.  */

      DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));

      /* Do not follow DECL_ABSTRACT_ORIGIN.  We cannot handle debug information
	 for early inlining so drop it on the floor instead of ICEing in
	 dwarf2out.c.  */

      if ((TREE_CODE (expr) == VAR_DECL
	   || TREE_CODE (expr) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (expr))
	DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
      if (TREE_CODE (expr) == VAR_DECL)
	DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (TREE_CODE (expr) == TYPE_DECL)
	DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* Make sure we don't inadvertently set the assembler name.  */
      if (DECL_ASSEMBLER_NAME_SET_P (expr))
	DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
      DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
      DFS_follow_tree_edge (DECL_FCONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      DFS_follow_tree_edge (DECL_VINDEX (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      DFS_follow_tree_edge (TYPE_SIZE (expr));
      DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
      DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
      DFS_follow_tree_edge (TYPE_NAME (expr));
      /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
	 reconstructed during fixup.  */
      /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
	 during fixup.  */
      DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
      DFS_follow_tree_edge (TYPE_CONTEXT (expr));
      /* TYPE_CANONICAL is re-computed during type merging, so no need
	 to follow it here.  */
      DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	DFS_follow_tree_edge (TYPE_VALUES (expr));
      else if (TREE_CODE (expr) == ARRAY_TYPE)
	DFS_follow_tree_edge (TYPE_DOMAIN (expr));
      else if (RECORD_OR_UNION_TYPE_P (expr))
	for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
	  DFS_follow_tree_edge (t);
      else if (TREE_CODE (expr) == FUNCTION_TYPE
	       || TREE_CODE (expr) == METHOD_TYPE)
	DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));

      if (!POINTER_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_MINVAL (expr));
      DFS_follow_tree_edge (TYPE_MAXVAL (expr));
      if (RECORD_OR_UNION_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_BINFO (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      DFS_follow_tree_edge (TREE_PURPOSE (expr));
      DFS_follow_tree_edge (TREE_VALUE (expr));
      DFS_follow_tree_edge (TREE_CHAIN (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    {
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_OPERAND (expr, i));
      DFS_follow_tree_edge (TREE_BLOCK (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    {
      for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
	if (VAR_OR_FUNCTION_DECL_P (t)
	    && DECL_EXTERNAL (t))
	  /* We have to stream externals in the block chain as
	     non-references.  See also
	     tree-streamer-out.c:streamer_write_chain.  */
	  DFS_write_tree (ob, expr_state, t, ref_p, false);
	else
	  DFS_follow_tree_edge (t);

      DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));

      /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
	 handle - those that represent inlined function scopes.
	 Drop the rest on the floor instead of ICEing
	 in dwarf2out.c.  */
      if (inlined_function_outer_scope_p (expr))
	{
	  tree ultimate_origin = block_ultimate_origin (expr);
	  DFS_follow_tree_edge (ultimate_origin);
	}
      /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
	 information for early inlined BLOCKs so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
	 streaming time.  */

      /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
	 list is re-constructed from BLOCK_SUPERCONTEXT.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree t;

      /* Note that the number of BINFO slots has already been emitted in
	 EXPR's header (see streamer_write_tree_header) because this length
	 is needed to build the empty BINFO node on the reader side.  */
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
	DFS_follow_tree_edge (t);
      DFS_follow_tree_edge (BINFO_OFFSET (expr));
      DFS_follow_tree_edge (BINFO_VTABLE (expr));
      DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));

      /* The number of BINFO_BASE_ACCESSES has already been emitted in
	 EXPR's bitfield section.  */
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
	DFS_follow_tree_edge (t);

      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
	{
	  DFS_follow_tree_edge (index);
	  DFS_follow_tree_edge (value);
	}
    }

  if (code == OMP_CLAUSE)
    {
      int i;
      for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
	DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
      DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
    }

#undef DFS_follow_tree_edge
}
923
924 /* Return a hash value for the tree T.
925 CACHE holds hash values of trees outside current SCC. MAP, if non-NULL,
926 may hold hash values if trees inside current SCC. */
927
928 static hashval_t
929 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
930 {
931 inchash::hash hstate;
932
933 #define visit(SIBLING) \
934 do { \
935 unsigned ix; \
936 if (!SIBLING) \
937 hstate.add_int (0); \
938 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
939 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
940 else if (map) \
941 hstate.add_int (*map->get (SIBLING)); \
942 else \
943 hstate.add_int (1); \
944 } while (0)
945
946 /* Hash TS_BASE. */
947 enum tree_code code = TREE_CODE (t);
948 hstate.add_int (code);
949 if (!TYPE_P (t))
950 {
951 hstate.add_flag (TREE_SIDE_EFFECTS (t));
952 hstate.add_flag (TREE_CONSTANT (t));
953 hstate.add_flag (TREE_READONLY (t));
954 hstate.add_flag (TREE_PUBLIC (t));
955 }
956 hstate.add_flag (TREE_ADDRESSABLE (t));
957 hstate.add_flag (TREE_THIS_VOLATILE (t));
958 if (DECL_P (t))
959 hstate.add_flag (DECL_UNSIGNED (t));
960 else if (TYPE_P (t))
961 hstate.add_flag (TYPE_UNSIGNED (t));
962 if (TYPE_P (t))
963 hstate.add_flag (TYPE_ARTIFICIAL (t));
964 else
965 hstate.add_flag (TREE_NO_WARNING (t));
966 hstate.add_flag (TREE_NOTHROW (t));
967 hstate.add_flag (TREE_STATIC (t));
968 hstate.add_flag (TREE_PROTECTED (t));
969 hstate.add_flag (TREE_DEPRECATED (t));
970 if (code != TREE_BINFO)
971 hstate.add_flag (TREE_PRIVATE (t));
972 if (TYPE_P (t))
973 {
974 hstate.add_flag (TYPE_SATURATING (t));
975 hstate.add_flag (TYPE_ADDR_SPACE (t));
976 }
977 else if (code == SSA_NAME)
978 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
979 hstate.commit_flag ();
980
981 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
982 {
983 int i;
984 hstate.add_wide_int (TREE_INT_CST_NUNITS (t));
985 hstate.add_wide_int (TREE_INT_CST_EXT_NUNITS (t));
986 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
987 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
988 }
989
990 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
991 {
992 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
993 hstate.add_flag (r.cl);
994 hstate.add_flag (r.sign);
995 hstate.add_flag (r.signalling);
996 hstate.add_flag (r.canonical);
997 hstate.commit_flag ();
998 hstate.add_int (r.uexp);
999 hstate.add (r.sig, sizeof (r.sig));
1000 }
1001
1002 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1003 {
1004 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
1005 hstate.add_int (f.mode);
1006 hstate.add_int (f.data.low);
1007 hstate.add_int (f.data.high);
1008 }
1009
1010 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1011 {
1012 hstate.add_wide_int (DECL_MODE (t));
1013 hstate.add_flag (DECL_NONLOCAL (t));
1014 hstate.add_flag (DECL_VIRTUAL_P (t));
1015 hstate.add_flag (DECL_IGNORED_P (t));
1016 hstate.add_flag (DECL_ABSTRACT_P (t));
1017 hstate.add_flag (DECL_ARTIFICIAL (t));
1018 hstate.add_flag (DECL_USER_ALIGN (t));
1019 hstate.add_flag (DECL_PRESERVE_P (t));
1020 hstate.add_flag (DECL_EXTERNAL (t));
1021 hstate.add_flag (DECL_GIMPLE_REG_P (t));
1022 hstate.commit_flag ();
1023 hstate.add_int (DECL_ALIGN (t));
1024 if (code == LABEL_DECL)
1025 {
1026 hstate.add_int (EH_LANDING_PAD_NR (t));
1027 hstate.add_int (LABEL_DECL_UID (t));
1028 }
1029 else if (code == FIELD_DECL)
1030 {
1031 hstate.add_flag (DECL_PACKED (t));
1032 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
1033 hstate.add_int (DECL_OFFSET_ALIGN (t));
1034 }
1035 else if (code == VAR_DECL)
1036 {
1037 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
1038 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
1039 }
1040 if (code == RESULT_DECL
1041 || code == PARM_DECL
1042 || code == VAR_DECL)
1043 {
1044 hstate.add_flag (DECL_BY_REFERENCE (t));
1045 if (code == VAR_DECL
1046 || code == PARM_DECL)
1047 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
1048 }
1049 hstate.commit_flag ();
1050 }
1051
1052 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1053 hstate.add_int (DECL_REGISTER (t));
1054
1055 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1056 {
1057 hstate.add_flag (DECL_COMMON (t));
1058 hstate.add_flag (DECL_DLLIMPORT_P (t));
1059 hstate.add_flag (DECL_WEAK (t));
1060 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
1061 hstate.add_flag (DECL_COMDAT (t));
1062 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
1063 hstate.add_int (DECL_VISIBILITY (t));
1064 if (code == VAR_DECL)
1065 {
1066 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1067 hstate.add_flag (DECL_HARD_REGISTER (t));
1068 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
1069 }
1070 if (TREE_CODE (t) == FUNCTION_DECL)
1071 {
1072 hstate.add_flag (DECL_FINAL_P (t));
1073 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
1074 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
1075 }
1076 hstate.commit_flag ();
1077 }
1078
1079 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1080 {
1081 hstate.add_int (DECL_BUILT_IN_CLASS (t));
1082 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
1083 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
1084 hstate.add_flag (DECL_UNINLINABLE (t));
1085 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
1086 hstate.add_flag (DECL_IS_NOVOPS (t));
1087 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
1088 hstate.add_flag (DECL_IS_MALLOC (t));
1089 hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
1090 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
1091 hstate.add_flag (DECL_STATIC_CHAIN (t));
1092 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
1093 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
1094 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
1095 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
1096 hstate.add_flag (DECL_PURE_P (t));
1097 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
1098 hstate.commit_flag ();
1099 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
1100 hstate.add_int (DECL_FUNCTION_CODE (t));
1101 }
1102
1103 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1104 {
1105 hstate.add_wide_int (TYPE_MODE (t));
1106 hstate.add_flag (TYPE_STRING_FLAG (t));
1107 /* TYPE_NO_FORCE_BLK is private to stor-layout and need
1108 no streaming. */
1109 hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
1110 hstate.add_flag (TYPE_PACKED (t));
1111 hstate.add_flag (TYPE_RESTRICT (t));
1112 hstate.add_flag (TYPE_USER_ALIGN (t));
1113 hstate.add_flag (TYPE_READONLY (t));
1114 if (RECORD_OR_UNION_TYPE_P (t))
1115 {
1116 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
1117 hstate.add_flag (TYPE_FINAL_P (t));
1118 }
1119 else if (code == ARRAY_TYPE)
1120 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
1121 hstate.commit_flag ();
1122 hstate.add_int (TYPE_PRECISION (t));
1123 hstate.add_int (TYPE_ALIGN (t));
1124 hstate.add_int ((TYPE_ALIAS_SET (t) == 0
1125 || (!in_lto_p
1126 && get_alias_set (t) == 0))
1127 ? 0 : -1);
1128 }
1129
1130 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1131 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
1132 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
1133
1134 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
1135 /* We don't stream these when passing things to a different target. */
1136 && !lto_stream_offload_p)
1137 hstate.add_wide_int (cl_target_option_hash (TREE_TARGET_OPTION (t)));
1138
1139 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1140 hstate.add_wide_int (cl_optimization_hash (TREE_OPTIMIZATION (t)));
1141
1142 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1143 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
1144
1145 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1146 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
1147
1148 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1149 {
1150 if (code != IDENTIFIER_NODE)
1151 visit (TREE_TYPE (t));
1152 }
1153
1154 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1155 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
1156 visit (VECTOR_CST_ELT (t, i));
1157
1158 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1159 {
1160 visit (TREE_REALPART (t));
1161 visit (TREE_IMAGPART (t));
1162 }
1163
1164 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1165 {
1166 /* Drop names that were created for anonymous entities. */
1167 if (DECL_NAME (t)
1168 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
1169 && anon_aggrname_p (DECL_NAME (t)))
1170 ;
1171 else
1172 visit (DECL_NAME (t));
1173 if (DECL_FILE_SCOPE_P (t))
1174 ;
1175 else
1176 visit (DECL_CONTEXT (t));
1177 }
1178
1179 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1180 {
1181 visit (DECL_SIZE (t));
1182 visit (DECL_SIZE_UNIT (t));
1183 visit (DECL_ATTRIBUTES (t));
1184 if ((code == VAR_DECL
1185 || code == PARM_DECL)
1186 && DECL_HAS_VALUE_EXPR_P (t))
1187 visit (DECL_VALUE_EXPR (t));
1188 if (code == VAR_DECL
1189 && DECL_HAS_DEBUG_EXPR_P (t))
1190 visit (DECL_DEBUG_EXPR (t));
1191 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
1192 be able to call get_symbol_initial_value. */
1193 }
1194
1195 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1196 {
1197 if (code == TYPE_DECL)
1198 visit (DECL_ORIGINAL_TYPE (t));
1199 }
1200
1201 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1202 {
1203 if (DECL_ASSEMBLER_NAME_SET_P (t))
1204 visit (DECL_ASSEMBLER_NAME (t));
1205 }
1206
1207 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1208 {
1209 visit (DECL_FIELD_OFFSET (t));
1210 visit (DECL_BIT_FIELD_TYPE (t));
1211 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1212 visit (DECL_FIELD_BIT_OFFSET (t));
1213 visit (DECL_FCONTEXT (t));
1214 }
1215
1216 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1217 {
1218 visit (DECL_VINDEX (t));
1219 visit (DECL_FUNCTION_PERSONALITY (t));
1220 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1221 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1222 }
1223
1224 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1225 {
1226 visit (TYPE_SIZE (t));
1227 visit (TYPE_SIZE_UNIT (t));
1228 visit (TYPE_ATTRIBUTES (t));
1229 visit (TYPE_NAME (t));
1230 visit (TYPE_MAIN_VARIANT (t));
1231 if (TYPE_FILE_SCOPE_P (t))
1232 ;
1233 else
1234 visit (TYPE_CONTEXT (t));
1235 visit (TYPE_STUB_DECL (t));
1236 }
1237
1238 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1239 {
1240 if (code == ENUMERAL_TYPE)
1241 visit (TYPE_VALUES (t));
1242 else if (code == ARRAY_TYPE)
1243 visit (TYPE_DOMAIN (t));
1244 else if (RECORD_OR_UNION_TYPE_P (t))
1245 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1246 visit (f);
1247 else if (code == FUNCTION_TYPE
1248 || code == METHOD_TYPE)
1249 visit (TYPE_ARG_TYPES (t));
1250 if (!POINTER_TYPE_P (t))
1251 visit (TYPE_MINVAL (t));
1252 visit (TYPE_MAXVAL (t));
1253 if (RECORD_OR_UNION_TYPE_P (t))
1254 visit (TYPE_BINFO (t));
1255 }
1256
1257 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1258 {
1259 visit (TREE_PURPOSE (t));
1260 visit (TREE_VALUE (t));
1261 visit (TREE_CHAIN (t));
1262 }
1263
1264 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1265 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1266 visit (TREE_VEC_ELT (t, i));
1267
1268 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1269 {
1270 hstate.add_wide_int (TREE_OPERAND_LENGTH (t));
1271 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1272 visit (TREE_OPERAND (t, i));
1273 }
1274
1275 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1276 {
1277 unsigned i;
1278 tree b;
1279 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1280 visit (b);
1281 visit (BINFO_OFFSET (t));
1282 visit (BINFO_VTABLE (t));
1283 visit (BINFO_VPTR_FIELD (t));
1284 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1285 visit (b);
1286 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1287 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1288 }
1289
1290 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1291 {
1292 unsigned i;
1293 tree index, value;
1294 hstate.add_wide_int (CONSTRUCTOR_NELTS (t));
1295 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1296 {
1297 visit (index);
1298 visit (value);
1299 }
1300 }
1301
1302 if (code == OMP_CLAUSE)
1303 {
1304 int i;
1305 HOST_WIDE_INT val;
1306
1307 hstate.add_wide_int (OMP_CLAUSE_CODE (t));
1308 switch (OMP_CLAUSE_CODE (t))
1309 {
1310 case OMP_CLAUSE_DEFAULT:
1311 val = OMP_CLAUSE_DEFAULT_KIND (t);
1312 break;
1313 case OMP_CLAUSE_SCHEDULE:
1314 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1315 break;
1316 case OMP_CLAUSE_DEPEND:
1317 val = OMP_CLAUSE_DEPEND_KIND (t);
1318 break;
1319 case OMP_CLAUSE_MAP:
1320 val = OMP_CLAUSE_MAP_KIND (t);
1321 break;
1322 case OMP_CLAUSE_PROC_BIND:
1323 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1324 break;
1325 case OMP_CLAUSE_REDUCTION:
1326 val = OMP_CLAUSE_REDUCTION_CODE (t);
1327 break;
1328 default:
1329 val = 0;
1330 break;
1331 }
1332 hstate.add_wide_int (val);
1333 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1334 visit (OMP_CLAUSE_OPERAND (t, i));
1335 visit (OMP_CLAUSE_CHAIN (t));
1336 }
1337
1338 return hstate.end ();
1339
1340 #undef visit
1341 }
1342
1343 /* Compare two SCC entries by their hash value for qsorting them. */
1344
1345 int
1346 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1347 {
1348 const scc_entry *p1 = (const scc_entry *) p1_;
1349 const scc_entry *p2 = (const scc_entry *) p2_;
1350 if (p1->hash < p2->hash)
1351 return -1;
1352 else if (p1->hash > p2->hash)
1353 return 1;
1354 return 0;
1355 }
1356
1357 /* Return a hash value for the SCC on the SCC stack from FIRST with SIZE.
1358 THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST. */
1359
hashval_t
DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
	       bool ref_p, bool this_ref_p)
{
  unsigned int last_classes = 0, iterations = 0;

  /* Compute hash values for the SCC members.  The NULL map argument
     means intra-SCC edges do not contribute to the hash yet.  */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash
      = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);

  /* A singleton SCC needs no order canonicalization.  */
  if (size == 1)
    return sccstack[first].hash;

  /* We aim to get unique hash for every tree within SCC and compute hash value
     of the whole SCC by combining all values together in a stable (entry-point
     independent) order.  This guarantees that the same SCC regions within
     different translation units will get the same hash values and therefore
     will be merged at WPA time.

     Often the hashes are already unique.  In that case we compute the SCC hash
     by combining individual hash values in an increasing order.

     If there are duplicates, we seek at least one tree with unique hash (and
     pick one with minimal hash and this property).  Then we obtain a stable
     order by DFS walk starting from this unique tree and then use the index
     within this order to make individual hash values unique.

     If there is no tree with unique hash, we iteratively propagate the hash
     values across the internal edges of SCC.  This usually quickly leads
     to unique hashes.  Consider, for example, an SCC containing two pointers
     that are identical except for the types they point to and assume that
     these types are also part of the SCC.  The propagation will add the
     points-to type information into their hash values.  */
  do
    {
      /* Sort the SCC so we can easily check for uniqueness.  */
      qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);

      unsigned int classes = 1;
      int firstunique = -1;

      /* Find the tree with lowest unique hash (if it exists) and compute
	 the number of equivalence classes.  After the sort, equal hashes
	 are adjacent, so a class boundary is a pair of differing
	 neighbors.  */
      if (sccstack[first].hash != sccstack[first+1].hash)
	firstunique = 0;
      for (unsigned i = 1; i < size; ++i)
	if (sccstack[first+i-1].hash != sccstack[first+i].hash)
	  {
	    classes++;
	    if (firstunique == -1
		&& (i == size - 1
		    || sccstack[first+i+1].hash != sccstack[first+i].hash))
	      firstunique = i;
	  }

      /* If we found a tree with unique hash, stop the iteration.  */
      if (firstunique != -1
	  /* Also terminate if we run out of iterations or if the number of
	     equivalence classes is no longer increasing.
	     For example a cyclic list of trees that are all equivalent will
	     never have unique entry point; we however do not build such SCCs
	     in our IL.  */
	  || classes <= last_classes || iterations > 16)
	{
	  hashval_t scc_hash;

	  /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
	     starting from FIRSTUNIQUE to obtain a stable order.  */
	  if (classes != size && firstunique != -1)
	    {
	      hash_map <tree, hashval_t> map(size*2);

	      /* Store hash values into a map, so we can associate them with
		 the reordered SCC.  */
	      for (unsigned i = 0; i < size; ++i)
		map.put (sccstack[first+i].t, sccstack[first+i].hash);

	      /* Re-run the DFS from the unique entry; the single_p flag
		 restricts the walk to this SCC.  */
	      DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
			 true);
	      gcc_assert (again.sccstack.length () == size);

	      memcpy (sccstack.address () + first,
		      again.sccstack.address (),
		      sizeof (scc_entry) * size);

	      /* Update hash values of individual members by hashing in the
		 index within the stable order.  This ensures uniqueness.
		 Also compute the SCC hash by mixing in all hash values in
		 the stable order we obtained.  */
	      sccstack[first].hash = *map.get (sccstack[first].t);
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		{
		  sccstack[first+i].hash
		    = iterative_hash_hashval_t (i,
						*map.get (sccstack[first+i].t));
		  scc_hash
		    = iterative_hash_hashval_t (scc_hash,
						sccstack[first+i].hash);
		}
	    }
	  /* If we got a unique hash value for each tree, then sort already
	     ensured entry-point independent order.  Only compute the final
	     SCC hash.

	     If we failed to find the unique entry point, we go by the same
	     route.  We will eventually introduce unwanted hash conflicts.  */
	  else
	    {
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		scc_hash
		  = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);

	      /* We cannot 100% guarantee that the hash won't conflict so as
		 to make it impossible to find a unique hash.  This however
		 should be an extremely rare case.  ICE for now so possible
		 issues are found and evaluated.  */
	      gcc_checking_assert (classes == size);
	    }

	  /* To avoid conflicts across SCCs, iteratively hash the whole SCC
	     hash into the hash of each element.  */
	  for (unsigned i = 0; i < size; ++i)
	    sccstack[first+i].hash
	      = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
	  return scc_hash;
	}

      last_classes = classes;
      iterations++;

      /* We failed to identify the entry point; propagate hash values across
	 the edges.  */
      hash_map <tree, hashval_t> map(size*2);

      for (unsigned i = 0; i < size; ++i)
	map.put (sccstack[first+i].t, sccstack[first+i].hash);

      /* Re-hash every member; this time intra-SCC references pick up the
	 neighbor hashes recorded in MAP.  */
      for (unsigned i = 0; i < size; i++)
	sccstack[first+i].hash
	  = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
    }
  while (true);
}
1506
1507 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1508 already in the streamer cache. Main routine called for
1509 each visit of EXPR. */
1510
1511 void
1512 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1513 tree expr, bool ref_p, bool this_ref_p)
1514 {
1515 /* Handle special cases. */
1516 if (expr == NULL_TREE)
1517 return;
1518
1519 /* Do not DFS walk into indexable trees. */
1520 if (this_ref_p && tree_is_indexable (expr))
1521 return;
1522
1523 /* Check if we already streamed EXPR. */
1524 if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1525 return;
1526
1527 worklist w;
1528 w.expr = expr;
1529 w.from_state = from_state;
1530 w.cstate = NULL;
1531 w.ref_p = ref_p;
1532 w.this_ref_p = this_ref_p;
1533 worklist_vec.safe_push (w);
1534 }
1535
1536
1537 /* Emit the physical representation of tree node EXPR to output block OB.
1538 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
1539 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1540
void
lto_output_tree (struct output_block *ob, tree expr,
		 bool ref_p, bool this_ref_p)
{
  unsigned ix;
  bool existed_p;

  /* The null tree is streamed as a bare LTO_null record.  */
  if (expr == NULL_TREE)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  /* Indexable trees are emitted as references into the per-category
     index tables instead of being pickled inline.  */
  if (this_ref_p && tree_is_indexable (expr))
    {
      lto_output_tree_ref (ob, expr);
      return;
    }

  existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
  if (existed_p)
    {
      /* If a node has already been streamed out, make sure that
	 we don't write it more than once.  Otherwise, the reader
	 will instantiate two different nodes for the same object.  */
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
  else
    {
      /* This is the first time we see EXPR, write all reachable
	 trees to OB.  */
      static bool in_dfs_walk;

      /* Protect against recursion which means disconnect between
	 what tree edges we walk in the DFS walk and what edges
	 we stream out.  */
      gcc_assert (!in_dfs_walk);

      /* Start the DFS walk; the DFS constructor streams out every SCC
	 of trees reachable from EXPR that is not yet in the cache.  */
      in_dfs_walk = true;
      DFS (ob, expr, ref_p, this_ref_p, false);
      in_dfs_walk = false;

      /* Finally append a reference to the tree we were writing.
	 ??? If expr ended up as a singleton we could have
	 inlined it here and avoid outputting a reference.  */
      existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
      gcc_assert (existed_p);
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
}
1602
1603
1604 /* Output to OB a list of try/catch handlers starting with FIRST. */
1605
1606 static void
1607 output_eh_try_list (struct output_block *ob, eh_catch first)
1608 {
1609 eh_catch n;
1610
1611 for (n = first; n; n = n->next_catch)
1612 {
1613 streamer_write_record_start (ob, LTO_eh_catch);
1614 stream_write_tree (ob, n->type_list, true);
1615 stream_write_tree (ob, n->filter_list, true);
1616 stream_write_tree (ob, n->label, true);
1617 }
1618
1619 streamer_write_record_start (ob, LTO_null);
1620 }
1621
1622
/* Output EH region R to OB.  R's slot index in the region array is
   streamed via R->INDEX together with the indices of its neighboring
   regions, so the reader can rebuild the region tree and detect EH
   region sharing.  */
1626
1627 static void
1628 output_eh_region (struct output_block *ob, eh_region r)
1629 {
1630 enum LTO_tags tag;
1631
1632 if (r == NULL)
1633 {
1634 streamer_write_record_start (ob, LTO_null);
1635 return;
1636 }
1637
1638 if (r->type == ERT_CLEANUP)
1639 tag = LTO_ert_cleanup;
1640 else if (r->type == ERT_TRY)
1641 tag = LTO_ert_try;
1642 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1643 tag = LTO_ert_allowed_exceptions;
1644 else if (r->type == ERT_MUST_NOT_THROW)
1645 tag = LTO_ert_must_not_throw;
1646 else
1647 gcc_unreachable ();
1648
1649 streamer_write_record_start (ob, tag);
1650 streamer_write_hwi (ob, r->index);
1651
1652 if (r->outer)
1653 streamer_write_hwi (ob, r->outer->index);
1654 else
1655 streamer_write_zero (ob);
1656
1657 if (r->inner)
1658 streamer_write_hwi (ob, r->inner->index);
1659 else
1660 streamer_write_zero (ob);
1661
1662 if (r->next_peer)
1663 streamer_write_hwi (ob, r->next_peer->index);
1664 else
1665 streamer_write_zero (ob);
1666
1667 if (r->type == ERT_TRY)
1668 {
1669 output_eh_try_list (ob, r->u.eh_try.first_catch);
1670 }
1671 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1672 {
1673 stream_write_tree (ob, r->u.allowed.type_list, true);
1674 stream_write_tree (ob, r->u.allowed.label, true);
1675 streamer_write_uhwi (ob, r->u.allowed.filter);
1676 }
1677 else if (r->type == ERT_MUST_NOT_THROW)
1678 {
1679 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1680 bitpack_d bp = bitpack_create (ob->main_stream);
1681 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1682 streamer_write_bitpack (&bp);
1683 }
1684
1685 if (r->landing_pads)
1686 streamer_write_hwi (ob, r->landing_pads->index);
1687 else
1688 streamer_write_zero (ob);
1689 }
1690
1691
1692 /* Output landing pad LP to OB. */
1693
1694 static void
1695 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1696 {
1697 if (lp == NULL)
1698 {
1699 streamer_write_record_start (ob, LTO_null);
1700 return;
1701 }
1702
1703 streamer_write_record_start (ob, LTO_eh_landing_pad);
1704 streamer_write_hwi (ob, lp->index);
1705 if (lp->next_lp)
1706 streamer_write_hwi (ob, lp->next_lp->index);
1707 else
1708 streamer_write_zero (ob);
1709
1710 if (lp->region)
1711 streamer_write_hwi (ob, lp->region->index);
1712 else
1713 streamer_write_zero (ob);
1714
1715 stream_write_tree (ob, lp->post_landing_pad, true);
1716 }
1717
1718
1719 /* Output the existing eh_table to OB. */
1720
static void
output_eh_regions (struct output_block *ob, struct function *fn)
{
  /* Only emit a table if the function actually has EH regions.  */
  if (fn->eh && fn->eh->region_tree)
    {
      unsigned i;
      eh_region eh;
      eh_landing_pad lp;
      tree ttype;

      streamer_write_record_start (ob, LTO_eh_table);

      /* Emit the index of the root of the EH region tree.  */
      streamer_write_hwi (ob, fn->eh->region_tree->index);

      /* Emit all the EH regions in the region array, each prefixed by
	 the array length so the reader can size its copy.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
	output_eh_region (ob, eh);

      /* Emit all landing pads.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
	output_eh_lp (ob, lp);

      /* Emit all the runtime type data.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
	stream_write_tree (ob, ttype, true);

      /* Emit the table of action chains.  Its representation depends
	 on the unwinder flavor of the target.  */
      if (targetm.arm_eabi_unwinder)
	{
	  tree t;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
	    stream_write_tree (ob, t, true);
	}
      else
	{
	  uchar c;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
	    streamer_write_char_stream (ob->main_stream, c);
	}
    }

  /* The LTO_null either terminates the record or indicates that there
     are no eh_records at all.  */
  streamer_write_record_start (ob, LTO_null);
}
1772
1773
1774 /* Output all of the active ssa names to the ssa_names stream. */
1775
1776 static void
1777 output_ssa_names (struct output_block *ob, struct function *fn)
1778 {
1779 unsigned int i, len;
1780
1781 len = vec_safe_length (SSANAMES (fn));
1782 streamer_write_uhwi (ob, len);
1783
1784 for (i = 1; i < len; i++)
1785 {
1786 tree ptr = (*SSANAMES (fn))[i];
1787
1788 if (ptr == NULL_TREE
1789 || SSA_NAME_IN_FREE_LIST (ptr)
1790 || virtual_operand_p (ptr))
1791 continue;
1792
1793 streamer_write_uhwi (ob, i);
1794 streamer_write_char_stream (ob->main_stream,
1795 SSA_NAME_IS_DEFAULT_DEF (ptr));
1796 if (SSA_NAME_VAR (ptr))
1797 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1798 else
1799 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1800 stream_write_tree (ob, TREE_TYPE (ptr), true);
1801 }
1802
1803 streamer_write_zero (ob);
1804 }
1805
1806
1807 /* Output a wide-int. */
1808
1809 static void
1810 streamer_write_wi (struct output_block *ob,
1811 const widest_int &w)
1812 {
1813 int len = w.get_len ();
1814
1815 streamer_write_uhwi (ob, w.get_precision ());
1816 streamer_write_uhwi (ob, len);
1817 for (int i = 0; i < len; i++)
1818 streamer_write_hwi (ob, w.elt (i));
1819 }
1820
1821
1822 /* Output the cfg. */
1823
1824 static void
1825 output_cfg (struct output_block *ob, struct function *fn)
1826 {
1827 struct lto_output_stream *tmp_stream = ob->main_stream;
1828 basic_block bb;
1829
1830 ob->main_stream = ob->cfg_stream;
1831
1832 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1833 profile_status_for_fn (fn));
1834
1835 /* Output the number of the highest basic block. */
1836 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1837
1838 FOR_ALL_BB_FN (bb, fn)
1839 {
1840 edge_iterator ei;
1841 edge e;
1842
1843 streamer_write_hwi (ob, bb->index);
1844
1845 /* Output the successors and the edge flags. */
1846 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1847 FOR_EACH_EDGE (e, ei, bb->succs)
1848 {
1849 streamer_write_uhwi (ob, e->dest->index);
1850 streamer_write_hwi (ob, e->probability);
1851 streamer_write_gcov_count (ob, e->count);
1852 streamer_write_uhwi (ob, e->flags);
1853 }
1854 }
1855
1856 streamer_write_hwi (ob, -1);
1857
1858 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1859 while (bb->next_bb)
1860 {
1861 streamer_write_hwi (ob, bb->next_bb->index);
1862 bb = bb->next_bb;
1863 }
1864
1865 streamer_write_hwi (ob, -1);
1866
1867 /* ??? The cfgloop interface is tied to cfun. */
1868 gcc_assert (cfun == fn);
1869
1870 /* Output the number of loops. */
1871 streamer_write_uhwi (ob, number_of_loops (fn));
1872
1873 /* Output each loop, skipping the tree root which has number zero. */
1874 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1875 {
1876 struct loop *loop = get_loop (fn, i);
1877
1878 /* Write the index of the loop header. That's enough to rebuild
1879 the loop tree on the reader side. Stream -1 for an unused
1880 loop entry. */
1881 if (!loop)
1882 {
1883 streamer_write_hwi (ob, -1);
1884 continue;
1885 }
1886 else
1887 streamer_write_hwi (ob, loop->header->index);
1888
1889 /* Write everything copy_loop_info copies. */
1890 streamer_write_enum (ob->main_stream,
1891 loop_estimation, EST_LAST, loop->estimate_state);
1892 streamer_write_hwi (ob, loop->any_upper_bound);
1893 if (loop->any_upper_bound)
1894 streamer_write_wi (ob, loop->nb_iterations_upper_bound);
1895 streamer_write_hwi (ob, loop->any_estimate);
1896 if (loop->any_estimate)
1897 streamer_write_wi (ob, loop->nb_iterations_estimate);
1898
1899 /* Write OMP SIMD related info. */
1900 streamer_write_hwi (ob, loop->safelen);
1901 streamer_write_hwi (ob, loop->dont_vectorize);
1902 streamer_write_hwi (ob, loop->force_vectorize);
1903 stream_write_tree (ob, loop->simduid, true);
1904 }
1905
1906 ob->main_stream = tmp_stream;
1907 }
1908
1909
1910 /* Create the header in the file using OB. If the section type is for
1911 a function, set FN to the decl for that function. */
1912
void
produce_asm (struct output_block *ob, tree fn)
{
  enum lto_section_type section_type = ob->section_type;
  struct lto_function_header header;
  char *section_name;

  /* Function bodies get a per-function section named after the
     function's assembler name; other section types share one name.  */
  if (section_type == LTO_section_function_body)
    {
      const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
      section_name = lto_get_section_name (section_type, name, NULL);
    }
  else
    section_name = lto_get_section_name (section_type, NULL, NULL);

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header is stream computed here.  */
  memset (&header, 0, sizeof (struct lto_function_header));

  /* Write the header.  */
  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  /* Only function-body sections carry a CFG stream.  */
  if (section_type == LTO_section_function_body)
    header.cfg_size = ob->cfg_stream->total_size;
  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;
  lto_write_data (&header, sizeof header);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  The streams must follow in the same order their
     sizes were recorded in the header so the reader can split them.  */
  if (section_type == LTO_section_function_body)
    lto_write_stream (ob->cfg_stream);
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();
}
1953
1954
1955 /* Output the base body of struct function FN using output block OB. */
1956
static void
output_struct_function_base (struct output_block *ob, struct function *fn)
{
  struct bitpack_d bp;
  unsigned i;
  tree t;

  /* Output the static chain and non-local goto save area.  */
  stream_write_tree (ob, fn->static_chain_decl, true);
  stream_write_tree (ob, fn->nonlocal_goto_save_area, true);

  /* Output all the local variables in the function, prefixed by
     their count.  */
  streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
  FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
    stream_write_tree (ob, t, true);

  /* Output current IL state of the function.  */
  streamer_write_uhwi (ob, fn->curr_properties);

  /* Write all the attributes for FN.  The reader must unpack these
     bits in exactly the same order and widths.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, fn->is_thunk, 1);
  bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
  bp_pack_value (&bp, fn->returns_pcc_struct, 1);
  bp_pack_value (&bp, fn->returns_struct, 1);
  bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
  bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
  bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
  bp_pack_value (&bp, fn->after_inlining, 1);
  bp_pack_value (&bp, fn->stdarg, 1);
  bp_pack_value (&bp, fn->has_nonlocal_label, 1);
  bp_pack_value (&bp, fn->calls_alloca, 1);
  bp_pack_value (&bp, fn->calls_setjmp, 1);
  bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
  bp_pack_value (&bp, fn->has_simduid_loops, 1);
  bp_pack_value (&bp, fn->va_list_fpr_size, 8);
  bp_pack_value (&bp, fn->va_list_gpr_size, 8);
  bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);

  /* Output the function start and end loci.  */
  stream_output_location (ob, &bp, fn->function_start_locus);
  stream_output_location (ob, &bp, fn->function_end_locus);

  streamer_write_bitpack (&bp);
}
2002
2003
/* Output the body of function NODE->DECL into a new
   LTO_section_function_body section.  For abstract functions only the
   parts needed for debug info are streamed (no GIMPLE body).  */

static void
output_function (struct cgraph_node *node)
{
  tree function;
  struct function *fn;
  basic_block bb;
  struct output_block *ob;

  function = node->decl;
  fn = DECL_STRUCT_FUNCTION (function);
  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->symbol = node;

  /* Nothing may be in function context yet; push_cfun below sets it.  */
  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  /* Set current_function_decl and cfun.  */
  push_cfun (fn);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  streamer_write_record_start (ob, LTO_function);

  /* Output decls for parameters and args.  */
  stream_write_tree (ob, DECL_RESULT (function), true);
  streamer_write_chain (ob, DECL_ARGUMENTS (function), true);

  /* Output DECL_INITIAL for the function, which contains the tree of
     lexical scopes.  */
  stream_write_tree (ob, DECL_INITIAL (function), true);

  /* We also stream abstract functions where we stream only stuff needed for
     debug info.  A nonzero marker tells the reader a body follows.  */
  if (gimple_has_body_p (function))
    {
      streamer_write_uhwi (ob, 1);
      output_struct_function_base (ob, fn);

      /* Output all the SSA names used in the function.  */
      output_ssa_names (ob, fn);

      /* Output any exception handling regions.  */
      output_eh_regions (ob, fn);


      /* We will renumber the statements.  The code that does this uses
	 the same ordering that we use for serializing them so we can use
	 the same code on the other end and not have to write out the
	 statement numbers.  We do not assign UIDs to PHIs here because
	 virtual PHIs get re-computed on-the-fly which would make numbers
	 inconsistent.  */
      set_gimple_stmt_max_uid (cfun, 0);
      FOR_ALL_BB_FN (bb, cfun)
	{
	  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gphi *stmt = gsi.phi ();

	      /* Virtual PHIs are not going to be streamed.  */
	      if (!virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gimple *stmt = gsi_stmt (gsi);
	      gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}
      /* To avoid keeping duplicate gimple IDs in the statements, renumber
	 virtual phis now.  */
      FOR_ALL_BB_FN (bb, cfun)
	{
	  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gphi *stmt = gsi.phi ();
	      if (virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}

      /* Output the code for the function.  */
      FOR_ALL_BB_FN (bb, fn)
	output_bb (ob, bb, fn);

      /* The terminator for this function.  */
      streamer_write_record_start (ob, LTO_null);

      output_cfg (ob, fn);

      pop_cfun ();
   }
  else
    streamer_write_uhwi (ob, 0);

  /* Create a section to hold the pickled output of this function.  */
  produce_asm (ob, function);

  destroy_output_block (ob);
}
2110
2111 /* Output the body of function NODE->DECL. */
2112
2113 static void
2114 output_constructor (struct varpool_node *node)
2115 {
2116 tree var = node->decl;
2117 struct output_block *ob;
2118
2119 ob = create_output_block (LTO_section_function_body);
2120
2121 clear_line_info (ob);
2122 ob->symbol = node;
2123
2124 /* Make string 0 be a NULL string. */
2125 streamer_write_char_stream (ob->string_stream, 0);
2126
2127 /* Output DECL_INITIAL for the function, which contains the tree of
2128 lexical scopes. */
2129 stream_write_tree (ob, DECL_INITIAL (var), true);
2130
2131 /* Create a section to hold the pickled output of this function. */
2132 produce_asm (ob, var);
2133
2134 destroy_output_block (ob);
2135 }
2136
2137
2138 /* Emit toplevel asms. */
2139
2140 void
2141 lto_output_toplevel_asms (void)
2142 {
2143 struct output_block *ob;
2144 struct asm_node *can;
2145 char *section_name;
2146 struct lto_simple_header_with_strings header;
2147
2148 if (!symtab->first_asm_symbol ())
2149 return;
2150
2151 ob = create_output_block (LTO_section_asm);
2152
2153 /* Make string 0 be a NULL string. */
2154 streamer_write_char_stream (ob->string_stream, 0);
2155
2156 for (can = symtab->first_asm_symbol (); can; can = can->next)
2157 {
2158 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2159 streamer_write_hwi (ob, can->order);
2160 }
2161
2162 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2163
2164 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
2165 lto_begin_section (section_name, !flag_wpa);
2166 free (section_name);
2167
2168 /* The entire header stream is computed here. */
2169 memset (&header, 0, sizeof (header));
2170
2171 /* Write the header. */
2172 header.major_version = LTO_major_version;
2173 header.minor_version = LTO_minor_version;
2174
2175 header.main_size = ob->main_stream->total_size;
2176 header.string_size = ob->string_stream->total_size;
2177 lto_write_data (&header, sizeof header);
2178
2179 /* Put all of the gimple and the string table out the asm file as a
2180 block of text. */
2181 lto_write_stream (ob->main_stream);
2182 lto_write_stream (ob->string_stream);
2183
2184 lto_end_section ();
2185
2186 destroy_output_block (ob);
2187 }
2188
2189
2190 /* Copy the function body or variable constructor of NODE without deserializing. */
2191
2192 static void
2193 copy_function_or_variable (struct symtab_node *node)
2194 {
2195 tree function = node->decl;
2196 struct lto_file_decl_data *file_data = node->lto_file_data;
2197 const char *data;
2198 size_t len;
2199 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2200 char *section_name =
2201 lto_get_section_name (LTO_section_function_body, name, NULL);
2202 size_t i, j;
2203 struct lto_in_decl_state *in_state;
2204 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2205
2206 lto_begin_section (section_name, !flag_wpa);
2207 free (section_name);
2208
2209 /* We may have renamed the declaration, e.g., a static function. */
2210 name = lto_get_decl_name_mapping (file_data, name);
2211
2212 data = lto_get_section_data (file_data, LTO_section_function_body,
2213 name, &len);
2214 gcc_assert (data);
2215
2216 /* Do a bit copy of the function body. */
2217 lto_write_data (data, len);
2218
2219 /* Copy decls. */
2220 in_state =
2221 lto_get_function_in_decl_state (node->lto_file_data, function);
2222 gcc_assert (in_state);
2223
2224 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2225 {
2226 size_t n = vec_safe_length (in_state->streams[i]);
2227 vec<tree, va_gc> *trees = in_state->streams[i];
2228 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2229
2230 /* The out state must have the same indices and the in state.
2231 So just copy the vector. All the encoders in the in state
2232 must be empty where we reach here. */
2233 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2234 encoder->trees.reserve_exact (n);
2235 for (j = 0; j < n; j++)
2236 encoder->trees.safe_push ((*trees)[j]);
2237 }
2238
2239 lto_free_section_data (file_data, LTO_section_function_body, name,
2240 data, len);
2241 lto_end_section ();
2242 }
2243
2244 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2245
2246 static tree
2247 wrap_refs (tree *tp, int *ws, void *)
2248 {
2249 tree t = *tp;
2250 if (handled_component_p (t)
2251 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL)
2252 {
2253 tree decl = TREE_OPERAND (t, 0);
2254 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2255 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2256 build1 (ADDR_EXPR, ptrtype, decl),
2257 build_int_cst (ptrtype, 0));
2258 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2259 *ws = 0;
2260 }
2261 else if (TREE_CODE (t) == CONSTRUCTOR)
2262 ;
2263 else if (!EXPR_P (t))
2264 *ws = 0;
2265 return NULL_TREE;
2266 }
2267
2268 /* Main entry point from the pass manager. */
2269
2270 void
2271 lto_output (void)
2272 {
2273 struct lto_out_decl_state *decl_state;
2274 bitmap output = NULL;
2275 int i, n_nodes;
2276 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2277
2278 if (flag_checking)
2279 output = lto_bitmap_alloc ();
2280
2281 /* Initialize the streamer. */
2282 lto_streamer_init ();
2283
2284 n_nodes = lto_symtab_encoder_size (encoder);
2285 /* Process only the functions with bodies. */
2286 for (i = 0; i < n_nodes; i++)
2287 {
2288 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2289 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2290 {
2291 if (lto_symtab_encoder_encode_body_p (encoder, node)
2292 && !node->alias)
2293 {
2294 if (flag_checking)
2295 {
2296 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2297 bitmap_set_bit (output, DECL_UID (node->decl));
2298 }
2299 decl_state = lto_new_out_decl_state ();
2300 lto_push_out_decl_state (decl_state);
2301 if (gimple_has_body_p (node->decl) || !flag_wpa
2302 /* Thunks have no body but they may be synthetized
2303 at WPA time. */
2304 || DECL_ARGUMENTS (node->decl))
2305 output_function (node);
2306 else
2307 copy_function_or_variable (node);
2308 gcc_assert (lto_get_out_decl_state () == decl_state);
2309 lto_pop_out_decl_state ();
2310 lto_record_function_out_decl_state (node->decl, decl_state);
2311 }
2312 }
2313 else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2314 {
2315 /* Wrap symbol references inside the ctor in a type
2316 preserving MEM_REF. */
2317 tree ctor = DECL_INITIAL (node->decl);
2318 if (ctor && !in_lto_p)
2319 walk_tree (&ctor, wrap_refs, NULL, NULL);
2320 if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2321 && lto_symtab_encoder_encode_initializer_p (encoder, node)
2322 && !node->alias)
2323 {
2324 timevar_push (TV_IPA_LTO_CTORS_OUT);
2325 if (flag_checking)
2326 {
2327 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2328 bitmap_set_bit (output, DECL_UID (node->decl));
2329 }
2330 decl_state = lto_new_out_decl_state ();
2331 lto_push_out_decl_state (decl_state);
2332 if (DECL_INITIAL (node->decl) != error_mark_node
2333 || !flag_wpa)
2334 output_constructor (node);
2335 else
2336 copy_function_or_variable (node);
2337 gcc_assert (lto_get_out_decl_state () == decl_state);
2338 lto_pop_out_decl_state ();
2339 lto_record_function_out_decl_state (node->decl, decl_state);
2340 timevar_pop (TV_IPA_LTO_CTORS_OUT);
2341 }
2342 }
2343 }
2344
2345 /* Emit the callgraph after emitting function bodies. This needs to
2346 be done now to make sure that all the statements in every function
2347 have been renumbered so that edges can be associated with call
2348 statements using the statement UIDs. */
2349 output_symtab ();
2350
2351 output_offload_tables ();
2352
2353 #if CHECKING_P
2354 lto_bitmap_free (output);
2355 #endif
2356 }
2357
2358 /* Write each node in encoded by ENCODER to OB, as well as those reachable
2359 from it and required for correct representation of its semantics.
2360 Each node in ENCODER must be a global declaration or a type. A node
2361 is written only once, even if it appears multiple times in the
2362 vector. Certain transitively-reachable nodes, such as those
2363 representing expressions, may be duplicated, but such nodes
2364 must not appear in ENCODER itself. */
2365
2366 static void
2367 write_global_stream (struct output_block *ob,
2368 struct lto_tree_ref_encoder *encoder)
2369 {
2370 tree t;
2371 size_t index;
2372 const size_t size = lto_tree_ref_encoder_size (encoder);
2373
2374 for (index = 0; index < size; index++)
2375 {
2376 t = lto_tree_ref_encoder_get_tree (encoder, index);
2377 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2378 stream_write_tree (ob, t, false);
2379 }
2380 }
2381
2382
2383 /* Write a sequence of indices into the globals vector corresponding
2384 to the trees in ENCODER. These are used by the reader to map the
2385 indices used to refer to global entities within function bodies to
2386 their referents. */
2387
2388 static void
2389 write_global_references (struct output_block *ob,
2390 struct lto_tree_ref_encoder *encoder)
2391 {
2392 tree t;
2393 uint32_t index;
2394 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2395
2396 /* Write size and slot indexes as 32-bit unsigned numbers. */
2397 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2398 data[0] = size;
2399
2400 for (index = 0; index < size; index++)
2401 {
2402 uint32_t slot_num;
2403
2404 t = lto_tree_ref_encoder_get_tree (encoder, index);
2405 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2406 gcc_assert (slot_num != (unsigned)-1);
2407 data[index + 1] = slot_num;
2408 }
2409
2410 lto_write_data (data, sizeof (int32_t) * (size + 1));
2411 free (data);
2412 }
2413
2414
2415 /* Write all the streams in an lto_out_decl_state STATE using
2416 output block OB and output stream OUT_STREAM. */
2417
2418 void
2419 lto_output_decl_state_streams (struct output_block *ob,
2420 struct lto_out_decl_state *state)
2421 {
2422 int i;
2423
2424 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2425 write_global_stream (ob, &state->streams[i]);
2426 }
2427
2428
2429 /* Write all the references in an lto_out_decl_state STATE using
2430 output block OB and output stream OUT_STREAM. */
2431
2432 void
2433 lto_output_decl_state_refs (struct output_block *ob,
2434 struct lto_out_decl_state *state)
2435 {
2436 unsigned i;
2437 uint32_t ref;
2438 tree decl;
2439
2440 /* Write reference to FUNCTION_DECL. If there is not function,
2441 write reference to void_type_node. */
2442 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2443 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2444 gcc_assert (ref != (unsigned)-1);
2445 lto_write_data (&ref, sizeof (uint32_t));
2446
2447 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2448 write_global_references (ob, &state->streams[i]);
2449 }
2450
2451
2452 /* Return the written size of STATE. */
2453
2454 static size_t
2455 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2456 {
2457 int i;
2458 size_t size;
2459
2460 size = sizeof (int32_t); /* fn_ref. */
2461 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2462 {
2463 size += sizeof (int32_t); /* vector size. */
2464 size += (lto_tree_ref_encoder_size (&state->streams[i])
2465 * sizeof (int32_t));
2466 }
2467 return size;
2468 }
2469
2470
/* Write symbol T into STREAM in CACHE.  SEEN specifies symbols we wrote
   so far.  ALIAS is true when T is emitted on behalf of an alias, which
   relaxes the "definition must have a symtab node" assertions below.
   The record layout is: NUL-terminated name, NUL-terminated comdat
   group, 1-byte kind, 1-byte visibility, 8-byte size, 4-byte slot.  */

static void
write_symbol (struct streamer_tree_cache_d *cache,
	      tree t, hash_set<const char *> *seen, bool alias)
{
  const char *name;
  enum gcc_plugin_symbol_kind kind;
  enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
  unsigned slot_num;
  uint64_t size;
  const char *comdat;
  unsigned char c;

  /* None of the following kinds of symbols are needed in the
     symbol table.  */
  if (!TREE_PUBLIC (t)
      || is_builtin_fn (t)
      || DECL_ABSTRACT_P (t)
      || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
    return;
  gcc_assert (TREE_CODE (t) != RESULT_DECL);

  gcc_assert (TREE_CODE (t) == VAR_DECL
	      || TREE_CODE (t) == FUNCTION_DECL);

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));

  /* This behaves like assemble_name_raw in varasm.c, performing the
     same name manipulations that ASM_OUTPUT_LABELREF does.  */
  name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));

  /* Emit each name at most once; hash_set::add returns true when the
     name was already present.  */
  if (seen->add (name))
    return;

  streamer_tree_cache_lookup (cache, t, &slot_num);
  gcc_assert (slot_num != (unsigned)-1);

  /* Classify the symbol for the linker plugin: undefined (possibly
     weak) when external, otherwise weak/common/plain definition.  */
  if (DECL_EXTERNAL (t))
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKUNDEF;
      else
	kind = GCCPK_UNDEF;
    }
  else
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKDEF;
      else if (DECL_COMMON (t))
	kind = GCCPK_COMMON;
      else
	kind = GCCPK_DEF;

      /* When something is defined, it should have node attached.  */
      gcc_assert (alias || TREE_CODE (t) != VAR_DECL
		  || varpool_node::get (t)->definition);
      gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
		  || (cgraph_node::get (t)
		      && cgraph_node::get (t)->definition));
    }

  /* Imitate what default_elf_asm_output_external do.
     When symbol is external, we need to output it with DEFAULT visibility
     when compiling with -fvisibility=default, while with HIDDEN visibility
     when symbol has attribute (visibility("hidden")) specified.
     targetm.binds_local_p check DECL_VISIBILITY_SPECIFIED and gets this
     right.  */

  if (DECL_EXTERNAL (t)
      && !targetm.binds_local_p (t))
    visibility = GCCPV_DEFAULT;
  else
    switch (DECL_VISIBILITY (t))
      {
      case VISIBILITY_DEFAULT:
	visibility = GCCPV_DEFAULT;
	break;
      case VISIBILITY_PROTECTED:
	visibility = GCCPV_PROTECTED;
	break;
      case VISIBILITY_HIDDEN:
	visibility = GCCPV_HIDDEN;
	break;
      case VISIBILITY_INTERNAL:
	visibility = GCCPV_INTERNAL;
	break;
      }

  /* Only common symbols carry a size (so the linker can keep the
     largest copy); everything else streams size 0.  */
  if (kind == GCCPK_COMMON
      && DECL_SIZE_UNIT (t)
      && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
    size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
  else
    size = 0;

  if (DECL_ONE_ONLY (t))
    comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
  else
    comdat = "";

  /* Emit the fixed-layout record described in the function comment.  */
  lto_write_data (name, strlen (name) + 1);
  lto_write_data (comdat, strlen (comdat) + 1);
  c = (unsigned char) kind;
  lto_write_data (&c, 1);
  c = (unsigned char) visibility;
  lto_write_data (&c, 1);
  lto_write_data (&size, 8);
  lto_write_data (&slot_num, 4);
}
2582
2583 /* Return true if NODE should appear in the plugin symbol table. */
2584
2585 bool
2586 output_symbol_p (symtab_node *node)
2587 {
2588 struct cgraph_node *cnode;
2589 if (!node->real_symbol_p ())
2590 return false;
2591 /* We keep external functions in symtab for sake of inlining
2592 and devirtualization. We do not want to see them in symbol table as
2593 references unless they are really used. */
2594 cnode = dyn_cast <cgraph_node *> (node);
2595 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2596 && cnode->callers)
2597 return true;
2598
2599 /* Ignore all references from external vars initializers - they are not really
2600 part of the compilation unit until they are used by folding. Some symbols,
2601 like references to external construction vtables can not be referred to at all.
2602 We decide this at can_refer_decl_in_current_unit_p. */
2603 if (!node->definition || DECL_EXTERNAL (node->decl))
2604 {
2605 int i;
2606 struct ipa_ref *ref;
2607 for (i = 0; node->iterate_referring (i, ref); i++)
2608 {
2609 if (ref->use == IPA_REF_ALIAS)
2610 continue;
2611 if (is_a <cgraph_node *> (ref->referring))
2612 return true;
2613 if (!DECL_EXTERNAL (ref->referring->decl))
2614 return true;
2615 }
2616 return false;
2617 }
2618 return true;
2619 }
2620
2621
2622 /* Write an IL symbol table to OB.
2623 SET and VSET are cgraph/varpool node sets we are outputting. */
2624
2625 static void
2626 produce_symtab (struct output_block *ob)
2627 {
2628 struct streamer_tree_cache_d *cache = ob->writer_cache;
2629 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2630 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2631 lto_symtab_encoder_iterator lsei;
2632
2633 lto_begin_section (section_name, false);
2634 free (section_name);
2635
2636 hash_set<const char *> seen;
2637
2638 /* Write the symbol table.
2639 First write everything defined and then all declarations.
2640 This is necessary to handle cases where we have duplicated symbols. */
2641 for (lsei = lsei_start (encoder);
2642 !lsei_end_p (lsei); lsei_next (&lsei))
2643 {
2644 symtab_node *node = lsei_node (lsei);
2645
2646 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2647 continue;
2648 write_symbol (cache, node->decl, &seen, false);
2649 }
2650 for (lsei = lsei_start (encoder);
2651 !lsei_end_p (lsei); lsei_next (&lsei))
2652 {
2653 symtab_node *node = lsei_node (lsei);
2654
2655 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2656 continue;
2657 write_symbol (cache, node->decl, &seen, false);
2658 }
2659
2660 lto_end_section ();
2661 }
2662
2663
2664 /* Init the streamer_mode_table for output, where we collect info on what
2665 machine_mode values have been streamed. */
2666 void
2667 lto_output_init_mode_table (void)
2668 {
2669 memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
2670 }
2671
2672
/* Write the mode table: a bitpacked description of every machine_mode
   recorded in streamer_mode_table, terminated by VOIDmode, followed by
   a simple section header and the streams.  Used for offload targets
   whose mode numbering differs from the host's.  */
static void
lto_write_mode_table (void)
{
  struct output_block *ob;
  ob = create_output_block (LTO_section_mode_table);
  bitpack_d bp = bitpack_create (ob->main_stream);

  /* Ensure that for GET_MODE_INNER (m) != m we have
     also the inner mode marked.  */
  for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
    if (streamer_mode_table[i])
      {
	machine_mode m = (machine_mode) i;
	if (GET_MODE_INNER (m) != m)
	  streamer_mode_table[(int) GET_MODE_INNER (m)] = 1;
      }
  /* First stream modes that have GET_MODE_INNER (m) == m,
     so that we can refer to them afterwards.  VOIDmode and BLKmode
     are never streamed; VOIDmode doubles as the terminator below.  */
  for (int pass = 0; pass < 2; pass++)
    for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
      if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
	{
	  machine_mode m = (machine_mode) i;
	  if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
	    continue;
	  /* Per-mode record: id, class, size, precision, inner mode,
	     number of units, then class-specific extras and the name.  */
	  bp_pack_value (&bp, m, 8);
	  bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
	  bp_pack_value (&bp, GET_MODE_SIZE (m), 8);
	  bp_pack_value (&bp, GET_MODE_PRECISION (m), 16);
	  bp_pack_value (&bp, GET_MODE_INNER (m), 8);
	  bp_pack_value (&bp, GET_MODE_NUNITS (m), 8);
	  switch (GET_MODE_CLASS (m))
	    {
	    case MODE_FRACT:
	    case MODE_UFRACT:
	    case MODE_ACCUM:
	    case MODE_UACCUM:
	      /* Fixed-point modes carry their integral/fractional bits.  */
	      bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
	      bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
	      break;
	    case MODE_FLOAT:
	    case MODE_DECIMAL_FLOAT:
	      /* Float modes are identified by their real format name.  */
	      bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
	      break;
	    default:
	      break;
	    }
	  bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
	}
  /* VOIDmode terminates the mode list.  */
  bp_pack_value (&bp, VOIDmode, 8);

  streamer_write_bitpack (&bp);

  char *section_name
    = lto_get_section_name (LTO_section_mode_table, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header stream is computed here.  */
  struct lto_simple_header_with_strings header;
  memset (&header, 0, sizeof (header));

  /* Write the header.  */
  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;
  lto_write_data (&header, sizeof header);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();
  destroy_output_block (ob);
}
2752
2753
/* This pass is run after all of the functions are serialized and all
   of the IPA passes have written their serialized forms.  This pass
   causes the vector of all of the global decls and types used from
   this file to be written in to a section that can then be read in to
   recover these on other side.  */

void
produce_asm_for_decls (void)
{
  struct lto_out_decl_state *out_state;
  struct lto_out_decl_state *fn_out_state;
  struct lto_decl_header header;
  char *section_name;
  struct output_block *ob;
  unsigned idx, num_fns;
  size_t decl_state_size;
  int32_t num_decl_states;

  ob = create_output_block (LTO_section_decls);

  memset (&header, 0, sizeof (struct lto_decl_header));

  section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* All alias pairs must have been resolved by now.  */
  gcc_assert (!alias_pairs);

  /* Get rid of the global decl state hash tables to save some memory.  */
  out_state = lto_get_out_decl_state ();
  for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
    if (out_state->streams[i].tree_hash_table)
      {
	delete out_state->streams[i].tree_hash_table;
	out_state->streams[i].tree_hash_table = NULL;
      }

  /* Write the global symbols.  The main decl state streams first, then
     the per-function decl states in order.  */
  lto_output_decl_state_streams (ob, out_state);
  num_fns = lto_function_decl_states.length ();
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_streams (ob, fn_out_state);
    }

  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  /* Currently not used.  This field would allow us to preallocate
     the globals vector, so that it need not be resized as it is extended.  */
  header.num_nodes = -1;

  /* Compute the total size of all decl out states.  Must mirror the
     writes below: the count word plus each state's written size.  */
  decl_state_size = sizeof (int32_t);
  decl_state_size += lto_out_decl_state_written_size (out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      decl_state_size += lto_out_decl_state_written_size (fn_out_state);
    }
  header.decl_state_size = decl_state_size;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  lto_write_data (&header, sizeof header);

  /* Write the main out-decl state, followed by out-decl states of
     functions.  */
  num_decl_states = num_fns + 1;
  lto_write_data (&num_decl_states, sizeof (num_decl_states));
  lto_output_decl_state_refs (ob, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state = lto_function_decl_states[idx];
      lto_output_decl_state_refs (ob, fn_out_state);
    }

  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  /* Write the symbol table.  It is used by linker to determine dependencies
     and thus we can skip it for WPA.  */
  if (!flag_wpa)
    produce_symtab (ob);

  /* Write command line opts.  */
  lto_write_options ();

  /* Deallocate memory and clean up.  */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_delete_out_decl_state (fn_out_state);
    }
  lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
  lto_function_decl_states.release ();
  destroy_output_block (ob);
  /* The mode table is only needed when streaming to offload targets.  */
  if (lto_stream_offload_p)
    lto_write_mode_table ();
}