lto-streamer-out.c (DFS::DFS_write_tree_body): Do not stream BINFO_BASE_ACCESSES...
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2018 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "gimple-streamer.h"
34 #include "alias.h"
35 #include "stor-layout.h"
36 #include "gimple-iterator.h"
37 #include "except.h"
38 #include "lto-symtab.h"
39 #include "cgraph.h"
40 #include "cfgloop.h"
41 #include "builtins.h"
42 #include "gomp-constants.h"
43 #include "debug.h"
44 #include "omp-offload.h"
45 #include "print-tree.h"
46
47
48 static void lto_write_tree (struct output_block*, tree, bool);
49
50 /* Clear the line info stored in OB. */
51
52 static void
53 clear_line_info (struct output_block *ob)
54 {
55 ob->current_file = NULL;
56 ob->current_line = 0;
57 ob->current_col = 0;
58 ob->current_sysp = false;
59 }
60
61
62 /* Create the output block and return it. SECTION_TYPE is
63 LTO_section_function_body or LTO_section_static_initializer. */
64
65 struct output_block *
66 create_output_block (enum lto_section_type section_type)
67 {
68 struct output_block *ob = XCNEW (struct output_block);
69 if (streamer_dump_file)
70 fprintf (streamer_dump_file, "Creating output block for %s\n",
71 lto_section_name [section_type]);
72
73 ob->section_type = section_type;
74 ob->decl_state = lto_get_out_decl_state ();
75 ob->main_stream = XCNEW (struct lto_output_stream);
76 ob->string_stream = XCNEW (struct lto_output_stream);
77 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
78
79 if (section_type == LTO_section_function_body)
80 ob->cfg_stream = XCNEW (struct lto_output_stream);
81
82 clear_line_info (ob);
83
84 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
85 gcc_obstack_init (&ob->obstack);
86
87 return ob;
88 }
89
90
91 /* Destroy the output block OB. */
92
93 void
94 destroy_output_block (struct output_block *ob)
95 {
96 enum lto_section_type section_type = ob->section_type;
97
98 delete ob->string_hash_table;
99 ob->string_hash_table = NULL;
100
101 free (ob->main_stream);
102 free (ob->string_stream);
103 if (section_type == LTO_section_function_body)
104 free (ob->cfg_stream);
105
106 streamer_tree_cache_delete (ob->writer_cache);
107 obstack_free (&ob->obstack, NULL);
108
109 free (ob);
110 }
111
112
113 /* Look up NODE in the type table and write the index for it to OB. */
114
115 static void
116 output_type_ref (struct output_block *ob, tree node)
117 {
118 streamer_write_record_start (ob, LTO_type_ref);
119 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
120 }
121
122
123 /* Return true if tree node T is written to various tables. For these
124 nodes, we sometimes want to write their physical representation
125 (via lto_output_tree), and sometimes we need to emit an index
126 reference into a table (via lto_output_tree_ref). */
127
128 static bool
129 tree_is_indexable (tree t)
130 {
131 /* Parameters and return values of functions of variably modified types
132 must go to the global stream, because they may be used in the type
133 definition. */
134 if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
135 && DECL_CONTEXT (t))
136 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
137 /* IMPORTED_DECL is put into BLOCK and thus it never can be shared. */
138 else if (TREE_CODE (t) == IMPORTED_DECL)
139 return false;
140 else if (((VAR_P (t) && !TREE_STATIC (t))
141 || TREE_CODE (t) == TYPE_DECL
142 || TREE_CODE (t) == CONST_DECL
143 || TREE_CODE (t) == NAMELIST_DECL)
144 && decl_function_context (t))
145 return false;
146 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
147 return false;
148 /* Variably modified types need to be streamed alongside function
149 bodies because they can refer to local entities. Together with
150 them we have to localize their members as well.
151 ??? In theory that includes non-FIELD_DECLs as well. */
152 else if (TYPE_P (t)
153 && variably_modified_type_p (t, NULL_TREE))
154 return false;
155 else if (TREE_CODE (t) == FIELD_DECL
156 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
157 return false;
158 else
159 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
160 }
161
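/* Editorial sketch, illustrative only (not part of the original source):
   for a file-scope "static int x = 1;" the VAR_DECL is indexable, so it is
   streamed as a reference into the global decl stream.  A non-static local
   variable (one with a function context) is not indexable and gets pickled
   together with the function body.  By default, types, most DECLs and
   SSA_NAMEs fall into the indexable category above.  */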
162
163 /* Output info about the new location LOC into bitpack BP.
164 After the bitpack has been output, lto_output_location_data has
165 to be called to output the actual data. */
166
167 void
168 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
169 location_t loc)
170 {
171 expanded_location xloc;
172
173 loc = LOCATION_LOCUS (loc);
174 bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT,
175 loc < RESERVED_LOCATION_COUNT
176 ? loc : RESERVED_LOCATION_COUNT);
177 if (loc < RESERVED_LOCATION_COUNT)
178 return;
179
180 xloc = expand_location (loc);
181
182 bp_pack_value (bp, ob->current_file != xloc.file, 1);
183 bp_pack_value (bp, ob->current_line != xloc.line, 1);
184 bp_pack_value (bp, ob->current_col != xloc.column, 1);
185
186 if (ob->current_file != xloc.file)
187 {
188 bp_pack_string (ob, bp, xloc.file, true);
189 bp_pack_value (bp, xloc.sysp, 1);
190 }
191 ob->current_file = xloc.file;
192 ob->current_sysp = xloc.sysp;
193
194 if (ob->current_line != xloc.line)
195 bp_pack_var_len_unsigned (bp, xloc.line);
196 ob->current_line = xloc.line;
197
198 if (ob->current_col != xloc.column)
199 bp_pack_var_len_unsigned (bp, xloc.column);
200 ob->current_col = xloc.column;
201 }
202
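/* Editorial sketch, illustrative only (hypothetical file/line numbers):
   the function above delta-encodes locations against the state cached in
   OB.  Assuming the previous location was foo.c:10:3 and the new one is
   foo.c:12:3 (a non-reserved location), the bitpack roughly contains

     file-changed   = 0
     line-changed   = 1
     column-changed = 0
     line           = 12   (variable-length)

   i.e. only the "changed" bits and the new line number are streamed; the
   unchanged file name and column are not repeated.  */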
203
204 /* If EXPR is an indexable tree node, output a reference to it to
205 output block OB. Otherwise, output the physical representation of
206 EXPR to OB. */
207
208 static void
209 lto_output_tree_ref (struct output_block *ob, tree expr)
210 {
211 enum tree_code code;
212
213 if (TYPE_P (expr))
214 {
215 output_type_ref (ob, expr);
216 return;
217 }
218
219 code = TREE_CODE (expr);
220 switch (code)
221 {
222 case SSA_NAME:
223 streamer_write_record_start (ob, LTO_ssa_name_ref);
224 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
225 break;
226
227 case FIELD_DECL:
228 streamer_write_record_start (ob, LTO_field_decl_ref);
229 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
230 break;
231
232 case FUNCTION_DECL:
233 streamer_write_record_start (ob, LTO_function_decl_ref);
234 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
235 break;
236
237 case VAR_DECL:
238 case DEBUG_EXPR_DECL:
239 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
240 /* FALLTHRU */
241 case PARM_DECL:
242 streamer_write_record_start (ob, LTO_global_decl_ref);
243 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
244 break;
245
246 case CONST_DECL:
247 streamer_write_record_start (ob, LTO_const_decl_ref);
248 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
249 break;
250
251 case IMPORTED_DECL:
252 gcc_assert (decl_function_context (expr) == NULL);
253 streamer_write_record_start (ob, LTO_imported_decl_ref);
254 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
255 break;
256
257 case TYPE_DECL:
258 streamer_write_record_start (ob, LTO_type_decl_ref);
259 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
260 break;
261
262 case NAMELIST_DECL:
263 streamer_write_record_start (ob, LTO_namelist_decl_ref);
264 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
265 break;
266
267 case NAMESPACE_DECL:
268 streamer_write_record_start (ob, LTO_namespace_decl_ref);
269 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
270 break;
271
272 case LABEL_DECL:
273 streamer_write_record_start (ob, LTO_label_decl_ref);
274 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
275 break;
276
277 case RESULT_DECL:
278 streamer_write_record_start (ob, LTO_result_decl_ref);
279 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
280 break;
281
282 case TRANSLATION_UNIT_DECL:
283 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
284 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
285 break;
286
287 default:
288 /* No other node is indexable, so it should have been handled by
289 lto_output_tree. */
290 gcc_unreachable ();
291 }
292 }
293
294
295 /* Return true if EXPR is a tree node that can be written to disk. */
296
297 static inline bool
298 lto_is_streamable (tree expr)
299 {
300 enum tree_code code = TREE_CODE (expr);
301
302 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
303 name version in lto_output_tree_ref (see output_ssa_names). */
304 return !is_lang_specific (expr)
305 && code != SSA_NAME
306 && code != CALL_EXPR
307 && code != LANG_TYPE
308 && code != MODIFY_EXPR
309 && code != INIT_EXPR
310 && code != TARGET_EXPR
311 && code != BIND_EXPR
312 && code != WITH_CLEANUP_EXPR
313 && code != STATEMENT_LIST
314 && (code == CASE_LABEL_EXPR
315 || code == DECL_EXPR
316 || TREE_CODE_CLASS (code) != tcc_statement);
317 }
318
319 /* Very rough estimate of the streaming size of the initializer. If we ignored
320 the presence of strings, we could simply count the number of non-indexable
321 tree nodes and the number of references to indexable nodes. Strings however
322 may be very large and we do not want to dump them into the global stream.
323
324 Count the size of the initializer until the budget in DATA goes negative. */
325
326 static tree
327 subtract_estimated_size (tree *tp, int *ws, void *data)
328 {
329 long *sum = (long *)data;
330 if (tree_is_indexable (*tp))
331 {
332 /* Indexable tree is one reference to global stream.
333 Guess it may be about 4 bytes. */
334 *sum -= 4;
335 *ws = 0;
336 }
337 /* String table entry + base of tree node needs to be streamed. */
338 if (TREE_CODE (*tp) == STRING_CST)
339 *sum -= TREE_STRING_LENGTH (*tp) + 8;
340 else
341 {
342 /* Identifiers are also variable-length but should not appear
343 naked in a constructor. */
344 gcc_checking_assert (TREE_CODE (*tp) != IDENTIFIER_NODE);
345 /* We do not really attempt to work out the size of the pickled tree, as
346 it is very variable. Just make it bigger than a reference. */
347 *sum -= 16;
348 }
349 if (*sum < 0)
350 return *tp;
351 return NULL_TREE;
352 }
353
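/* Editorial sketch, illustrative only: the estimator above is meant to be
   used as a walk_tree callback with a byte budget passed through DATA,
   much like get_symbol_initial_value does below:

     long max_size = 30;
     if (walk_tree (&initial, subtract_estimated_size, &max_size, NULL))
       initial = error_mark_node;  // budget exceeded, do not stream it

   Each reference to an indexable node costs about 4 bytes, a STRING_CST
   its length plus about 8 bytes, and any other pickled node about 16
   bytes; the walk stops as soon as the budget goes negative.  */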
354
355 /* For EXPR, look up and return what we want to stream as its DECL_INITIAL. */
356
357 static tree
358 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
359 {
360 gcc_checking_assert (DECL_P (expr)
361 && TREE_CODE (expr) != FUNCTION_DECL
362 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
363
364 /* Handle DECL_INITIAL for symbols. */
365 tree initial = DECL_INITIAL (expr);
366 if (VAR_P (expr)
367 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
368 && !DECL_IN_CONSTANT_POOL (expr)
369 && initial)
370 {
371 varpool_node *vnode;
372 /* Extra section needs about 30 bytes; do not produce it for simple
373 scalar values. */
374 if (!(vnode = varpool_node::get (expr))
375 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
376 initial = error_mark_node;
377 if (initial != error_mark_node)
378 {
379 long max_size = 30;
380 if (walk_tree (&initial, subtract_estimated_size, (void *)&max_size,
381 NULL))
382 initial = error_mark_node;
383 }
384 }
385
386 return initial;
387 }
388
389
390 /* Write a physical representation of tree node EXPR to output block
391 OB. If REF_P is true, the leaves of EXPR are emitted as references
392 via lto_output_tree_ref. */
394
395 static void
396 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
397 {
398 /* Pack all the non-pointer fields in EXPR into a bitpack and write
399 the resulting bitpack. */
400 streamer_write_tree_bitfields (ob, expr);
401
402 /* Write all the pointer fields in EXPR. */
403 streamer_write_tree_body (ob, expr, ref_p);
404
405 /* Write any LTO-specific data to OB. */
406 if (DECL_P (expr)
407 && TREE_CODE (expr) != FUNCTION_DECL
408 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
409 {
410 /* Handle DECL_INITIAL for symbols. */
411 tree initial = get_symbol_initial_value
412 (ob->decl_state->symtab_node_encoder, expr);
413 stream_write_tree (ob, initial, ref_p);
414 }
415
416 /* Stream references to early generated DIEs. Keep in sync with the
417 trees handled in dwarf2out_die_ref_for_decl. */
418 if ((DECL_P (expr)
419 && TREE_CODE (expr) != FIELD_DECL
420 && TREE_CODE (expr) != DEBUG_EXPR_DECL
421 && TREE_CODE (expr) != TYPE_DECL)
422 || TREE_CODE (expr) == BLOCK)
423 {
424 const char *sym;
425 unsigned HOST_WIDE_INT off;
426 if (debug_info_level > DINFO_LEVEL_NONE
427 && debug_hooks->die_ref_for_decl (expr, &sym, &off))
428 {
429 streamer_write_string (ob, ob->main_stream, sym, true);
430 streamer_write_uhwi (ob, off);
431 }
432 else
433 streamer_write_string (ob, ob->main_stream, NULL, true);
434 }
435 }
436
437 /* Write a physical representation of tree node EXPR to output block
438 OB. If REF_P is true, the leaves of EXPR are emitted as references
439 via lto_output_tree_ref. */
441
442 static void
443 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
444 {
445 if (!lto_is_streamable (expr))
446 internal_error ("tree code %qs is not supported in LTO streams",
447 get_tree_code_name (TREE_CODE (expr)));
448
449 /* Write the header, containing everything needed to materialize
450 EXPR on the reading side. */
451 streamer_write_tree_header (ob, expr);
452
453 lto_write_tree_1 (ob, expr, ref_p);
454
455 /* Mark the end of EXPR. */
456 streamer_write_zero (ob);
457 }
458
459 /* Emit the physical representation of tree node EXPR to output block OB.
460 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
461 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
462
463 static void
464 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
465 bool ref_p, bool this_ref_p)
466 {
467 unsigned ix;
468
469 gcc_checking_assert (expr != NULL_TREE
470 && !(this_ref_p && tree_is_indexable (expr)));
471
472 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
473 expr, hash, &ix);
474 gcc_assert (!exists_p);
475 if (TREE_CODE (expr) == INTEGER_CST
476 && !TREE_OVERFLOW (expr))
477 {
478 /* Shared INTEGER_CST nodes are special because they need their
479 original type to be materialized by the reader (to implement
480 TYPE_CACHED_VALUES). */
481 streamer_write_integer_cst (ob, expr, ref_p);
482 }
483 else
484 {
485 /* This is the first time we see EXPR, write its fields
486 to OB. */
487 lto_write_tree (ob, expr, ref_p);
488 }
489 }
490
491 class DFS
492 {
493 public:
494 DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
495 bool single_p);
496 ~DFS ();
497
498 struct scc_entry
499 {
500 tree t;
501 hashval_t hash;
502 };
503 vec<scc_entry> sccstack;
504
505 private:
506 struct sccs
507 {
508 unsigned int dfsnum;
509 unsigned int low;
510 };
511 struct worklist
512 {
513 tree expr;
514 sccs *from_state;
515 sccs *cstate;
516 bool ref_p;
517 bool this_ref_p;
518 };
519
520 static int scc_entry_compare (const void *, const void *);
521
522 void DFS_write_tree_body (struct output_block *ob,
523 tree expr, sccs *expr_state, bool ref_p);
524
525 void DFS_write_tree (struct output_block *ob, sccs *from_state,
526 tree expr, bool ref_p, bool this_ref_p);
527
528 hashval_t
529 hash_scc (struct output_block *ob, unsigned first, unsigned size,
530 bool ref_p, bool this_ref_p);
531
532 hash_map<tree, sccs *> sccstate;
533 vec<worklist> worklist_vec;
534 struct obstack sccstate_obstack;
535 };
536
537 /* Emit the physical representation of tree node EXPR to output block OB,
538 using depth-first search on the subgraph. If THIS_REF_P is true, the
539 leaves of EXPR are emitted as references via lto_output_tree_ref.
540 REF_P is used for streaming siblings of EXPR. If SINGLE_P is true,
541 this is for a rewalk of a single leaf SCC. */
542
543 DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
544 bool single_p)
545 {
546 unsigned int next_dfs_num = 1;
547 sccstack.create (0);
548 gcc_obstack_init (&sccstate_obstack);
549 worklist_vec = vNULL;
550 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
551 while (!worklist_vec.is_empty ())
552 {
553 worklist &w = worklist_vec.last ();
554 expr = w.expr;
555 sccs *from_state = w.from_state;
556 sccs *cstate = w.cstate;
557 ref_p = w.ref_p;
558 this_ref_p = w.this_ref_p;
559 if (cstate == NULL)
560 {
561 sccs **slot = &sccstate.get_or_insert (expr);
562 cstate = *slot;
563 if (cstate)
564 {
565 gcc_checking_assert (from_state);
566 if (cstate->dfsnum < from_state->dfsnum)
567 from_state->low = MIN (cstate->dfsnum, from_state->low);
568 worklist_vec.pop ();
569 continue;
570 }
571
572 scc_entry e = { expr, 0 };
573 /* Not yet visited. DFS recurse and push it onto the stack. */
574 *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
575 sccstack.safe_push (e);
576 cstate->dfsnum = next_dfs_num++;
577 cstate->low = cstate->dfsnum;
578 w.cstate = cstate;
579
580 if (TREE_CODE (expr) == INTEGER_CST
581 && !TREE_OVERFLOW (expr))
582 DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
583 else
584 {
585 DFS_write_tree_body (ob, expr, cstate, ref_p);
586
587 /* Walk any LTO-specific edges. */
588 if (DECL_P (expr)
589 && TREE_CODE (expr) != FUNCTION_DECL
590 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
591 {
592 /* Handle DECL_INITIAL for symbols. */
593 tree initial
594 = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
595 expr);
596 DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
597 }
598 }
599 continue;
600 }
601
602 /* See if we found an SCC. */
603 if (cstate->low == cstate->dfsnum)
604 {
605 unsigned first, size;
606 tree x;
607
608 /* If we are re-walking a single leaf SCC just pop it and
609 let the earlier worklist item access the sccstack. */
610 if (single_p)
611 {
612 worklist_vec.pop ();
613 continue;
614 }
615
616 /* Pop the SCC and compute its size. */
617 first = sccstack.length ();
618 do
619 {
620 x = sccstack[--first].t;
621 }
622 while (x != expr);
623 size = sccstack.length () - first;
624
625 /* No need to compute hashes for LTRANS units; we don't perform
626 any merging there. */
627 hashval_t scc_hash = 0;
628 unsigned scc_entry_len = 0;
629 if (!flag_wpa)
630 {
631 scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);
632
633 /* Put the entries with the least number of collisions first. */
634 unsigned entry_start = 0;
635 scc_entry_len = size + 1;
636 for (unsigned i = 0; i < size;)
637 {
638 unsigned from = i;
639 for (i = i + 1; i < size
640 && (sccstack[first + i].hash
641 == sccstack[first + from].hash); ++i)
642 ;
643 if (i - from < scc_entry_len)
644 {
645 scc_entry_len = i - from;
646 entry_start = from;
647 }
648 }
649 for (unsigned i = 0; i < scc_entry_len; ++i)
650 std::swap (sccstack[first + i],
651 sccstack[first + entry_start + i]);
652
653 /* We already sorted SCC deterministically in hash_scc. */
654
655 /* Check that we have only one SCC.
656 Naturally we may have conflicts if the hash function is not
657 strong enough. Let's see how far this gets. */
658 gcc_checking_assert (scc_entry_len == 1);
659 }
660
661 /* Write LTO_tree_scc. */
662 streamer_write_record_start (ob, LTO_tree_scc);
663 streamer_write_uhwi (ob, size);
664 streamer_write_uhwi (ob, scc_hash);
665
666 /* Write size-1 SCCs without wrapping them inside SCC bundles.
667 All INTEGER_CSTs need to be handled this way as we need
668 their type to materialize them. Also builtins are handled
669 this way.
670 ??? We still wrap these in LTO_tree_scc so at the
671 input side we can properly identify the tree we want
672 to ultimately return. */
673 if (size == 1)
674 lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
675 else
676 {
677 /* Write the size of the SCC entry candidates. */
678 streamer_write_uhwi (ob, scc_entry_len);
679
680 /* Write all headers and populate the streamer cache. */
681 for (unsigned i = 0; i < size; ++i)
682 {
683 hashval_t hash = sccstack[first+i].hash;
684 tree t = sccstack[first+i].t;
685 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
686 t, hash, NULL);
687 gcc_assert (!exists_p);
688
689 if (!lto_is_streamable (t))
690 internal_error ("tree code %qs is not supported "
691 "in LTO streams",
692 get_tree_code_name (TREE_CODE (t)));
693
694 /* Write the header, containing everything needed to
695 materialize EXPR on the reading side. */
696 streamer_write_tree_header (ob, t);
697 }
698
699 /* Write the bitpacks and tree references. */
700 for (unsigned i = 0; i < size; ++i)
701 {
702 lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
703
704 /* Mark the end of the tree. */
705 streamer_write_zero (ob);
706 }
707 }
708
709 /* Finally truncate the vector. */
710 sccstack.truncate (first);
711
712 if (from_state)
713 from_state->low = MIN (from_state->low, cstate->low);
714 worklist_vec.pop ();
715 continue;
716 }
717
718 gcc_checking_assert (from_state);
719 from_state->low = MIN (from_state->low, cstate->low);
720 if (cstate->dfsnum < from_state->dfsnum)
721 from_state->low = MIN (cstate->dfsnum, from_state->low);
722 worklist_vec.pop ();
723 }
724 worklist_vec.release ();
725 }
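/* Editorial sketch, illustrative only: for an SCC of N trees the
   constructor above emits one record of roughly this shape:

     LTO_tree_scc
       size          (uhwi, = N)
       scc_hash      (uhwi; left at 0 when writing LTRANS units)
       if N == 1:    one pickled tree
       if N  > 1:    scc_entry_len (uhwi)
                     N tree headers
                     N tree bodies, each terminated by a zero marker

   Streaming all headers before any body lets the reader materialize the
   N nodes first and only then fill in their mutually recursive pointer
   fields.  */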
726
727 DFS::~DFS ()
728 {
729 sccstack.release ();
730 obstack_free (&sccstate_obstack, NULL);
731 }
732
733 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
734 DFS recurse for all tree edges originating from it. */
735
736 void
737 DFS::DFS_write_tree_body (struct output_block *ob,
738 tree expr, sccs *expr_state, bool ref_p)
739 {
740 #define DFS_follow_tree_edge(DEST) \
741 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
742
743 enum tree_code code;
744
745 if (streamer_dump_file)
746 {
747 print_node_brief (streamer_dump_file, " Streaming ",
748 expr, 4);
749 fprintf (streamer_dump_file, " to %s\n",
750 lto_section_name [ob->section_type]);
751 }
752
753 code = TREE_CODE (expr);
754
755 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
756 {
757 if (TREE_CODE (expr) != IDENTIFIER_NODE)
758 DFS_follow_tree_edge (TREE_TYPE (expr));
759 }
760
761 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
762 {
763 unsigned int count = vector_cst_encoded_nelts (expr);
764 for (unsigned int i = 0; i < count; ++i)
765 DFS_follow_tree_edge (VECTOR_CST_ENCODED_ELT (expr, i));
766 }
767
768 if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
769 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
770 DFS_follow_tree_edge (POLY_INT_CST_COEFF (expr, i));
771
772 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
773 {
774 DFS_follow_tree_edge (TREE_REALPART (expr));
775 DFS_follow_tree_edge (TREE_IMAGPART (expr));
776 }
777
778 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
779 {
780 /* Drop names that were created for anonymous entities. */
781 if (DECL_NAME (expr)
782 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
783 && anon_aggrname_p (DECL_NAME (expr)))
784 ;
785 else
786 DFS_follow_tree_edge (DECL_NAME (expr));
787 if (TREE_CODE (expr) != TRANSLATION_UNIT_DECL
788 && ! DECL_CONTEXT (expr))
789 DFS_follow_tree_edge ((*all_translation_units)[0]);
790 else
791 DFS_follow_tree_edge (DECL_CONTEXT (expr));
792 }
793
794 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
795 {
796 DFS_follow_tree_edge (DECL_SIZE (expr));
797 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
798
799 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
800 special handling in LTO, it must be handled by streamer hooks. */
801
802 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
803
804 /* We use DECL_ABSTRACT_ORIGIN == error_mark_node to mark
805 declarations which should be eliminated by decl merging. Be sure none
806 leaks to this point. */
807 gcc_assert (DECL_ABSTRACT_ORIGIN (expr) != error_mark_node);
808 DFS_follow_tree_edge (DECL_ABSTRACT_ORIGIN (expr));
809
810 if ((VAR_P (expr)
811 || TREE_CODE (expr) == PARM_DECL)
812 && DECL_HAS_VALUE_EXPR_P (expr))
813 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
814 if (VAR_P (expr)
815 && DECL_HAS_DEBUG_EXPR_P (expr))
816 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
817 }
818
819 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
820 {
821 if (TREE_CODE (expr) == TYPE_DECL)
822 DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
823 }
824
825 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
826 {
827 /* Make sure we don't inadvertently set the assembler name. */
828 if (DECL_ASSEMBLER_NAME_SET_P (expr))
829 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
830 }
831
832 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
833 {
834 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
835 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
836 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
837 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
838 DFS_follow_tree_edge (DECL_FCONTEXT (expr));
839 }
840
841 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
842 {
843 DFS_follow_tree_edge (DECL_VINDEX (expr));
844 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
845 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
846 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
847 }
848
849 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
850 {
851 DFS_follow_tree_edge (TYPE_SIZE (expr));
852 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
853 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
854 DFS_follow_tree_edge (TYPE_NAME (expr));
855 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
856 reconstructed during fixup. */
857 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
858 during fixup. */
859 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
860 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
861 /* TYPE_CANONICAL is re-computed during type merging, so no need
862 to follow it here. */
863 DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
864 }
865
866 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
867 {
868 if (TREE_CODE (expr) == ENUMERAL_TYPE)
869 DFS_follow_tree_edge (TYPE_VALUES (expr));
870 else if (TREE_CODE (expr) == ARRAY_TYPE)
871 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
872 else if (RECORD_OR_UNION_TYPE_P (expr))
873 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
874 DFS_follow_tree_edge (t);
875 else if (TREE_CODE (expr) == FUNCTION_TYPE
876 || TREE_CODE (expr) == METHOD_TYPE)
877 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
878
879 if (!POINTER_TYPE_P (expr))
880 DFS_follow_tree_edge (TYPE_MIN_VALUE_RAW (expr));
881 DFS_follow_tree_edge (TYPE_MAX_VALUE_RAW (expr));
882 }
883
884 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
885 {
886 DFS_follow_tree_edge (TREE_PURPOSE (expr));
887 DFS_follow_tree_edge (TREE_VALUE (expr));
888 DFS_follow_tree_edge (TREE_CHAIN (expr));
889 }
890
891 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
892 {
893 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
894 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
895 }
896
897 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
898 {
899 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
900 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
901 DFS_follow_tree_edge (TREE_BLOCK (expr));
902 }
903
904 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
905 {
906 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
907 if (VAR_OR_FUNCTION_DECL_P (t)
908 && DECL_EXTERNAL (t))
909 /* We have to stream externals in the block chain as
910 non-references. See also
911 tree-streamer-out.c:streamer_write_chain. */
912 DFS_write_tree (ob, expr_state, t, ref_p, false);
913 else
914 DFS_follow_tree_edge (t);
915
916 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
917 DFS_follow_tree_edge (BLOCK_ABSTRACT_ORIGIN (expr));
918
919 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
920 information for early inlined BLOCKs so drop it on the floor instead
921 of ICEing in dwarf2out.c. */
922
923 /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
924 streaming time. */
925
926 /* Do not output BLOCK_SUBBLOCKS. Instead on streaming-in this
927 list is re-constructed from BLOCK_SUPERCONTEXT. */
928 }
929
930 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
931 {
932 unsigned i;
933 tree t;
934
935 /* Note that the number of BINFO slots has already been emitted in
936 EXPR's header (see streamer_write_tree_header) because this length
937 is needed to build the empty BINFO node on the reader side. */
938 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
939 DFS_follow_tree_edge (t);
940 DFS_follow_tree_edge (BINFO_OFFSET (expr));
941 DFS_follow_tree_edge (BINFO_VTABLE (expr));
942
943 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX,
944 BINFO_BASE_ACCESSES and BINFO_VPTR_INDEX; these are used
945 by C++ FE only. */
946 }
947
948 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
949 {
950 unsigned i;
951 tree index, value;
952
953 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
954 {
955 DFS_follow_tree_edge (index);
956 DFS_follow_tree_edge (value);
957 }
958 }
959
960 if (code == OMP_CLAUSE)
961 {
962 int i;
963 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
964 DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
965 DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
966 }
967
968 #undef DFS_follow_tree_edge
969 }
970
971 /* Return a hash value for the tree T.
972 CACHE holds hash values of trees outside the current SCC. MAP, if non-NULL,
973 may hold hash values of trees inside the current SCC. */
974
975 static hashval_t
976 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
977 {
978 inchash::hash hstate;
979
980 #define visit(SIBLING) \
981 do { \
982 unsigned ix; \
983 if (!SIBLING) \
984 hstate.add_int (0); \
985 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
986 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
987 else if (map) \
988 hstate.add_int (*map->get (SIBLING)); \
989 else \
990 hstate.add_int (1); \
991 } while (0)
992
993 /* Hash TS_BASE. */
994 enum tree_code code = TREE_CODE (t);
995 hstate.add_int (code);
996 if (!TYPE_P (t))
997 {
998 hstate.add_flag (TREE_SIDE_EFFECTS (t));
999 hstate.add_flag (TREE_CONSTANT (t));
1000 hstate.add_flag (TREE_READONLY (t));
1001 hstate.add_flag (TREE_PUBLIC (t));
1002 }
1003 hstate.add_flag (TREE_ADDRESSABLE (t));
1004 hstate.add_flag (TREE_THIS_VOLATILE (t));
1005 if (DECL_P (t))
1006 hstate.add_flag (DECL_UNSIGNED (t));
1007 else if (TYPE_P (t))
1008 hstate.add_flag (TYPE_UNSIGNED (t));
1009 if (TYPE_P (t))
1010 hstate.add_flag (TYPE_ARTIFICIAL (t));
1011 else
1012 hstate.add_flag (TREE_NO_WARNING (t));
1013 hstate.add_flag (TREE_NOTHROW (t));
1014 hstate.add_flag (TREE_STATIC (t));
1015 hstate.add_flag (TREE_PROTECTED (t));
1016 hstate.add_flag (TREE_DEPRECATED (t));
1017 if (code != TREE_BINFO)
1018 hstate.add_flag (TREE_PRIVATE (t));
1019 if (TYPE_P (t))
1020 {
1021 hstate.add_flag (AGGREGATE_TYPE_P (t)
1022 ? TYPE_REVERSE_STORAGE_ORDER (t) : TYPE_SATURATING (t));
1023 hstate.add_flag (TYPE_ADDR_SPACE (t));
1024 }
1025 else if (code == SSA_NAME)
1026 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
1027 hstate.commit_flag ();
1028
1029 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1030 hstate.add_wide_int (wi::to_widest (t));
1031
1032 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1033 {
1034 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
1035 hstate.add_flag (r.cl);
1036 hstate.add_flag (r.sign);
1037 hstate.add_flag (r.signalling);
1038 hstate.add_flag (r.canonical);
1039 hstate.commit_flag ();
1040 hstate.add_int (r.uexp);
1041 hstate.add (r.sig, sizeof (r.sig));
1042 }
1043
1044 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1045 {
1046 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
1047 hstate.add_int (f.mode);
1048 hstate.add_int (f.data.low);
1049 hstate.add_int (f.data.high);
1050 }
1051
1052 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1053 {
1054 hstate.add_hwi (DECL_MODE (t));
1055 hstate.add_flag (DECL_NONLOCAL (t));
1056 hstate.add_flag (DECL_VIRTUAL_P (t));
1057 hstate.add_flag (DECL_IGNORED_P (t));
1058 hstate.add_flag (DECL_ABSTRACT_P (t));
1059 hstate.add_flag (DECL_ARTIFICIAL (t));
1060 hstate.add_flag (DECL_USER_ALIGN (t));
1061 hstate.add_flag (DECL_PRESERVE_P (t));
1062 hstate.add_flag (DECL_EXTERNAL (t));
1063 hstate.add_flag (DECL_GIMPLE_REG_P (t));
1064 hstate.commit_flag ();
1065 hstate.add_int (DECL_ALIGN (t));
1066 if (code == LABEL_DECL)
1067 {
1068 hstate.add_int (EH_LANDING_PAD_NR (t));
1069 hstate.add_int (LABEL_DECL_UID (t));
1070 }
1071 else if (code == FIELD_DECL)
1072 {
1073 hstate.add_flag (DECL_PACKED (t));
1074 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
1075 hstate.add_flag (DECL_PADDING_P (t));
1076 hstate.add_int (DECL_OFFSET_ALIGN (t));
1077 }
1078 else if (code == VAR_DECL)
1079 {
1080 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
1081 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
1082 }
1083 if (code == RESULT_DECL
1084 || code == PARM_DECL
1085 || code == VAR_DECL)
1086 {
1087 hstate.add_flag (DECL_BY_REFERENCE (t));
1088 if (code == VAR_DECL
1089 || code == PARM_DECL)
1090 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
1091 }
1092 hstate.commit_flag ();
1093 }
1094
1095 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1096 hstate.add_int (DECL_REGISTER (t));
1097
1098 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1099 {
1100 hstate.add_flag (DECL_COMMON (t));
1101 hstate.add_flag (DECL_DLLIMPORT_P (t));
1102 hstate.add_flag (DECL_WEAK (t));
1103 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
1104 hstate.add_flag (DECL_COMDAT (t));
1105 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
1106 hstate.add_int (DECL_VISIBILITY (t));
1107 if (code == VAR_DECL)
1108 {
1109 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1110 hstate.add_flag (DECL_HARD_REGISTER (t));
1111 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
1112 }
1113 if (TREE_CODE (t) == FUNCTION_DECL)
1114 {
1115 hstate.add_flag (DECL_FINAL_P (t));
1116 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
1117 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
1118 }
1119 hstate.commit_flag ();
1120 }
1121
1122 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1123 {
1124 hstate.add_int (DECL_BUILT_IN_CLASS (t));
1125 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
1126 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
1127 hstate.add_flag (DECL_UNINLINABLE (t));
1128 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
1129 hstate.add_flag (DECL_IS_NOVOPS (t));
1130 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
1131 hstate.add_flag (DECL_IS_MALLOC (t));
1132 hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
1133 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
1134 hstate.add_flag (DECL_STATIC_CHAIN (t));
1135 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
1136 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
1137 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
1138 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
1139 hstate.add_flag (DECL_PURE_P (t));
1140 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
1141 hstate.commit_flag ();
1142 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
1143 hstate.add_int (DECL_FUNCTION_CODE (t));
1144 }
1145
1146 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1147 {
1148 hstate.add_hwi (TYPE_MODE (t));
1149 hstate.add_flag (TYPE_STRING_FLAG (t));
1150 /* TYPE_NO_FORCE_BLK is private to stor-layout and needs
1151 no streaming. */
1152 hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
1153 hstate.add_flag (TYPE_PACKED (t));
1154 hstate.add_flag (TYPE_RESTRICT (t));
1155 hstate.add_flag (TYPE_USER_ALIGN (t));
1156 hstate.add_flag (TYPE_READONLY (t));
1157 if (RECORD_OR_UNION_TYPE_P (t))
1158 {
1159 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
1160 hstate.add_flag (TYPE_FINAL_P (t));
1161 }
1162 else if (code == ARRAY_TYPE)
1163 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
1164 if (AGGREGATE_TYPE_P (t))
1165 hstate.add_flag (TYPE_TYPELESS_STORAGE (t));
1166 hstate.commit_flag ();
1167 hstate.add_int (TYPE_PRECISION (t));
1168 hstate.add_int (TYPE_ALIGN (t));
1169 hstate.add_int (TYPE_EMPTY_P (t));
1170 }
1171
1172 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1173 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
1174 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
1175
1176 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
1177 /* We don't stream these when passing things to a different target. */
1178 && !lto_stream_offload_p)
1179 hstate.add_hwi (cl_target_option_hash (TREE_TARGET_OPTION (t)));
1180
1181 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1182 hstate.add_hwi (cl_optimization_hash (TREE_OPTIMIZATION (t)));
1183
1184 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1185 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
1186
1187 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1188 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
1189
1190 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1191 {
1192 if (code != IDENTIFIER_NODE)
1193 visit (TREE_TYPE (t));
1194 }
1195
1196 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1197 {
1198 unsigned int count = vector_cst_encoded_nelts (t);
1199 for (unsigned int i = 0; i < count; ++i)
1200 visit (VECTOR_CST_ENCODED_ELT (t, i));
1201 }
1202
1203 if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
1204 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1205 visit (POLY_INT_CST_COEFF (t, i));
1206
1207 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1208 {
1209 visit (TREE_REALPART (t));
1210 visit (TREE_IMAGPART (t));
1211 }
1212
1213 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1214 {
1215 /* Drop names that were created for anonymous entities. */
1216 if (DECL_NAME (t)
1217 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
1218 && anon_aggrname_p (DECL_NAME (t)))
1219 ;
1220 else
1221 visit (DECL_NAME (t));
1222 if (DECL_FILE_SCOPE_P (t))
1223 ;
1224 else
1225 visit (DECL_CONTEXT (t));
1226 }
1227
1228 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1229 {
1230 visit (DECL_SIZE (t));
1231 visit (DECL_SIZE_UNIT (t));
1232 visit (DECL_ATTRIBUTES (t));
1233 if ((code == VAR_DECL
1234 || code == PARM_DECL)
1235 && DECL_HAS_VALUE_EXPR_P (t))
1236 visit (DECL_VALUE_EXPR (t));
1237 if (code == VAR_DECL
1238 && DECL_HAS_DEBUG_EXPR_P (t))
1239 visit (DECL_DEBUG_EXPR (t));
1240 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
1241 be able to call get_symbol_initial_value. */
1242 }
1243
1244 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1245 {
1246 if (code == TYPE_DECL)
1247 visit (DECL_ORIGINAL_TYPE (t));
1248 }
1249
1250 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1251 {
1252 if (DECL_ASSEMBLER_NAME_SET_P (t))
1253 visit (DECL_ASSEMBLER_NAME (t));
1254 }
1255
1256 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1257 {
1258 visit (DECL_FIELD_OFFSET (t));
1259 visit (DECL_BIT_FIELD_TYPE (t));
1260 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1261 visit (DECL_FIELD_BIT_OFFSET (t));
1262 visit (DECL_FCONTEXT (t));
1263 }
1264
1265 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1266 {
1267 visit (DECL_VINDEX (t));
1268 visit (DECL_FUNCTION_PERSONALITY (t));
1269 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1270 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1271 }
1272
1273 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1274 {
1275 visit (TYPE_SIZE (t));
1276 visit (TYPE_SIZE_UNIT (t));
1277 visit (TYPE_ATTRIBUTES (t));
1278 visit (TYPE_NAME (t));
1279 visit (TYPE_MAIN_VARIANT (t));
1280 if (TYPE_FILE_SCOPE_P (t))
1281 ;
1282 else
1283 visit (TYPE_CONTEXT (t));
1284 visit (TYPE_STUB_DECL (t));
1285 }
1286
1287 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1288 {
1289 if (code == ENUMERAL_TYPE)
1290 visit (TYPE_VALUES (t));
1291 else if (code == ARRAY_TYPE)
1292 visit (TYPE_DOMAIN (t));
1293 else if (RECORD_OR_UNION_TYPE_P (t))
1294 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1295 visit (f);
1296 else if (code == FUNCTION_TYPE
1297 || code == METHOD_TYPE)
1298 visit (TYPE_ARG_TYPES (t));
1299 if (!POINTER_TYPE_P (t))
1300 visit (TYPE_MIN_VALUE_RAW (t));
1301 visit (TYPE_MAX_VALUE_RAW (t));
1302 }
1303
1304 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1305 {
1306 visit (TREE_PURPOSE (t));
1307 visit (TREE_VALUE (t));
1308 visit (TREE_CHAIN (t));
1309 }
1310
1311 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1312 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1313 visit (TREE_VEC_ELT (t, i));
1314
1315 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1316 {
1317 hstate.add_hwi (TREE_OPERAND_LENGTH (t));
1318 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1319 visit (TREE_OPERAND (t, i));
1320 }
1321
1322 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1323 {
1324 unsigned i;
1325 tree b;
1326 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1327 visit (b);
1328 visit (BINFO_OFFSET (t));
1329 visit (BINFO_VTABLE (t));
1330 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1331 BINFO_BASE_ACCESSES and BINFO_VPTR_INDEX; these are used
1332 by C++ FE only. */
1333 }
1334
1335 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1336 {
1337 unsigned i;
1338 tree index, value;
1339 hstate.add_hwi (CONSTRUCTOR_NELTS (t));
1340 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1341 {
1342 visit (index);
1343 visit (value);
1344 }
1345 }
1346
1347 if (code == OMP_CLAUSE)
1348 {
1349 int i;
1350 HOST_WIDE_INT val;
1351
1352 hstate.add_hwi (OMP_CLAUSE_CODE (t));
1353 switch (OMP_CLAUSE_CODE (t))
1354 {
1355 case OMP_CLAUSE_DEFAULT:
1356 val = OMP_CLAUSE_DEFAULT_KIND (t);
1357 break;
1358 case OMP_CLAUSE_SCHEDULE:
1359 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1360 break;
1361 case OMP_CLAUSE_DEPEND:
1362 val = OMP_CLAUSE_DEPEND_KIND (t);
1363 break;
1364 case OMP_CLAUSE_MAP:
1365 val = OMP_CLAUSE_MAP_KIND (t);
1366 break;
1367 case OMP_CLAUSE_PROC_BIND:
1368 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1369 break;
1370 case OMP_CLAUSE_REDUCTION:
1371 val = OMP_CLAUSE_REDUCTION_CODE (t);
1372 break;
1373 default:
1374 val = 0;
1375 break;
1376 }
1377 hstate.add_hwi (val);
1378 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1379 visit (OMP_CLAUSE_OPERAND (t, i));
1380 visit (OMP_CLAUSE_CHAIN (t));
1381 }
1382
1383 return hstate.end ();
1384
1385 #undef visit
1386 }
1387
1388 /* Compare two SCC entries by their hash value for qsorting them. */
1389
1390 int
1391 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1392 {
1393 const scc_entry *p1 = (const scc_entry *) p1_;
1394 const scc_entry *p2 = (const scc_entry *) p2_;
1395 if (p1->hash < p2->hash)
1396 return -1;
1397 else if (p1->hash > p2->hash)
1398 return 1;
1399 return 0;
1400 }
1401
1402 /* Return a hash value for the SCC on the SCC stack from FIRST with SIZE.
1403 THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST. */
1404
1405 hashval_t
1406 DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
1407 bool ref_p, bool this_ref_p)
1408 {
1409 unsigned int last_classes = 0, iterations = 0;
1410
1411 /* Compute hash values for the SCC members. */
1412 for (unsigned i = 0; i < size; ++i)
1413 sccstack[first+i].hash
1414 = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);
1415
1416 if (size == 1)
1417 return sccstack[first].hash;
1418
1419 /* We aim to get a unique hash for every tree within the SCC and to compute
1420 the hash value of the whole SCC by combining all values together in a stable
1421 (entry-point independent) order. This guarantees that the same SCC regions
1422 within different translation units will get the same hash values and
1423 therefore will be merged at WPA time.
1424
1425 Often the hashes are already unique. In that case we compute the SCC hash
1426 by combining the individual hash values in increasing order.
1427
1428 If there are duplicates, we seek at least one tree with a unique hash (and
1429 pick the one with the minimal hash among those). Then we obtain a stable
1430 order by a DFS walk starting from this unique tree and use the index
1431 within this order to make the individual hash values unique.
1432
1433 If there is no tree with a unique hash, we iteratively propagate the hash
1434 values across the internal edges of the SCC. This usually quickly leads
1435 to unique hashes. Consider, for example, an SCC containing two pointers
1436 that are identical except for the types they point to, and assume that
1437 these types are also part of the SCC. The propagation will mix the
1438 points-to type information into their hash values. */
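/* Editorial sketch, illustrative only (hypothetical hash values): assume
   an SCC of four trees whose initial hashes are

     t1: 0x11   t2: 0x42   t3: 0x42   t4: 0x77

   t1 has the smallest unique hash, so it becomes the entry point.  A DFS
   re-walk from t1 yields a stable order, say t1 t3 t2 t4; each hash is
   then mixed with its index in that order, which makes all four unique.
   The SCC hash combines the per-tree hashes in this stable order and is
   finally mixed back into every per-tree hash.  */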
1439 do
1440 {
1441 /* Sort the SCC so we can easily check for uniqueness. */
1442 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1443
1444 unsigned int classes = 1;
1445 int firstunique = -1;
1446
1447 /* Find the tree with lowest unique hash (if it exists) and compute
1448 the number of equivalence classes. */
1449 if (sccstack[first].hash != sccstack[first+1].hash)
1450 firstunique = 0;
1451 for (unsigned i = 1; i < size; ++i)
1452 if (sccstack[first+i-1].hash != sccstack[first+i].hash)
1453 {
1454 classes++;
1455 if (firstunique == -1
1456 && (i == size - 1
1457 || sccstack[first+i+1].hash != sccstack[first+i].hash))
1458 firstunique = i;
1459 }
1460
1461 /* If we found a tree with unique hash, stop the iteration. */
1462 if (firstunique != -1
1463 /* Also terminate if we run out of iterations or if the number of
1464 equivalence classes is no longer increasing.
1465 For example, a cyclic list of trees that are all equivalent will
1466 never have a unique entry point; however, we do not build such SCCs
1467 in our IL. */
1468 || classes <= last_classes || iterations > 16)
1469 {
1470 hashval_t scc_hash;
1471
1472 /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
1473 starting from FIRSTUNIQUE to obtain a stable order. */
1474 if (classes != size && firstunique != -1)
1475 {
1476 hash_map <tree, hashval_t> map(size*2);
1477
1478 /* Store hash values into a map, so we can associate them with
1479 the reordered SCC. */
1480 for (unsigned i = 0; i < size; ++i)
1481 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1482
1483 DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
1484 true);
1485 gcc_assert (again.sccstack.length () == size);
1486
1487 memcpy (sccstack.address () + first,
1488 again.sccstack.address (),
1489 sizeof (scc_entry) * size);
1490
1491 /* Update hash values of individual members by hashing in the
1492 index within the stable order. This ensures uniqueness.
1493 Also compute the SCC hash by mixing in all hash values in
1494 the stable order we obtained. */
1495 sccstack[first].hash = *map.get (sccstack[first].t);
1496 scc_hash = sccstack[first].hash;
1497 for (unsigned i = 1; i < size; ++i)
1498 {
1499 sccstack[first+i].hash
1500 = iterative_hash_hashval_t (i,
1501 *map.get (sccstack[first+i].t));
1502 scc_hash
1503 = iterative_hash_hashval_t (scc_hash,
1504 sccstack[first+i].hash);
1505 }
1506 }
1507 /* If we got a unique hash value for each tree, then the sort already
1508 ensured an entry-point independent order. Only compute the final
1509 SCC hash.
1510
1511 If we failed to find a unique entry point, we go by the same
1512 route. We will eventually introduce unwanted hash conflicts. */
1513 else
1514 {
1515 scc_hash = sccstack[first].hash;
1516 for (unsigned i = 1; i < size; ++i)
1517 scc_hash
1518 = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);
1519
1520 /* We cannot 100% guarantee that the hashes won't conflict in a way
1521 that makes it impossible to find a unique entry point. This however
1522 should be an extremely rare case. ICE for now so that possible
1523 issues are found and evaluated. */
1524 gcc_checking_assert (classes == size);
1525 }
1526
1527 /* To avoid conflicts across SCCs, iteratively hash the whole SCC
1528 hash into the hash of each element. */
1529 for (unsigned i = 0; i < size; ++i)
1530 sccstack[first+i].hash
1531 = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
1532 return scc_hash;
1533 }
1534
1535 last_classes = classes;
1536 iterations++;
1537
1538 /* We failed to identify the entry point; propagate hash values across
1539 the edges. */
1540 hash_map <tree, hashval_t> map(size*2);
1541
1542 for (unsigned i = 0; i < size; ++i)
1543 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1544
1545 for (unsigned i = 0; i < size; i++)
1546 sccstack[first+i].hash
1547 = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
1548 }
1549 while (true);
1550 }
1551
1552 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1553 already in the streamer cache. Main routine called for
1554 each visit of EXPR. */
1555
1556 void
1557 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1558 tree expr, bool ref_p, bool this_ref_p)
1559 {
1560 /* Handle special cases. */
1561 if (expr == NULL_TREE)
1562 return;
1563
1564 /* Do not DFS walk into indexable trees. */
1565 if (this_ref_p && tree_is_indexable (expr))
1566 return;
1567
1568 /* Check if we already streamed EXPR. */
1569 if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1570 return;
1571
1572 worklist w;
1573 w.expr = expr;
1574 w.from_state = from_state;
1575 w.cstate = NULL;
1576 w.ref_p = ref_p;
1577 w.this_ref_p = this_ref_p;
1578 worklist_vec.safe_push (w);
1579 }
1580
1581
1582 /* Emit the physical representation of tree node EXPR to output block OB.
1583 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
1584 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1585
1586 void
1587 lto_output_tree (struct output_block *ob, tree expr,
1588 bool ref_p, bool this_ref_p)
1589 {
1590 unsigned ix;
1591 bool existed_p;
1592
1593 if (expr == NULL_TREE)
1594 {
1595 streamer_write_record_start (ob, LTO_null);
1596 return;
1597 }
1598
1599 if (this_ref_p && tree_is_indexable (expr))
1600 {
1601 lto_output_tree_ref (ob, expr);
1602 return;
1603 }
1604
1605 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1606 if (existed_p)
1607 {
1608 /* If a node has already been streamed out, make sure that
1609 we don't write it more than once. Otherwise, the reader
1610 will instantiate two different nodes for the same object. */
1611 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1612 streamer_write_uhwi (ob, ix);
1613 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1614 lto_tree_code_to_tag (TREE_CODE (expr)));
1615 lto_stats.num_pickle_refs_output++;
1616 }
1617 else
1618 {
1619 /* This is the first time we see EXPR, write all reachable
1620 trees to OB. */
1621 static bool in_dfs_walk;
1622
1623 /* Protect against recursion, which would mean a disconnect between
1624 the tree edges we walk in the DFS walk and the edges
1625 we stream out. */
1626 gcc_assert (!in_dfs_walk);
1627
1628 if (streamer_dump_file)
1629 {
1630 print_node_brief (streamer_dump_file, " Streaming SCC of ",
1631 expr, 4);
1632 fprintf (streamer_dump_file, "\n");
1633 }
1634
1635 /* Start the DFS walk. */
1636 /* Save ob state ... */
1637 /* let's see ... */
1638 in_dfs_walk = true;
1639 DFS (ob, expr, ref_p, this_ref_p, false);
1640 in_dfs_walk = false;
1641
1642 /* Finally append a reference to the tree we were writing.
1643 ??? If expr ended up as a singleton we could have
1644 inlined it here and avoid outputting a reference. */
1645 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1646 gcc_assert (existed_p);
1647 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1648 streamer_write_uhwi (ob, ix);
1649 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1650 lto_tree_code_to_tag (TREE_CODE (expr)));
1651 if (streamer_dump_file)
1652 {
1653 print_node_brief (streamer_dump_file, " Finished SCC of ",
1654 expr, 4);
1655 fprintf (streamer_dump_file, "\n\n");
1656 }
1657 lto_stats.num_pickle_refs_output++;
1658 }
1659 }
1660
1661
1662 /* Output to OB a list of try/catch handlers starting with FIRST. */
1663
1664 static void
1665 output_eh_try_list (struct output_block *ob, eh_catch first)
1666 {
1667 eh_catch n;
1668
1669 for (n = first; n; n = n->next_catch)
1670 {
1671 streamer_write_record_start (ob, LTO_eh_catch);
1672 stream_write_tree (ob, n->type_list, true);
1673 stream_write_tree (ob, n->filter_list, true);
1674 stream_write_tree (ob, n->label, true);
1675 }
1676
1677 streamer_write_record_start (ob, LTO_null);
1678 }
1679
1680
1681 /* Output EH region R to OB, or an LTO_null record if R is NULL. */
1684
1685 static void
1686 output_eh_region (struct output_block *ob, eh_region r)
1687 {
1688 enum LTO_tags tag;
1689
1690 if (r == NULL)
1691 {
1692 streamer_write_record_start (ob, LTO_null);
1693 return;
1694 }
1695
1696 if (r->type == ERT_CLEANUP)
1697 tag = LTO_ert_cleanup;
1698 else if (r->type == ERT_TRY)
1699 tag = LTO_ert_try;
1700 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1701 tag = LTO_ert_allowed_exceptions;
1702 else if (r->type == ERT_MUST_NOT_THROW)
1703 tag = LTO_ert_must_not_throw;
1704 else
1705 gcc_unreachable ();
1706
1707 streamer_write_record_start (ob, tag);
1708 streamer_write_hwi (ob, r->index);
1709
1710 if (r->outer)
1711 streamer_write_hwi (ob, r->outer->index);
1712 else
1713 streamer_write_zero (ob);
1714
1715 if (r->inner)
1716 streamer_write_hwi (ob, r->inner->index);
1717 else
1718 streamer_write_zero (ob);
1719
1720 if (r->next_peer)
1721 streamer_write_hwi (ob, r->next_peer->index);
1722 else
1723 streamer_write_zero (ob);
1724
1725 if (r->type == ERT_TRY)
1726 {
1727 output_eh_try_list (ob, r->u.eh_try.first_catch);
1728 }
1729 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1730 {
1731 stream_write_tree (ob, r->u.allowed.type_list, true);
1732 stream_write_tree (ob, r->u.allowed.label, true);
1733 streamer_write_uhwi (ob, r->u.allowed.filter);
1734 }
1735 else if (r->type == ERT_MUST_NOT_THROW)
1736 {
1737 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1738 bitpack_d bp = bitpack_create (ob->main_stream);
1739 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1740 streamer_write_bitpack (&bp);
1741 }
1742
1743 if (r->landing_pads)
1744 streamer_write_hwi (ob, r->landing_pads->index);
1745 else
1746 streamer_write_zero (ob);
1747 }
1748
1749
1750 /* Output landing pad LP to OB. */
1751
1752 static void
1753 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1754 {
1755 if (lp == NULL)
1756 {
1757 streamer_write_record_start (ob, LTO_null);
1758 return;
1759 }
1760
1761 streamer_write_record_start (ob, LTO_eh_landing_pad);
1762 streamer_write_hwi (ob, lp->index);
1763 if (lp->next_lp)
1764 streamer_write_hwi (ob, lp->next_lp->index);
1765 else
1766 streamer_write_zero (ob);
1767
1768 if (lp->region)
1769 streamer_write_hwi (ob, lp->region->index);
1770 else
1771 streamer_write_zero (ob);
1772
1773 stream_write_tree (ob, lp->post_landing_pad, true);
1774 }
1775
1776
1777 /* Output the existing eh_table to OB. */
1778
1779 static void
1780 output_eh_regions (struct output_block *ob, struct function *fn)
1781 {
1782 if (fn->eh && fn->eh->region_tree)
1783 {
1784 unsigned i;
1785 eh_region eh;
1786 eh_landing_pad lp;
1787 tree ttype;
1788
1789 streamer_write_record_start (ob, LTO_eh_table);
1790
1791 /* Emit the index of the root of the EH region tree. */
1792 streamer_write_hwi (ob, fn->eh->region_tree->index);
1793
1794 /* Emit all the EH regions in the region array. */
1795 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1796 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1797 output_eh_region (ob, eh);
1798
1799 /* Emit all landing pads. */
1800 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1801 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1802 output_eh_lp (ob, lp);
1803
1804 /* Emit all the runtime type data. */
1805 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1806 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1807 stream_write_tree (ob, ttype, true);
1808
1809 /* Emit the table of action chains. */
1810 if (targetm.arm_eabi_unwinder)
1811 {
1812 tree t;
1813 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1814 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1815 stream_write_tree (ob, t, true);
1816 }
1817 else
1818 {
1819 uchar c;
1820 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1821 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1822 streamer_write_char_stream (ob->main_stream, c);
1823 }
1824 }
1825
1826 /* The LTO_null either terminates the record or indicates that there
1827 are no eh_records at all. */
1828 streamer_write_record_start (ob, LTO_null);
1829 }
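
/* A sketch of the EH table layout implied by the writes above (not a
   normative description of the on-disk format):

     LTO_eh_table
       <index of the region tree root>
       <number of regions>       <region records, see output_eh_region>
       <number of landing pads>  <landing pad records, see output_eh_lp>
       <number of runtime types> <ttype trees>
       <length of ehspec data>   <trees or bytes, depending on target>
     LTO_null

   For a function without EH regions only the trailing LTO_null is
   written.  */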
1830
1831
1832 /* Output all of the active SSA names of FN to OB. */
1833
1834 static void
1835 output_ssa_names (struct output_block *ob, struct function *fn)
1836 {
1837 unsigned int i, len;
1838
1839 len = vec_safe_length (SSANAMES (fn));
1840 streamer_write_uhwi (ob, len);
1841
1842 for (i = 1; i < len; i++)
1843 {
1844 tree ptr = (*SSANAMES (fn))[i];
1845
1846 if (ptr == NULL_TREE
1847 || SSA_NAME_IN_FREE_LIST (ptr)
1848 || virtual_operand_p (ptr)
1849 /* Simply skip unreleased SSA names. */
1850 || (! SSA_NAME_IS_DEFAULT_DEF (ptr)
1851 && (! SSA_NAME_DEF_STMT (ptr)
1852 || ! gimple_bb (SSA_NAME_DEF_STMT (ptr)))))
1853 continue;
1854
1855 streamer_write_uhwi (ob, i);
1856 streamer_write_char_stream (ob->main_stream,
1857 SSA_NAME_IS_DEFAULT_DEF (ptr));
1858 if (SSA_NAME_VAR (ptr))
1859 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1860 else
1861 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1862 stream_write_tree (ob, TREE_TYPE (ptr), true);
1863 }
1864
1865 streamer_write_zero (ob);
1866 }
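
/* The data emitted above is, roughly:

     <length of the SSA name vector>
     for each streamed name:  <index>  <is-default-def byte>  <var or type>
     0

   The loop starts at index 1, so the trailing zero unambiguously
   terminates the list.  */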
1867
1868
1869
1870 /* Output the cfg. */
1871
1872 static void
1873 output_cfg (struct output_block *ob, struct function *fn)
1874 {
1875 struct lto_output_stream *tmp_stream = ob->main_stream;
1876 basic_block bb;
1877
1878 ob->main_stream = ob->cfg_stream;
1879
1880 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1881 profile_status_for_fn (fn));
1882
1883 /* Output the number of the highest basic block. */
1884 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1885
1886 FOR_ALL_BB_FN (bb, fn)
1887 {
1888 edge_iterator ei;
1889 edge e;
1890
1891 streamer_write_hwi (ob, bb->index);
1892
1893 /* Output the successors and the edge flags. */
1894 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1895 FOR_EACH_EDGE (e, ei, bb->succs)
1896 {
1897 streamer_write_uhwi (ob, e->dest->index);
1898 e->probability.stream_out (ob);
1899 streamer_write_uhwi (ob, e->flags);
1900 }
1901 }
1902
1903 streamer_write_hwi (ob, -1);
1904
1905 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1906 while (bb->next_bb)
1907 {
1908 streamer_write_hwi (ob, bb->next_bb->index);
1909 bb = bb->next_bb;
1910 }
1911
1912 streamer_write_hwi (ob, -1);
1913
1914 /* ??? The cfgloop interface is tied to cfun. */
1915 gcc_assert (cfun == fn);
1916
1917 /* Output the number of loops. */
1918 streamer_write_uhwi (ob, number_of_loops (fn));
1919
1920 /* Output each loop, skipping the tree root which has number zero. */
1921 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1922 {
1923 struct loop *loop = get_loop (fn, i);
1924
1925 /* Write the index of the loop header. That's enough to rebuild
1926 the loop tree on the reader side. Stream -1 for an unused
1927 loop entry. */
1928 if (!loop)
1929 {
1930 streamer_write_hwi (ob, -1);
1931 continue;
1932 }
1933 else
1934 streamer_write_hwi (ob, loop->header->index);
1935
1936 /* Write everything copy_loop_info copies. */
1937 streamer_write_enum (ob->main_stream,
1938 loop_estimation, EST_LAST, loop->estimate_state);
1939 streamer_write_hwi (ob, loop->any_upper_bound);
1940 if (loop->any_upper_bound)
1941 streamer_write_widest_int (ob, loop->nb_iterations_upper_bound);
1942 streamer_write_hwi (ob, loop->any_likely_upper_bound);
1943 if (loop->any_likely_upper_bound)
1944 streamer_write_widest_int (ob, loop->nb_iterations_likely_upper_bound);
1945 streamer_write_hwi (ob, loop->any_estimate);
1946 if (loop->any_estimate)
1947 streamer_write_widest_int (ob, loop->nb_iterations_estimate);
1948
1949 /* Write OMP SIMD related info. */
1950 streamer_write_hwi (ob, loop->safelen);
1951 streamer_write_hwi (ob, loop->unroll);
1952 streamer_write_hwi (ob, loop->dont_vectorize);
1953 streamer_write_hwi (ob, loop->force_vectorize);
1954 stream_write_tree (ob, loop->simduid, true);
1955 }
1956
1957 ob->main_stream = tmp_stream;
1958 }
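
/* A rough sketch of what ends up in the cfg stream:

     <profile status>  <last_basic_block_for_fn>
     for each basic block:  <bb index>  <successor count>
                            per edge: <dest index>  <probability>  <flags>
     -1
     <bb indices of the block chain following the entry block>
     -1
     <number of loops>
     per loop, skipping the root:  <header bb index, or -1 if unused>
       followed, for present loops, by the fields copy_loop_info copies
       and the OMP SIMD related fields written above.  */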
1959
1960
1961 /* Create the header in the file using OB. If the section type is for
1962 a function, set FN to the decl for that function. */
1963
1964 void
1965 produce_asm (struct output_block *ob, tree fn)
1966 {
1967 enum lto_section_type section_type = ob->section_type;
1968 struct lto_function_header header;
1969 char *section_name;
1970
1971 if (section_type == LTO_section_function_body)
1972 {
1973 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1974 section_name = lto_get_section_name (section_type, name, NULL);
1975 }
1976 else
1977 section_name = lto_get_section_name (section_type, NULL, NULL);
1978
1979 lto_begin_section (section_name, !flag_wpa);
1980 free (section_name);
1981
1982 /* The entire header stream is computed here. */
1983 memset (&header, 0, sizeof (struct lto_function_header));
1984
1985 /* Write the header. */
1986 header.major_version = LTO_major_version;
1987 header.minor_version = LTO_minor_version;
1988
1989 if (section_type == LTO_section_function_body)
1990 header.cfg_size = ob->cfg_stream->total_size;
1991 header.main_size = ob->main_stream->total_size;
1992 header.string_size = ob->string_stream->total_size;
1993 lto_write_data (&header, sizeof header);
1994
1995 /* Put all of the gimple and the string table out to the asm file as a
1996 block of text. */
1997 if (section_type == LTO_section_function_body)
1998 lto_write_stream (ob->cfg_stream);
1999 lto_write_stream (ob->main_stream);
2000 lto_write_stream (ob->string_stream);
2001
2002 lto_end_section ();
2003 }
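
/* The section produced above is laid out as:

     struct lto_function_header  (versions and cfg/main/string sizes)
     cfg stream                  (function body sections only)
     main stream
     string stream  */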
2004
2005
2006 /* Output the base body of struct function FN using output block OB. */
2007
2008 static void
2009 output_struct_function_base (struct output_block *ob, struct function *fn)
2010 {
2011 struct bitpack_d bp;
2012 unsigned i;
2013 tree t;
2014
2015 /* Output the static chain and non-local goto save area. */
2016 stream_write_tree (ob, fn->static_chain_decl, true);
2017 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
2018
2019 /* Output all the local variables in the function. */
2020 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
2021 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
2022 stream_write_tree (ob, t, true);
2023
2024 /* Output current IL state of the function. */
2025 streamer_write_uhwi (ob, fn->curr_properties);
2026
2027 /* Write all the attributes for FN. */
2028 bp = bitpack_create (ob->main_stream);
2029 bp_pack_value (&bp, fn->is_thunk, 1);
2030 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
2031 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
2032 bp_pack_value (&bp, fn->returns_struct, 1);
2033 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
2034 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
2035 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
2036 bp_pack_value (&bp, fn->after_inlining, 1);
2037 bp_pack_value (&bp, fn->stdarg, 1);
2038 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
2039 bp_pack_value (&bp, fn->has_forced_label_in_static, 1);
2040 bp_pack_value (&bp, fn->calls_alloca, 1);
2041 bp_pack_value (&bp, fn->calls_setjmp, 1);
2042 bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
2043 bp_pack_value (&bp, fn->has_simduid_loops, 1);
2044 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
2045 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
2046 bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);
2047
2048 /* Output the function start and end loci. */
2049 stream_output_location (ob, &bp, fn->function_start_locus);
2050 stream_output_location (ob, &bp, fn->function_end_locus);
2051
2052 streamer_write_bitpack (&bp);
2053 }
2054
2055
2056 /* Collect all leaf BLOCKs beyond ROOT into LEAFS. */
2057
2058 static void
2059 collect_block_tree_leafs (tree root, vec<tree> &leafs)
2060 {
2061 for (root = BLOCK_SUBBLOCKS (root); root; root = BLOCK_CHAIN (root))
2062 if (! BLOCK_SUBBLOCKS (root))
2063 leafs.safe_push (root);
2064 else
2065 collect_block_tree_leafs (root, leafs);
2066 }
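
/* For example, if ROOT has subblocks A and B, where A is a leaf and B
   has leaf subblocks C and D, the vector ends up containing A, C and D
   (a hypothetical block tree, for illustration only).  */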
2067
2068 /* Output the body of function NODE->DECL. */
2069
2070 static void
2071 output_function (struct cgraph_node *node)
2072 {
2073 tree function;
2074 struct function *fn;
2075 basic_block bb;
2076 struct output_block *ob;
2077
2078 if (streamer_dump_file)
2079 fprintf (streamer_dump_file, "\nStreaming body of %s\n",
2080 node->name ());
2081
2082 function = node->decl;
2083 fn = DECL_STRUCT_FUNCTION (function);
2084 ob = create_output_block (LTO_section_function_body);
2085
2086 clear_line_info (ob);
2087 ob->symbol = node;
2088
2089 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
2090
2091 /* Set current_function_decl and cfun. */
2092 push_cfun (fn);
2093
2094 /* Make string 0 be a NULL string. */
2095 streamer_write_char_stream (ob->string_stream, 0);
2096
2097 streamer_write_record_start (ob, LTO_function);
2098
2099 /* Output decls for parameters and args. */
2100 stream_write_tree (ob, DECL_RESULT (function), true);
2101 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
2102
2103 /* Output debug args if available. */
2104 vec<tree, va_gc> **debugargs = decl_debug_args_lookup (function);
2105 if (! debugargs)
2106 streamer_write_uhwi (ob, 0);
2107 else
2108 {
2109 streamer_write_uhwi (ob, (*debugargs)->length ());
2110 for (unsigned i = 0; i < (*debugargs)->length (); ++i)
2111 stream_write_tree (ob, (**debugargs)[i], true);
2112 }
2113
2114 /* Output DECL_INITIAL for the function, which contains the tree of
2115 lexical scopes. */
2116 stream_write_tree (ob, DECL_INITIAL (function), true);
2117 /* As we do not recurse into BLOCK_SUBBLOCKS but only into BLOCK_SUPERCONTEXT,
2118 collect the block tree leafs and stream those. */
2119 auto_vec<tree> block_tree_leafs;
2120 if (DECL_INITIAL (function))
2121 collect_block_tree_leafs (DECL_INITIAL (function), block_tree_leafs);
2122 streamer_write_uhwi (ob, block_tree_leafs.length ());
2123 for (unsigned i = 0; i < block_tree_leafs.length (); ++i)
2124 stream_write_tree (ob, block_tree_leafs[i], true);
2125
2126 /* We also stream abstract functions, for which only the information
2127 needed for debug info is streamed. */
2128 if (gimple_has_body_p (function))
2129 {
2130 /* Fixup loops if required to match discovery done in the reader. */
2131 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
2132
2133 streamer_write_uhwi (ob, 1);
2134 output_struct_function_base (ob, fn);
2135
2136 /* Output all the SSA names used in the function. */
2137 output_ssa_names (ob, fn);
2138
2139 /* Output any exception handling regions. */
2140 output_eh_regions (ob, fn);
2141
2142
2143 /* We will renumber the statements. The code that does this uses
2144 the same ordering that we use for serializing them so we can use
2145 the same code on the other end and not have to write out the
2146 statement numbers. We do not assign UIDs to PHIs here because
2147 virtual PHIs get re-computed on-the-fly which would make numbers
2148 inconsistent. */
2149 set_gimple_stmt_max_uid (cfun, 0);
2150 FOR_ALL_BB_FN (bb, cfun)
2151 {
2152 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2153 gsi_next (&gsi))
2154 {
2155 gphi *stmt = gsi.phi ();
2156
2157 /* Virtual PHIs are not going to be streamed. */
2158 if (!virtual_operand_p (gimple_phi_result (stmt)))
2159 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2160 }
2161 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
2162 gsi_next (&gsi))
2163 {
2164 gimple *stmt = gsi_stmt (gsi);
2165 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2166 }
2167 }
2168 /* To avoid keeping duplicate gimple IDs in the statements, renumber
2169 virtual phis now. */
2170 FOR_ALL_BB_FN (bb, cfun)
2171 {
2172 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2173 gsi_next (&gsi))
2174 {
2175 gphi *stmt = gsi.phi ();
2176 if (virtual_operand_p (gimple_phi_result (stmt)))
2177 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2178 }
2179 }
2180
2181 /* Output the code for the function. */
2182 FOR_ALL_BB_FN (bb, fn)
2183 output_bb (ob, bb, fn);
2184
2185 /* The terminator for this function. */
2186 streamer_write_record_start (ob, LTO_null);
2187
2188 output_cfg (ob, fn);
2189
2190 loop_optimizer_finalize ();
2191 pop_cfun ();
2192 }
2193 else
2194 streamer_write_uhwi (ob, 0);
2195
2196 /* Create a section to hold the pickled output of this function. */
2197 produce_asm (ob, function);
2198
2199 destroy_output_block (ob);
2200 if (streamer_dump_file)
2201 fprintf (streamer_dump_file, "Finished streaming %s\n",
2202 node->name ());
2203 }
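
/* To summarize, the function body section written above contains,
   roughly in this order:

     the LTO_function record
     DECL_RESULT and the DECL_ARGUMENTS chain
     the debug args, if any
     DECL_INITIAL and the collected block tree leafs
     a flag saying whether a body follows; if it does:
       the struct function base data, the SSA names, the EH regions,
       the statements of each basic block, an LTO_null terminator and
       the CFG (the latter in the separate cfg stream).  */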
2204
2205 /* Output the constructor of variable NODE->DECL. */
2206
2207 static void
2208 output_constructor (struct varpool_node *node)
2209 {
2210 tree var = node->decl;
2211 struct output_block *ob;
2212
2213 if (streamer_dump_file)
2214 fprintf (streamer_dump_file, "\nStreaming constructor of %s\n",
2215 node->name ());
2216
2217 ob = create_output_block (LTO_section_function_body);
2218
2219 clear_line_info (ob);
2220 ob->symbol = node;
2221
2222 /* Make string 0 be a NULL string. */
2223 streamer_write_char_stream (ob->string_stream, 0);
2224
2225 /* Output DECL_INITIAL for the variable, which contains its
2226 initializer. */
2227 stream_write_tree (ob, DECL_INITIAL (var), true);
2228
2229 /* Create a section to hold the pickled output of this constructor. */
2230 produce_asm (ob, var);
2231
2232 destroy_output_block (ob);
2233 if (streamer_dump_file)
2234 fprintf (streamer_dump_file, "Finished streaming %s\n",
2235 node->name ());
2236 }
2237
2238
2239 /* Emit toplevel asms. */
2240
2241 void
2242 lto_output_toplevel_asms (void)
2243 {
2244 struct output_block *ob;
2245 struct asm_node *can;
2246 char *section_name;
2247 struct lto_simple_header_with_strings header;
2248
2249 if (!symtab->first_asm_symbol ())
2250 return;
2251
2252 ob = create_output_block (LTO_section_asm);
2253
2254 /* Make string 0 be a NULL string. */
2255 streamer_write_char_stream (ob->string_stream, 0);
2256
2257 for (can = symtab->first_asm_symbol (); can; can = can->next)
2258 {
2259 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2260 streamer_write_hwi (ob, can->order);
2261 }
2262
2263 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2264
2265 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
2266 lto_begin_section (section_name, !flag_wpa);
2267 free (section_name);
2268
2269 /* The entire header stream is computed here. */
2270 memset (&header, 0, sizeof (header));
2271
2272 /* Write the header. */
2273 header.major_version = LTO_major_version;
2274 header.minor_version = LTO_minor_version;
2275
2276 header.main_size = ob->main_stream->total_size;
2277 header.string_size = ob->string_stream->total_size;
2278 lto_write_data (&header, sizeof header);
2279
2280 /* Put all of the gimple and the string table out to the asm file as a
2281 block of text. */
2282 lto_write_stream (ob->main_stream);
2283 lto_write_stream (ob->string_stream);
2284
2285 lto_end_section ();
2286
2287 destroy_output_block (ob);
2288 }
2289
2290
2291 /* Copy the function body or variable constructor of NODE without deserializing. */
2292
2293 static void
2294 copy_function_or_variable (struct symtab_node *node)
2295 {
2296 tree function = node->decl;
2297 struct lto_file_decl_data *file_data = node->lto_file_data;
2298 const char *data;
2299 size_t len;
2300 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2301 char *section_name =
2302 lto_get_section_name (LTO_section_function_body, name, NULL);
2303 size_t i, j;
2304 struct lto_in_decl_state *in_state;
2305 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2306
2307 lto_begin_section (section_name, false);
2308 free (section_name);
2309
2310 /* We may have renamed the declaration, e.g., a static function. */
2311 name = lto_get_decl_name_mapping (file_data, name);
2312
2313 data = lto_get_raw_section_data (file_data, LTO_section_function_body,
2314 name, &len);
2315 gcc_assert (data);
2316
2317 /* Do a bit copy of the function body. */
2318 lto_write_raw_data (data, len);
2319
2320 /* Copy decls. */
2321 in_state =
2322 lto_get_function_in_decl_state (node->lto_file_data, function);
2323 gcc_assert (in_state);
2324 out_state->compressed = in_state->compressed;
2325
2326 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2327 {
2328 size_t n = vec_safe_length (in_state->streams[i]);
2329 vec<tree, va_gc> *trees = in_state->streams[i];
2330 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2331
2332 /* The out state must have the same indices as the in state,
2333 so just copy the vector. All the encoders in the out state
2334 must still be empty when we reach here. */
2335 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2336 encoder->trees.reserve_exact (n);
2337 for (j = 0; j < n; j++)
2338 encoder->trees.safe_push ((*trees)[j]);
2339 }
2340
2341 lto_free_raw_section_data (file_data, LTO_section_function_body, name,
2342 data, len);
2343 lto_end_section ();
2344 }
2345
2346 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2347
2348 static tree
2349 wrap_refs (tree *tp, int *ws, void *)
2350 {
2351 tree t = *tp;
2352 if (handled_component_p (t)
2353 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
2354 && TREE_PUBLIC (TREE_OPERAND (t, 0)))
2355 {
2356 tree decl = TREE_OPERAND (t, 0);
2357 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2358 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2359 build1 (ADDR_EXPR, ptrtype, decl),
2360 build_int_cst (ptrtype, 0));
2361 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2362 *ws = 0;
2363 }
2364 else if (TREE_CODE (t) == CONSTRUCTOR)
2365 ;
2366 else if (!EXPR_P (t))
2367 *ws = 0;
2368 return NULL_TREE;
2369 }
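
/* For example (a hypothetical declaration, for illustration only),
   given a public variable "struct S v;" a reference v.f, represented
   as COMPONENT_REF <v, f>, becomes COMPONENT_REF <MEM_REF <&v, 0>, f>,
   with the MEM_REF carrying the type and volatility of "v".  */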
2370
2371 /* Remove functions that are no longer used from offload_funcs, and mark the
2372 remaining ones with DECL_PRESERVE_P. */
2373
2374 static void
2375 prune_offload_funcs (void)
2376 {
2377 if (!offload_funcs)
2378 return;
2379
2380 unsigned ix, ix2;
2381 tree *elem_ptr;
2382 VEC_ORDERED_REMOVE_IF (*offload_funcs, ix, ix2, elem_ptr,
2383 cgraph_node::get (*elem_ptr) == NULL);
2384
2385 tree fn_decl;
2386 FOR_EACH_VEC_ELT (*offload_funcs, ix, fn_decl)
2387 DECL_PRESERVE_P (fn_decl) = 1;
2388 }
2389
2390 /* Main entry point from the pass manager. */
2391
2392 void
2393 lto_output (void)
2394 {
2395 struct lto_out_decl_state *decl_state;
2396 bitmap output = NULL;
2397 int i, n_nodes;
2398 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2399
2400 prune_offload_funcs ();
2401
2402 if (flag_checking)
2403 output = lto_bitmap_alloc ();
2404
2405 /* Initialize the streamer. */
2406 lto_streamer_init ();
2407
2408 n_nodes = lto_symtab_encoder_size (encoder);
2409 /* Process only the functions with bodies. */
2410 for (i = 0; i < n_nodes; i++)
2411 {
2412 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2413 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2414 {
2415 if (lto_symtab_encoder_encode_body_p (encoder, node)
2416 && !node->alias
2417 && (!node->thunk.thunk_p || !node->thunk.add_pointer_bounds_args))
2418 {
2419 if (flag_checking)
2420 {
2421 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2422 bitmap_set_bit (output, DECL_UID (node->decl));
2423 }
2424 decl_state = lto_new_out_decl_state ();
2425 lto_push_out_decl_state (decl_state);
2426 if (gimple_has_body_p (node->decl)
2427 || (!flag_wpa
2428 && flag_incremental_link != INCREMENTAL_LINK_LTO)
2429 /* Thunks have no body but they may be synthesized
2430 at WPA time. */
2431 || DECL_ARGUMENTS (node->decl))
2432 output_function (node);
2433 else
2434 copy_function_or_variable (node);
2435 gcc_assert (lto_get_out_decl_state () == decl_state);
2436 lto_pop_out_decl_state ();
2437 lto_record_function_out_decl_state (node->decl, decl_state);
2438 }
2439 }
2440 else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2441 {
2442 /* Wrap symbol references inside the ctor in a type
2443 preserving MEM_REF. */
2444 tree ctor = DECL_INITIAL (node->decl);
2445 if (ctor && !in_lto_p)
2446 walk_tree (&ctor, wrap_refs, NULL, NULL);
2447 if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2448 && lto_symtab_encoder_encode_initializer_p (encoder, node)
2449 && !node->alias)
2450 {
2451 timevar_push (TV_IPA_LTO_CTORS_OUT);
2452 if (flag_checking)
2453 {
2454 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2455 bitmap_set_bit (output, DECL_UID (node->decl));
2456 }
2457 decl_state = lto_new_out_decl_state ();
2458 lto_push_out_decl_state (decl_state);
2459 if (DECL_INITIAL (node->decl) != error_mark_node
2460 || (!flag_wpa
2461 && flag_incremental_link != INCREMENTAL_LINK_LTO))
2462 output_constructor (node);
2463 else
2464 copy_function_or_variable (node);
2465 gcc_assert (lto_get_out_decl_state () == decl_state);
2466 lto_pop_out_decl_state ();
2467 lto_record_function_out_decl_state (node->decl, decl_state);
2468 timevar_pop (TV_IPA_LTO_CTORS_OUT);
2469 }
2470 }
2471 }
2472
2473 /* Emit the callgraph after emitting function bodies. This needs to
2474 be done now to make sure that all the statements in every function
2475 have been renumbered so that edges can be associated with call
2476 statements using the statement UIDs. */
2477 output_symtab ();
2478
2479 output_offload_tables ();
2480
2481 if (flag_checking)
2482 lto_bitmap_free (output);
2483
2484 }
2485
2486 /* Write each node encoded by ENCODER to OB, as well as those reachable
2487 from it and required for correct representation of its semantics.
2488 Each node in ENCODER must be a global declaration or a type. A node
2489 is written only once, even if it appears multiple times in the
2490 vector. Certain transitively-reachable nodes, such as those
2491 representing expressions, may be duplicated, but such nodes
2492 must not appear in ENCODER itself. */
2493
2494 static void
2495 write_global_stream (struct output_block *ob,
2496 struct lto_tree_ref_encoder *encoder)
2497 {
2498 tree t;
2499 size_t index;
2500 const size_t size = lto_tree_ref_encoder_size (encoder);
2501
2502 for (index = 0; index < size; index++)
2503 {
2504 t = lto_tree_ref_encoder_get_tree (encoder, index);
2505 if (streamer_dump_file)
2506 {
2507 fprintf (streamer_dump_file, " %i:", (int)index);
2508 print_node_brief (streamer_dump_file, "", t, 4);
2509 fprintf (streamer_dump_file, "\n");
2510 }
2511 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2512 stream_write_tree (ob, t, false);
2513 }
2514 }
2515
2516
2517 /* Write a sequence of indices into the globals vector corresponding
2518 to the trees in ENCODER. These are used by the reader to map the
2519 indices used to refer to global entities within function bodies to
2520 their referents. */
2521
2522 static void
2523 write_global_references (struct output_block *ob,
2524 struct lto_tree_ref_encoder *encoder)
2525 {
2526 tree t;
2527 uint32_t index;
2528 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2529
2530 /* Write size and slot indexes as 32-bit unsigned numbers. */
2531 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2532 data[0] = size;
2533
2534 for (index = 0; index < size; index++)
2535 {
2536 unsigned slot_num;
2537
2538 t = lto_tree_ref_encoder_get_tree (encoder, index);
2539 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2540 gcc_assert (slot_num != (unsigned)-1);
2541 data[index + 1] = slot_num;
2542 }
2543
2544 lto_write_data (data, sizeof (int32_t) * (size + 1));
2545 free (data);
2546 }
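
/* The data written above is a flat array of 32-bit values: the number
   of trees followed by one writer-cache slot number per tree.  For
   instance, three trees sitting in cache slots 7, 2 and 9 would be
   emitted as 3, 7, 2, 9 (made-up slot numbers, for illustration).  */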
2547
2548
2549 /* Write all the decl streams in an lto_out_decl_state STATE using
2550 output block OB. */
2551
2552 void
2553 lto_output_decl_state_streams (struct output_block *ob,
2554 struct lto_out_decl_state *state)
2555 {
2556 int i;
2557
2558 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2559 write_global_stream (ob, &state->streams[i]);
2560 }
2561
2562
2563 /* Write all the references in an lto_out_decl_state STATE using
2564 output block OB. */
2565
2566 void
2567 lto_output_decl_state_refs (struct output_block *ob,
2568 struct lto_out_decl_state *state)
2569 {
2570 unsigned i;
2571 unsigned ref;
2572 tree decl;
2573
2574 /* Write a reference to FUNCTION_DECL. If there is no function,
2575 write a reference to void_type_node. */
2576 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2577 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2578 gcc_assert (ref != (unsigned)-1);
2579 ref = ref * 2 + (state->compressed ? 1 : 0);
2580 lto_write_data (&ref, sizeof (uint32_t));
2581
2582 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2583 write_global_references (ob, &state->streams[i]);
2584 }
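
/* The function reference written above packs the compression flag into
   its low bit; e.g. a decl found in cache slot 5 for a compressed state
   is written as 5 * 2 + 1 = 11 (an arbitrary slot number, for
   illustration).  */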
2585
2586
2587 /* Return the written size of STATE. */
2588
2589 static size_t
2590 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2591 {
2592 int i;
2593 size_t size;
2594
2595 size = sizeof (int32_t); /* fn_ref. */
2596 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2597 {
2598 size += sizeof (int32_t); /* vector size. */
2599 size += (lto_tree_ref_encoder_size (&state->streams[i])
2600 * sizeof (int32_t));
2601 }
2602 return size;
2603 }
2604
2605
2606 /* Write symbol T to the current section, using CACHE to look up its
2607 cache slot. SEEN records symbols written so far; ALIAS is true for aliases. */
2608
2609 static void
2610 write_symbol (struct streamer_tree_cache_d *cache,
2611 tree t, hash_set<const char *> *seen, bool alias)
2612 {
2613 const char *name;
2614 enum gcc_plugin_symbol_kind kind;
2615 enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
2616 unsigned slot_num;
2617 uint64_t size;
2618 const char *comdat;
2619 unsigned char c;
2620
2621 gcc_checking_assert (TREE_PUBLIC (t)
2622 && !is_builtin_fn (t)
2623 && !DECL_ABSTRACT_P (t)
2624 && (!VAR_P (t) || !DECL_HARD_REGISTER (t)));
2625
2626 gcc_assert (VAR_OR_FUNCTION_DECL_P (t));
2627
2628 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2629
2630 /* This behaves like assemble_name_raw in varasm.c, performing the
2631 same name manipulations that ASM_OUTPUT_LABELREF does. */
2632 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2633
2634 if (seen->add (name))
2635 return;
2636
2637 streamer_tree_cache_lookup (cache, t, &slot_num);
2638 gcc_assert (slot_num != (unsigned)-1);
2639
2640 if (DECL_EXTERNAL (t))
2641 {
2642 if (DECL_WEAK (t))
2643 kind = GCCPK_WEAKUNDEF;
2644 else
2645 kind = GCCPK_UNDEF;
2646 }
2647 else
2648 {
2649 if (DECL_WEAK (t))
2650 kind = GCCPK_WEAKDEF;
2651 else if (DECL_COMMON (t))
2652 kind = GCCPK_COMMON;
2653 else
2654 kind = GCCPK_DEF;
2655
2656 /* When something is defined, it should have a node attached. */
2657 gcc_assert (alias || !VAR_P (t) || varpool_node::get (t)->definition);
2658 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2659 || (cgraph_node::get (t)
2660 && cgraph_node::get (t)->definition));
2661 }
2662
2663 /* Imitate what default_elf_asm_output_external does.
2664 When a symbol is external, we need to output it with DEFAULT visibility
2665 when compiling with -fvisibility=default, but with HIDDEN visibility
2666 when the symbol has the visibility ("hidden") attribute specified.
2667 targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
2668 right. */
2669
2670 if (DECL_EXTERNAL (t)
2671 && !targetm.binds_local_p (t))
2672 visibility = GCCPV_DEFAULT;
2673 else
2674 switch (DECL_VISIBILITY (t))
2675 {
2676 case VISIBILITY_DEFAULT:
2677 visibility = GCCPV_DEFAULT;
2678 break;
2679 case VISIBILITY_PROTECTED:
2680 visibility = GCCPV_PROTECTED;
2681 break;
2682 case VISIBILITY_HIDDEN:
2683 visibility = GCCPV_HIDDEN;
2684 break;
2685 case VISIBILITY_INTERNAL:
2686 visibility = GCCPV_INTERNAL;
2687 break;
2688 }
2689
2690 if (kind == GCCPK_COMMON
2691 && DECL_SIZE_UNIT (t)
2692 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2693 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2694 else
2695 size = 0;
2696
2697 if (DECL_ONE_ONLY (t))
2698 comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
2699 else
2700 comdat = "";
2701
2702 lto_write_data (name, strlen (name) + 1);
2703 lto_write_data (comdat, strlen (comdat) + 1);
2704 c = (unsigned char) kind;
2705 lto_write_data (&c, 1);
2706 c = (unsigned char) visibility;
2707 lto_write_data (&c, 1);
2708 lto_write_data (&size, 8);
2709 lto_write_data (&slot_num, 4);
2710 }
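
/* Each symbol table entry written above therefore consists of:

     the NUL-terminated assembler name
     the NUL-terminated comdat group name (empty if none)
     one byte of symbol kind and one byte of visibility
     8 bytes of size (non-zero only for commons)
     4 bytes of writer-cache slot number

   which is the layout the LTO linker plugin reads back.  */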
2711
2712 /* Write an IL symbol table to OB for the symbols in the symtab node
2713 encoder of OB's decl state. */
2714
2715 static void
2716 produce_symtab (struct output_block *ob)
2717 {
2718 struct streamer_tree_cache_d *cache = ob->writer_cache;
2719 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2720 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2721 lto_symtab_encoder_iterator lsei;
2722
2723 lto_begin_section (section_name, false);
2724 free (section_name);
2725
2726 hash_set<const char *> seen;
2727
2728 /* Write the symbol table.
2729 First write everything defined and then all declarations.
2730 This is necessary to handle cases where we have duplicated symbols. */
2731 for (lsei = lsei_start (encoder);
2732 !lsei_end_p (lsei); lsei_next (&lsei))
2733 {
2734 symtab_node *node = lsei_node (lsei);
2735
2736 if (DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
2737 continue;
2738 write_symbol (cache, node->decl, &seen, false);
2739 }
2740 for (lsei = lsei_start (encoder);
2741 !lsei_end_p (lsei); lsei_next (&lsei))
2742 {
2743 symtab_node *node = lsei_node (lsei);
2744
2745 if (!DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
2746 continue;
2747 write_symbol (cache, node->decl, &seen, false);
2748 }
2749
2750 lto_end_section ();
2751 }
2752
2753
2754 /* Init the streamer_mode_table for output, where we collect info on what
2755 machine_mode values have been streamed. */
2756 void
2757 lto_output_init_mode_table (void)
2758 {
2759 memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
2760 }
2761
2762
2763 /* Write the mode table. */
2764 static void
2765 lto_write_mode_table (void)
2766 {
2767 struct output_block *ob;
2768 ob = create_output_block (LTO_section_mode_table);
2769 bitpack_d bp = bitpack_create (ob->main_stream);
2770
2771 /* Ensure that whenever GET_MODE_INNER (m) != m the inner mode is
2772 marked as well. */
2773 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2774 if (streamer_mode_table[i])
2775 {
2776 machine_mode m = (machine_mode) i;
2777 machine_mode inner_m = GET_MODE_INNER (m);
2778 if (inner_m != m)
2779 streamer_mode_table[(int) inner_m] = 1;
2780 }
2781 /* First stream modes that have GET_MODE_INNER (m) == m,
2782 so that we can refer to them afterwards. */
2783 for (int pass = 0; pass < 2; pass++)
2784 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2785 if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
2786 {
2787 machine_mode m = (machine_mode) i;
2788 if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
2789 continue;
2790 bp_pack_value (&bp, m, 8);
2791 bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
2792 bp_pack_poly_value (&bp, GET_MODE_SIZE (m), 16);
2793 bp_pack_poly_value (&bp, GET_MODE_PRECISION (m), 16);
2794 bp_pack_value (&bp, GET_MODE_INNER (m), 8);
2795 bp_pack_poly_value (&bp, GET_MODE_NUNITS (m), 16);
2796 switch (GET_MODE_CLASS (m))
2797 {
2798 case MODE_FRACT:
2799 case MODE_UFRACT:
2800 case MODE_ACCUM:
2801 case MODE_UACCUM:
2802 bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
2803 bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
2804 break;
2805 case MODE_FLOAT:
2806 case MODE_DECIMAL_FLOAT:
2807 bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
2808 break;
2809 default:
2810 break;
2811 }
2812 bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
2813 }
2814 bp_pack_value (&bp, VOIDmode, 8);
2815
2816 streamer_write_bitpack (&bp);
2817
2818 char *section_name
2819 = lto_get_section_name (LTO_section_mode_table, NULL, NULL);
2820 lto_begin_section (section_name, !flag_wpa);
2821 free (section_name);
2822
2823 /* The entire header stream is computed here. */
2824 struct lto_simple_header_with_strings header;
2825 memset (&header, 0, sizeof (header));
2826
2827 /* Write the header. */
2828 header.major_version = LTO_major_version;
2829 header.minor_version = LTO_minor_version;
2830
2831 header.main_size = ob->main_stream->total_size;
2832 header.string_size = ob->string_stream->total_size;
2833 lto_write_data (&header, sizeof header);
2834
2835 /* Put all of the gimple and the string table out to the asm file as a
2836 block of text. */
2837 lto_write_stream (ob->main_stream);
2838 lto_write_stream (ob->string_stream);
2839
2840 lto_end_section ();
2841 destroy_output_block (ob);
2842 }
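
/* Each mode record packed above carries the 8-bit mode number, its
   class, size, precision, inner mode and number of units, then the
   ibit/fbit values for fixed-point classes or the real format name for
   float classes, and finally the mode name.  The two passes ensure that
   a mode's inner mode is streamed before the mode itself; an 8-bit
   VOIDmode value terminates the table.  */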
2843
2844
2845 /* This pass is run after all of the functions are serialized and all
2846 of the IPA passes have written their serialized forms. This pass
2847 causes the vector of all of the global decls and types used from
2848 this file to be written into a section that can then be read back in
2849 to recover them on the other side. */
2850
2851 void
2852 produce_asm_for_decls (void)
2853 {
2854 struct lto_out_decl_state *out_state;
2855 struct lto_out_decl_state *fn_out_state;
2856 struct lto_decl_header header;
2857 char *section_name;
2858 struct output_block *ob;
2859 unsigned idx, num_fns;
2860 size_t decl_state_size;
2861 int32_t num_decl_states;
2862
2863 ob = create_output_block (LTO_section_decls);
2864
2865 memset (&header, 0, sizeof (struct lto_decl_header));
2866
2867 section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
2868 lto_begin_section (section_name, !flag_wpa);
2869 free (section_name);
2870
2871 /* Make string 0 be a NULL string. */
2872 streamer_write_char_stream (ob->string_stream, 0);
2873
2874 gcc_assert (!alias_pairs);
2875
2876 /* Get rid of the global decl state hash tables to save some memory. */
2877 out_state = lto_get_out_decl_state ();
2878 for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
2879 if (out_state->streams[i].tree_hash_table)
2880 {
2881 delete out_state->streams[i].tree_hash_table;
2882 out_state->streams[i].tree_hash_table = NULL;
2883 }
2884
2885 /* Write the global symbols. */
2886 if (streamer_dump_file)
2887 fprintf (streamer_dump_file, "Outputting global stream\n");
2888 lto_output_decl_state_streams (ob, out_state);
2889 num_fns = lto_function_decl_states.length ();
2890 for (idx = 0; idx < num_fns; idx++)
2891 {
2892 fn_out_state =
2893 lto_function_decl_states[idx];
2894 if (streamer_dump_file)
2895 fprintf (streamer_dump_file, "Outputting stream for %s\n",
2896 IDENTIFIER_POINTER
2897 (DECL_ASSEMBLER_NAME (fn_out_state->fn_decl)));
2898 lto_output_decl_state_streams (ob, fn_out_state);
2899 }
2900
2901 header.major_version = LTO_major_version;
2902 header.minor_version = LTO_minor_version;
2903
2904 /* Currently not used. This field would allow us to preallocate
2905 the globals vector, so that it need not be resized as it is extended. */
2906 header.num_nodes = -1;
2907
2908 /* Compute the total size of all decl out states. */
2909 decl_state_size = sizeof (int32_t);
2910 decl_state_size += lto_out_decl_state_written_size (out_state);
2911 for (idx = 0; idx < num_fns; idx++)
2912 {
2913 fn_out_state =
2914 lto_function_decl_states[idx];
2915 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
2916 }
2917 header.decl_state_size = decl_state_size;
2918
2919 header.main_size = ob->main_stream->total_size;
2920 header.string_size = ob->string_stream->total_size;
2921
2922 lto_write_data (&header, sizeof header);
2923
2924 /* Write the main out-decl state, followed by out-decl states of
2925 functions. */
2926 num_decl_states = num_fns + 1;
2927 lto_write_data (&num_decl_states, sizeof (num_decl_states));
2928 lto_output_decl_state_refs (ob, out_state);
2929 for (idx = 0; idx < num_fns; idx++)
2930 {
2931 fn_out_state = lto_function_decl_states[idx];
2932 lto_output_decl_state_refs (ob, fn_out_state);
2933 }
2934
2935 lto_write_stream (ob->main_stream);
2936 lto_write_stream (ob->string_stream);
2937
2938 lto_end_section ();
2939
2940 /* Write the symbol table. It is used by the linker to determine
2941 dependencies, and thus we can skip it for WPA. */
2942 if (!flag_wpa)
2943 produce_symtab (ob);
2944
2945 /* Write command line opts. */
2946 lto_write_options ();
2947
2948 /* Deallocate memory and clean up. */
2949 for (idx = 0; idx < num_fns; idx++)
2950 {
2951 fn_out_state =
2952 lto_function_decl_states[idx];
2953 lto_delete_out_decl_state (fn_out_state);
2954 }
2955 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
2956 lto_function_decl_states.release ();
2957 destroy_output_block (ob);
2958 if (lto_stream_offload_p)
2959 lto_write_mode_table ();
2960 }
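
/* The decls section assembled above thus contains the lto_decl_header,
   a 32-bit count of decl states (the global one plus one per function),
   the reference data for each of those states, and finally the main and
   string streams holding the pickled global trees.  The symbol table
   (skipped for WPA) and the streamed command line options follow as
   separate sections.  */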