Do not update SSA in lto-streamer-in
[gcc.git] / gcc / lto-streamer-in.c
1 /* Read the GIMPLE representation from a file stream.
2
3 Copyright (C) 2009-2019 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "cfghooks.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "gimple-streamer.h"
35 #include "toplev.h"
36 #include "gimple-iterator.h"
37 #include "tree-cfg.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "tree-ssa.h"
41 #include "except.h"
42 #include "cgraph.h"
43 #include "cfgloop.h"
44 #include "debug.h"
45 #include "alloc-pool.h"
46
/* Allocator used to hold string slot entries for line map streaming.  */
static struct object_allocator<struct string_slot> *string_slot_allocator;

/* The table used to canonicalize file names (see canon_file_name), so each
   distinct name is stored exactly once and can be compared by pointer.  */
static hash_table<string_slot_hasher> *file_name_hash_table;

/* This obstack holds file names used in locators.  Line map data structures
   point here and thus it needs to stay allocated as long as linemaps
   exist.  */
static struct obstack file_name_obstack;
57
58
59 /* Check that tag ACTUAL has one of the given values. NUM_TAGS is the
60 number of valid tag values to check. */
61
62 void
63 lto_tag_check_set (enum LTO_tags actual, int ntags, ...)
64 {
65 va_list ap;
66 int i;
67
68 va_start (ap, ntags);
69 for (i = 0; i < ntags; i++)
70 if ((unsigned) actual == va_arg (ap, unsigned))
71 {
72 va_end (ap);
73 return;
74 }
75
76 va_end (ap);
77 internal_error ("bytecode stream: unexpected tag %s", lto_tag_name (actual));
78 }
79
80
81 /* Read LENGTH bytes from STREAM to ADDR. */
82
83 void
84 lto_input_data_block (class lto_input_block *ib, void *addr, size_t length)
85 {
86 size_t i;
87 unsigned char *const buffer = (unsigned char *) addr;
88
89 for (i = 0; i < length; i++)
90 buffer[i] = streamer_read_uchar (ib);
91 }
92
93
94 /* Lookup STRING in file_name_hash_table. If found, return the existing
95 string, otherwise insert STRING as the canonical version. */
96
97 static const char *
98 canon_file_name (const char *string)
99 {
100 string_slot **slot;
101 struct string_slot s_slot;
102 size_t len = strlen (string);
103
104 s_slot.s = string;
105 s_slot.len = len;
106
107 slot = file_name_hash_table->find_slot (&s_slot, INSERT);
108 if (*slot == NULL)
109 {
110 char *saved_string;
111 struct string_slot *new_slot;
112
113 saved_string = XOBNEWVEC (&file_name_obstack, char, len + 1);
114 new_slot = string_slot_allocator->allocate ();
115 memcpy (saved_string, string, len + 1);
116 new_slot->s = saved_string;
117 new_slot->len = len;
118 *slot = new_slot;
119 return saved_string;
120 }
121 else
122 {
123 struct string_slot *old_slot = *slot;
124 return old_slot->s;
125 }
126 }
127
/* Pointer to the currently alive instance of lto_location_cache.  Only one
   cache may be active at a time (the member functions assert this).  */

lto_location_cache *lto_location_cache::current_cache;
131
132 /* Sort locations in source order. Start with file from last application. */
133
134 int
135 lto_location_cache::cmp_loc (const void *pa, const void *pb)
136 {
137 const cached_location *a = ((const cached_location *)pa);
138 const cached_location *b = ((const cached_location *)pb);
139 const char *current_file = current_cache->current_file;
140 int current_line = current_cache->current_line;
141
142 if (a->file == current_file && b->file != current_file)
143 return -1;
144 if (a->file != current_file && b->file == current_file)
145 return 1;
146 if (a->file == current_file && b->file == current_file)
147 {
148 if (a->line == current_line && b->line != current_line)
149 return -1;
150 if (a->line != current_line && b->line == current_line)
151 return 1;
152 }
153 if (a->file != b->file)
154 return strcmp (a->file, b->file);
155 if (a->sysp != b->sysp)
156 return a->sysp ? 1 : -1;
157 if (a->line != b->line)
158 return a->line - b->line;
159 return a->col - b->col;
160 }
161
/* Apply all changes in location cache.  Add locations into linemap and patch
   trees.  Returns true if any location was pending.  */

bool
lto_location_cache::apply_location_cache ()
{
  /* Last file entered into the line table; static so the LC_RENAME vs
     LC_ENTER decision survives across calls and cache instances.  */
  static const char *prev_file;
  if (!loc_cache.length ())
    return false;
  if (loc_cache.length () > 1)
    loc_cache.qsort (cmp_loc);

  for (unsigned int i = 0; i < loc_cache.length (); i++)
    {
      struct cached_location loc = loc_cache[i];

      if (current_file != loc.file)
	linemap_add (line_table, prev_file ? LC_RENAME : LC_ENTER,
		     loc.sysp, loc.file, loc.line);
      else if (current_line != loc.line)
	{
	  int max = loc.col;

	  /* Scan ahead over the (sorted) entries for this same file/line to
	     find the widest column, so linemap_line_start reserves enough
	     room to encode every column on the line.  */
	  for (unsigned int j = i + 1; j < loc_cache.length (); j++)
	    if (loc.file != loc_cache[j].file
		|| loc.line != loc_cache[j].line)
	      break;
	    else if (max < loc_cache[j].col)
	      max = loc_cache[j].col;
	  linemap_line_start (line_table, loc.line, max + 1);
	}
      /* Pending entries hold the placeholder stored by input_location
	 (RESERVED_LOCATION_COUNT, i.e. BUILTINS_LOCATION + 1).  */
      gcc_assert (*loc.loc == BUILTINS_LOCATION + 1);
      if (current_file == loc.file && current_line == loc.line
	  && current_col == loc.col)
	*loc.loc = current_loc;
      else
	current_loc = *loc.loc = linemap_position_for_column (line_table,
							      loc.col);
      /* Remember where we are so identical follow-up locations are reused
	 without further linemap work.  */
      current_line = loc.line;
      prev_file = current_file = loc.file;
      current_col = loc.col;
    }
  loc_cache.truncate (0);
  accepted_length = 0;
  return true;
}
208
209 /* Tree merging did not suceed; mark all changes in the cache as accepted. */
210
211 void
212 lto_location_cache::accept_location_cache ()
213 {
214 gcc_assert (current_cache == this);
215 accepted_length = loc_cache.length ();
216 }
217
218 /* Tree merging did suceed; throw away recent changes. */
219
220 void
221 lto_location_cache::revert_location_cache ()
222 {
223 loc_cache.truncate (accepted_length);
224 }
225
226 /* Read a location bitpack from input block IB and either update *LOC directly
227 or add it to the location cache.
228 It is neccesary to call apply_location_cache to get *LOC updated. */
229
230 void
231 lto_location_cache::input_location (location_t *loc, struct bitpack_d *bp,
232 class data_in *data_in)
233 {
234 static const char *stream_file;
235 static int stream_line;
236 static int stream_col;
237 static bool stream_sysp;
238 bool file_change, line_change, column_change;
239
240 gcc_assert (current_cache == this);
241
242 *loc = bp_unpack_int_in_range (bp, "location", 0, RESERVED_LOCATION_COUNT);
243
244 if (*loc < RESERVED_LOCATION_COUNT)
245 return;
246
247 /* Keep value RESERVED_LOCATION_COUNT in *loc as linemap lookups will
248 ICE on it. */
249
250 file_change = bp_unpack_value (bp, 1);
251 line_change = bp_unpack_value (bp, 1);
252 column_change = bp_unpack_value (bp, 1);
253
254 if (file_change)
255 {
256 stream_file = canon_file_name (bp_unpack_string (data_in, bp));
257 stream_sysp = bp_unpack_value (bp, 1);
258 }
259
260 if (line_change)
261 stream_line = bp_unpack_var_len_unsigned (bp);
262
263 if (column_change)
264 stream_col = bp_unpack_var_len_unsigned (bp);
265
266 /* This optimization saves location cache operations druing gimple
267 streaming. */
268
269 if (current_file == stream_file && current_line == stream_line
270 && current_col == stream_col && current_sysp == stream_sysp)
271 {
272 *loc = current_loc;
273 return;
274 }
275
276 struct cached_location entry
277 = {stream_file, loc, stream_line, stream_col, stream_sysp};
278 loc_cache.safe_push (entry);
279 }
280
281 /* Read a location bitpack from input block IB and either update *LOC directly
282 or add it to the location cache.
283 It is neccesary to call apply_location_cache to get *LOC updated. */
284
285 void
286 lto_input_location (location_t *loc, struct bitpack_d *bp,
287 class data_in *data_in)
288 {
289 data_in->location_cache.input_location (loc, bp, data_in);
290 }
291
292 /* Read location and return it instead of going through location caching.
293 This should be used only when the resulting location is not going to be
294 discarded. */
295
296 location_t
297 stream_input_location_now (struct bitpack_d *bp, class data_in *data_in)
298 {
299 location_t loc;
300 stream_input_location (&loc, bp, data_in);
301 data_in->location_cache.apply_location_cache ();
302 return loc;
303 }
304
305 /* Read a reference to a tree node from DATA_IN using input block IB.
306 TAG is the expected node that should be found in IB, if TAG belongs
307 to one of the indexable trees, expect to read a reference index to
308 be looked up in one of the symbol tables, otherwise read the pysical
309 representation of the tree using stream_read_tree. FN is the
310 function scope for the read tree. */
311
312 tree
313 lto_input_tree_ref (class lto_input_block *ib, class data_in *data_in,
314 struct function *fn, enum LTO_tags tag)
315 {
316 unsigned HOST_WIDE_INT ix_u;
317 tree result = NULL_TREE;
318
319 lto_tag_check_range (tag, LTO_field_decl_ref, LTO_namelist_decl_ref);
320
321 switch (tag)
322 {
323 case LTO_type_ref:
324 ix_u = streamer_read_uhwi (ib);
325 result = lto_file_decl_data_get_type (data_in->file_data, ix_u);
326 break;
327
328 case LTO_ssa_name_ref:
329 ix_u = streamer_read_uhwi (ib);
330 result = (*SSANAMES (fn))[ix_u];
331 break;
332
333 case LTO_field_decl_ref:
334 ix_u = streamer_read_uhwi (ib);
335 result = lto_file_decl_data_get_field_decl (data_in->file_data, ix_u);
336 break;
337
338 case LTO_function_decl_ref:
339 ix_u = streamer_read_uhwi (ib);
340 result = lto_file_decl_data_get_fn_decl (data_in->file_data, ix_u);
341 break;
342
343 case LTO_type_decl_ref:
344 ix_u = streamer_read_uhwi (ib);
345 result = lto_file_decl_data_get_type_decl (data_in->file_data, ix_u);
346 break;
347
348 case LTO_namespace_decl_ref:
349 ix_u = streamer_read_uhwi (ib);
350 result = lto_file_decl_data_get_namespace_decl (data_in->file_data, ix_u);
351 break;
352
353 case LTO_global_decl_ref:
354 case LTO_result_decl_ref:
355 case LTO_const_decl_ref:
356 case LTO_imported_decl_ref:
357 case LTO_label_decl_ref:
358 case LTO_translation_unit_decl_ref:
359 case LTO_namelist_decl_ref:
360 ix_u = streamer_read_uhwi (ib);
361 result = lto_file_decl_data_get_var_decl (data_in->file_data, ix_u);
362 break;
363
364 default:
365 gcc_unreachable ();
366 }
367
368 gcc_assert (result);
369
370 return result;
371 }
372
373
374 /* Read and return a double-linked list of catch handlers from input
375 block IB, using descriptors in DATA_IN. */
376
377 static struct eh_catch_d *
378 lto_input_eh_catch_list (class lto_input_block *ib, class data_in *data_in,
379 eh_catch *last_p)
380 {
381 eh_catch first;
382 enum LTO_tags tag;
383
384 *last_p = first = NULL;
385 tag = streamer_read_record_start (ib);
386 while (tag)
387 {
388 tree list;
389 eh_catch n;
390
391 lto_tag_check_range (tag, LTO_eh_catch, LTO_eh_catch);
392
393 /* Read the catch node. */
394 n = ggc_cleared_alloc<eh_catch_d> ();
395 n->type_list = stream_read_tree (ib, data_in);
396 n->filter_list = stream_read_tree (ib, data_in);
397 n->label = stream_read_tree (ib, data_in);
398
399 /* Register all the types in N->FILTER_LIST. */
400 for (list = n->filter_list; list; list = TREE_CHAIN (list))
401 add_type_for_runtime (TREE_VALUE (list));
402
403 /* Chain N to the end of the list. */
404 if (*last_p)
405 (*last_p)->next_catch = n;
406 n->prev_catch = *last_p;
407 *last_p = n;
408
409 /* Set the head of the list the first time through the loop. */
410 if (first == NULL)
411 first = n;
412
413 tag = streamer_read_record_start (ib);
414 }
415
416 return first;
417 }
418
419
420 /* Read and return EH region IX from input block IB, using descriptors
421 in DATA_IN. */
422
423 static eh_region
424 input_eh_region (class lto_input_block *ib, class data_in *data_in, int ix)
425 {
426 enum LTO_tags tag;
427 eh_region r;
428
429 /* Read the region header. */
430 tag = streamer_read_record_start (ib);
431 if (tag == LTO_null)
432 return NULL;
433
434 r = ggc_cleared_alloc<eh_region_d> ();
435 r->index = streamer_read_hwi (ib);
436
437 gcc_assert (r->index == ix);
438
439 /* Read all the region pointers as region numbers. We'll fix up
440 the pointers once the whole array has been read. */
441 r->outer = (eh_region) (intptr_t) streamer_read_hwi (ib);
442 r->inner = (eh_region) (intptr_t) streamer_read_hwi (ib);
443 r->next_peer = (eh_region) (intptr_t) streamer_read_hwi (ib);
444
445 switch (tag)
446 {
447 case LTO_ert_cleanup:
448 r->type = ERT_CLEANUP;
449 break;
450
451 case LTO_ert_try:
452 {
453 struct eh_catch_d *last_catch;
454 r->type = ERT_TRY;
455 r->u.eh_try.first_catch = lto_input_eh_catch_list (ib, data_in,
456 &last_catch);
457 r->u.eh_try.last_catch = last_catch;
458 break;
459 }
460
461 case LTO_ert_allowed_exceptions:
462 {
463 tree l;
464
465 r->type = ERT_ALLOWED_EXCEPTIONS;
466 r->u.allowed.type_list = stream_read_tree (ib, data_in);
467 r->u.allowed.label = stream_read_tree (ib, data_in);
468 r->u.allowed.filter = streamer_read_uhwi (ib);
469
470 for (l = r->u.allowed.type_list; l ; l = TREE_CHAIN (l))
471 add_type_for_runtime (TREE_VALUE (l));
472 }
473 break;
474
475 case LTO_ert_must_not_throw:
476 {
477 r->type = ERT_MUST_NOT_THROW;
478 r->u.must_not_throw.failure_decl = stream_read_tree (ib, data_in);
479 bitpack_d bp = streamer_read_bitpack (ib);
480 r->u.must_not_throw.failure_loc
481 = stream_input_location_now (&bp, data_in);
482 }
483 break;
484
485 default:
486 gcc_unreachable ();
487 }
488
489 r->landing_pads = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);
490
491 return r;
492 }
493
494
495 /* Read and return EH landing pad IX from input block IB, using descriptors
496 in DATA_IN. */
497
498 static eh_landing_pad
499 input_eh_lp (class lto_input_block *ib, class data_in *data_in, int ix)
500 {
501 enum LTO_tags tag;
502 eh_landing_pad lp;
503
504 /* Read the landing pad header. */
505 tag = streamer_read_record_start (ib);
506 if (tag == LTO_null)
507 return NULL;
508
509 lto_tag_check_range (tag, LTO_eh_landing_pad, LTO_eh_landing_pad);
510
511 lp = ggc_cleared_alloc<eh_landing_pad_d> ();
512 lp->index = streamer_read_hwi (ib);
513 gcc_assert (lp->index == ix);
514 lp->next_lp = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);
515 lp->region = (eh_region) (intptr_t) streamer_read_hwi (ib);
516 lp->post_landing_pad = stream_read_tree (ib, data_in);
517
518 return lp;
519 }
520
521
522 /* After reading the EH regions, pointers to peer and children regions
523 are region numbers. This converts all these region numbers into
524 real pointers into the rematerialized regions for FN. ROOT_REGION
525 is the region number for the root EH region in FN. */
526
527 static void
528 fixup_eh_region_pointers (struct function *fn, HOST_WIDE_INT root_region)
529 {
530 unsigned i;
531 vec<eh_region, va_gc> *eh_array = fn->eh->region_array;
532 vec<eh_landing_pad, va_gc> *lp_array = fn->eh->lp_array;
533 eh_region r;
534 eh_landing_pad lp;
535
536 gcc_assert (eh_array && lp_array);
537
538 gcc_assert (root_region >= 0);
539 fn->eh->region_tree = (*eh_array)[root_region];
540
541 #define FIXUP_EH_REGION(r) (r) = (*eh_array)[(HOST_WIDE_INT) (intptr_t) (r)]
542 #define FIXUP_EH_LP(p) (p) = (*lp_array)[(HOST_WIDE_INT) (intptr_t) (p)]
543
544 /* Convert all the index numbers stored in pointer fields into
545 pointers to the corresponding slots in the EH region array. */
546 FOR_EACH_VEC_ELT (*eh_array, i, r)
547 {
548 /* The array may contain NULL regions. */
549 if (r == NULL)
550 continue;
551
552 gcc_assert (i == (unsigned) r->index);
553 FIXUP_EH_REGION (r->outer);
554 FIXUP_EH_REGION (r->inner);
555 FIXUP_EH_REGION (r->next_peer);
556 FIXUP_EH_LP (r->landing_pads);
557 }
558
559 /* Convert all the index numbers stored in pointer fields into
560 pointers to the corresponding slots in the EH landing pad array. */
561 FOR_EACH_VEC_ELT (*lp_array, i, lp)
562 {
563 /* The array may contain NULL landing pads. */
564 if (lp == NULL)
565 continue;
566
567 gcc_assert (i == (unsigned) lp->index);
568 FIXUP_EH_LP (lp->next_lp);
569 FIXUP_EH_REGION (lp->region);
570 }
571
572 #undef FIXUP_EH_REGION
573 #undef FIXUP_EH_LP
574 }
575
576
577 /* Initialize EH support. */
578
579 void
580 lto_init_eh (void)
581 {
582 static bool eh_initialized_p = false;
583
584 if (eh_initialized_p)
585 return;
586
587 /* Contrary to most other FEs, we only initialize EH support when at
588 least one of the files in the set contains exception regions in
589 it. Since this happens much later than the call to init_eh in
590 lang_dependent_init, we have to set flag_exceptions and call
591 init_eh again to initialize the EH tables. */
592 flag_exceptions = 1;
593 init_eh ();
594
595 eh_initialized_p = true;
596 }
597
598
599 /* Read the exception table for FN from IB using the data descriptors
600 in DATA_IN. */
601
602 static void
603 input_eh_regions (class lto_input_block *ib, class data_in *data_in,
604 struct function *fn)
605 {
606 HOST_WIDE_INT i, root_region, len;
607 enum LTO_tags tag;
608
609 tag = streamer_read_record_start (ib);
610 if (tag == LTO_null)
611 return;
612
613 lto_tag_check_range (tag, LTO_eh_table, LTO_eh_table);
614
615 gcc_assert (fn->eh);
616
617 root_region = streamer_read_hwi (ib);
618 gcc_assert (root_region == (int) root_region);
619
620 /* Read the EH region array. */
621 len = streamer_read_hwi (ib);
622 gcc_assert (len == (int) len);
623 if (len > 0)
624 {
625 vec_safe_grow_cleared (fn->eh->region_array, len);
626 for (i = 0; i < len; i++)
627 {
628 eh_region r = input_eh_region (ib, data_in, i);
629 (*fn->eh->region_array)[i] = r;
630 }
631 }
632
633 /* Read the landing pads. */
634 len = streamer_read_hwi (ib);
635 gcc_assert (len == (int) len);
636 if (len > 0)
637 {
638 vec_safe_grow_cleared (fn->eh->lp_array, len);
639 for (i = 0; i < len; i++)
640 {
641 eh_landing_pad lp = input_eh_lp (ib, data_in, i);
642 (*fn->eh->lp_array)[i] = lp;
643 }
644 }
645
646 /* Read the runtime type data. */
647 len = streamer_read_hwi (ib);
648 gcc_assert (len == (int) len);
649 if (len > 0)
650 {
651 vec_safe_grow_cleared (fn->eh->ttype_data, len);
652 for (i = 0; i < len; i++)
653 {
654 tree ttype = stream_read_tree (ib, data_in);
655 (*fn->eh->ttype_data)[i] = ttype;
656 }
657 }
658
659 /* Read the table of action chains. */
660 len = streamer_read_hwi (ib);
661 gcc_assert (len == (int) len);
662 if (len > 0)
663 {
664 if (targetm.arm_eabi_unwinder)
665 {
666 vec_safe_grow_cleared (fn->eh->ehspec_data.arm_eabi, len);
667 for (i = 0; i < len; i++)
668 {
669 tree t = stream_read_tree (ib, data_in);
670 (*fn->eh->ehspec_data.arm_eabi)[i] = t;
671 }
672 }
673 else
674 {
675 vec_safe_grow_cleared (fn->eh->ehspec_data.other, len);
676 for (i = 0; i < len; i++)
677 {
678 uchar c = streamer_read_uchar (ib);
679 (*fn->eh->ehspec_data.other)[i] = c;
680 }
681 }
682 }
683
684 /* Reconstruct the EH region tree by fixing up the peer/children
685 pointers. */
686 fixup_eh_region_pointers (fn, root_region);
687
688 tag = streamer_read_record_start (ib);
689 lto_tag_check_range (tag, LTO_null, LTO_null);
690 }
691
692
693 /* Make a new basic block with index INDEX in function FN. */
694
695 static basic_block
696 make_new_block (struct function *fn, unsigned int index)
697 {
698 basic_block bb = alloc_block ();
699 bb->index = index;
700 SET_BASIC_BLOCK_FOR_FN (fn, index, bb);
701 n_basic_blocks_for_fn (fn)++;
702 return bb;
703 }
704
705
/* Read the CFG for function FN from input block IB.  The encoding is:
   profile status, block count, a -1 terminated list of blocks with their
   outgoing edges, a -1 terminated list giving the linear block order,
   and finally the loop tree.  */

static void
input_cfg (class lto_input_block *ib, class data_in *data_in,
	   struct function *fn)
{
  unsigned int bb_count;
  basic_block p_bb;
  unsigned int i;
  int index;

  init_empty_tree_cfg_for_function (fn);
  init_ssa_operands (fn);

  profile_status_for_fn (fn) = streamer_read_enum (ib, profile_status_d,
						   PROFILE_LAST);

  bb_count = streamer_read_uhwi (ib);

  last_basic_block_for_fn (fn) = bb_count;
  if (bb_count > basic_block_info_for_fn (fn)->length ())
    vec_safe_grow_cleared (basic_block_info_for_fn (fn), bb_count);

  if (bb_count > label_to_block_map_for_fn (fn)->length ())
    vec_safe_grow_cleared (label_to_block_map_for_fn (fn), bb_count);

  /* First pass: create blocks on demand and connect them with the
     streamed edges.  */
  index = streamer_read_hwi (ib);
  while (index != -1)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
      unsigned int edge_count;

      if (bb == NULL)
	bb = make_new_block (fn, index);

      edge_count = streamer_read_uhwi (ib);

      /* Connect up the CFG.  */
      for (i = 0; i < edge_count; i++)
	{
	  unsigned int dest_index;
	  unsigned int edge_flags;
	  basic_block dest;
	  profile_probability probability;
	  edge e;

	  dest_index = streamer_read_uhwi (ib);
	  probability = profile_probability::stream_in (ib);
	  edge_flags = streamer_read_uhwi (ib);

	  dest = BASIC_BLOCK_FOR_FN (fn, dest_index);

	  /* The destination may not have been streamed yet.  */
	  if (dest == NULL)
	    dest = make_new_block (fn, dest_index);

	  e = make_edge (bb, dest, edge_flags);
	  e->probability = probability;
	}

      index = streamer_read_hwi (ib);
    }

  /* Second pass: rebuild the linear prev_bb/next_bb chain from the
     streamed block order, starting at the entry block.  */
  p_bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
  index = streamer_read_hwi (ib);
  while (index != -1)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
      bb->prev_bb = p_bb;
      p_bb->next_bb = bb;
      p_bb = bb;
      index = streamer_read_hwi (ib);
    }

  /* ??? The cfgloop interface is tied to cfun.  */
  gcc_assert (cfun == fn);

  /* Input the loop tree.  */
  unsigned n_loops = streamer_read_uhwi (ib);
  if (n_loops == 0)
    return;

  struct loops *loops = ggc_cleared_alloc<struct loops> ();
  init_loops_structure (fn, loops, n_loops);
  set_loops_for_fn (fn, loops);

  /* Input each loop and associate it with its loop header so
     flow_loops_find can rebuild the loop tree.  */
  for (unsigned i = 1; i < n_loops; ++i)
    {
      int header_index = streamer_read_hwi (ib);
      /* -1 marks a deleted loop; keep the slot to preserve loop numbers.  */
      if (header_index == -1)
	{
	  loops->larray->quick_push (NULL);
	  continue;
	}

      class loop *loop = alloc_loop ();
      loop->header = BASIC_BLOCK_FOR_FN (fn, header_index);
      loop->header->loop_father = loop;

      /* Read everything copy_loop_info copies.  */
      loop->estimate_state = streamer_read_enum (ib, loop_estimation, EST_LAST);
      loop->any_upper_bound = streamer_read_hwi (ib);
      if (loop->any_upper_bound)
	loop->nb_iterations_upper_bound = streamer_read_widest_int (ib);
      loop->any_likely_upper_bound = streamer_read_hwi (ib);
      if (loop->any_likely_upper_bound)
	loop->nb_iterations_likely_upper_bound = streamer_read_widest_int (ib);
      loop->any_estimate = streamer_read_hwi (ib);
      if (loop->any_estimate)
	loop->nb_iterations_estimate = streamer_read_widest_int (ib);

      /* Read OMP SIMD related info.  */
      loop->safelen = streamer_read_hwi (ib);
      loop->unroll = streamer_read_hwi (ib);
      loop->owned_clique = streamer_read_hwi (ib);
      loop->dont_vectorize = streamer_read_hwi (ib);
      loop->force_vectorize = streamer_read_hwi (ib);
      loop->simduid = stream_read_tree (ib, data_in);

      place_new_loop (fn, loop);

      /* flow_loops_find doesn't like loops not in the tree, hook them
	 all as siblings of the tree root temporarily.  */
      flow_loop_tree_node_add (loops->tree_root, loop);
    }

  /* Rebuild the loop tree.  */
  flow_loops_find (loops);
}
836
837
838 /* Read the SSA names array for function FN from DATA_IN using input
839 block IB. */
840
841 static void
842 input_ssa_names (class lto_input_block *ib, class data_in *data_in,
843 struct function *fn)
844 {
845 unsigned int i, size;
846
847 size = streamer_read_uhwi (ib);
848 init_ssanames (fn, size);
849
850 i = streamer_read_uhwi (ib);
851 while (i)
852 {
853 tree ssa_name, name;
854 bool is_default_def;
855
856 /* Skip over the elements that had been freed. */
857 while (SSANAMES (fn)->length () < i)
858 SSANAMES (fn)->quick_push (NULL_TREE);
859
860 is_default_def = (streamer_read_uchar (ib) != 0);
861 name = stream_read_tree (ib, data_in);
862 ssa_name = make_ssa_name_fn (fn, name, NULL);
863
864 if (is_default_def)
865 {
866 set_ssa_default_def (cfun, SSA_NAME_VAR (ssa_name), ssa_name);
867 SSA_NAME_DEF_STMT (ssa_name) = gimple_build_nop ();
868 }
869
870 i = streamer_read_uhwi (ib);
871 }
872 }
873
874
875 /* Go through all NODE edges and fixup call_stmt pointers
876 so they point to STMTS. */
877
878 static void
879 fixup_call_stmt_edges_1 (struct cgraph_node *node, gimple **stmts,
880 struct function *fn)
881 {
882 #define STMT_UID_NOT_IN_RANGE(uid) \
883 (gimple_stmt_max_uid (fn) < uid || uid == 0)
884
885 struct cgraph_edge *cedge;
886 struct ipa_ref *ref = NULL;
887 unsigned int i;
888
889 for (cedge = node->callees; cedge; cedge = cedge->next_callee)
890 {
891 if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
892 fatal_error (input_location,
893 "Cgraph edge statement index out of range");
894 cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
895 if (!cedge->call_stmt)
896 fatal_error (input_location,
897 "Cgraph edge statement index not found");
898 }
899 for (cedge = node->indirect_calls; cedge; cedge = cedge->next_callee)
900 {
901 if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
902 fatal_error (input_location,
903 "Cgraph edge statement index out of range");
904 cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
905 if (!cedge->call_stmt)
906 fatal_error (input_location, "Cgraph edge statement index not found");
907 }
908 for (i = 0; node->iterate_reference (i, ref); i++)
909 if (ref->lto_stmt_uid)
910 {
911 if (STMT_UID_NOT_IN_RANGE (ref->lto_stmt_uid))
912 fatal_error (input_location,
913 "Reference statement index out of range");
914 ref->stmt = stmts[ref->lto_stmt_uid - 1];
915 if (!ref->stmt)
916 fatal_error (input_location, "Reference statement index not found");
917 }
918 }
919
920
921 /* Fixup call_stmt pointers in NODE and all clones. */
922
923 static void
924 fixup_call_stmt_edges (struct cgraph_node *orig, gimple **stmts)
925 {
926 struct cgraph_node *node;
927 struct function *fn;
928
929 while (orig->clone_of)
930 orig = orig->clone_of;
931 fn = DECL_STRUCT_FUNCTION (orig->decl);
932
933 if (!orig->thunk.thunk_p)
934 fixup_call_stmt_edges_1 (orig, stmts, fn);
935 if (orig->clones)
936 for (node = orig->clones; node != orig;)
937 {
938 if (!node->thunk.thunk_p)
939 fixup_call_stmt_edges_1 (node, stmts, fn);
940 if (node->clones)
941 node = node->clones;
942 else if (node->next_sibling_clone)
943 node = node->next_sibling_clone;
944 else
945 {
946 while (node != orig && !node->next_sibling_clone)
947 node = node->clone_of;
948 if (node != orig)
949 node = node->next_sibling_clone;
950 }
951 }
952 }
953
954
/* Input the base body of struct function FN from DATA_IN
   using input block IB.  The field order here must exactly mirror the
   writer; every read advances the stream.  */

static void
input_struct_function_base (struct function *fn, class data_in *data_in,
			    class lto_input_block *ib)
{
  struct bitpack_d bp;
  int len;

  /* Read the static chain and non-local goto save area.  */
  fn->static_chain_decl = stream_read_tree (ib, data_in);
  fn->nonlocal_goto_save_area = stream_read_tree (ib, data_in);

  /* Read all the local symbols.  */
  len = streamer_read_hwi (ib);
  if (len > 0)
    {
      int i;
      vec_safe_grow_cleared (fn->local_decls, len);
      for (i = 0; i < len; i++)
	{
	  tree t = stream_read_tree (ib, data_in);
	  (*fn->local_decls)[i] = t;
	}
    }

  /* Input the current IL state of the function.  */
  fn->curr_properties = streamer_read_uhwi (ib);

  /* Read all the attributes for FN.  The unpack order below must match
     the bitpack order used when the function was streamed out.  */
  bp = streamer_read_bitpack (ib);
  fn->is_thunk = bp_unpack_value (&bp, 1);
  fn->has_local_explicit_reg_vars = bp_unpack_value (&bp, 1);
  fn->returns_pcc_struct = bp_unpack_value (&bp, 1);
  fn->returns_struct = bp_unpack_value (&bp, 1);
  fn->can_throw_non_call_exceptions = bp_unpack_value (&bp, 1);
  fn->can_delete_dead_exceptions = bp_unpack_value (&bp, 1);
  fn->always_inline_functions_inlined = bp_unpack_value (&bp, 1);
  fn->after_inlining = bp_unpack_value (&bp, 1);
  fn->stdarg = bp_unpack_value (&bp, 1);
  fn->has_nonlocal_label = bp_unpack_value (&bp, 1);
  fn->has_forced_label_in_static = bp_unpack_value (&bp, 1);
  fn->calls_alloca = bp_unpack_value (&bp, 1);
  fn->calls_setjmp = bp_unpack_value (&bp, 1);
  fn->calls_eh_return = bp_unpack_value (&bp, 1);
  fn->has_force_vectorize_loops = bp_unpack_value (&bp, 1);
  fn->has_simduid_loops = bp_unpack_value (&bp, 1);
  fn->va_list_fpr_size = bp_unpack_value (&bp, 8);
  fn->va_list_gpr_size = bp_unpack_value (&bp, 8);
  fn->last_clique = bp_unpack_value (&bp, sizeof (short) * 8);

  /* Input the function start and end loci.  */
  fn->function_start_locus = stream_input_location_now (&bp, data_in);
  fn->function_end_locus = stream_input_location_now (&bp, data_in);

  /* Restore the instance discriminators if present.  A set presence bit
     is followed by the full-width instance number.  */
  int instance_number = bp_unpack_value (&bp, 1);
  if (instance_number)
    {
      instance_number = bp_unpack_value (&bp, sizeof (int) * CHAR_BIT);
      maybe_create_decl_to_instance_map ()->put (fn->decl, instance_number);
    }
}
1019
1020
1021 /* Read the body of function FN_DECL from DATA_IN using input block IB. */
1022
1023 static void
input_function (tree fn_decl, class data_in *data_in,
		class lto_input_block *ib, class lto_input_block *ib_cfg)
{
  struct function *fn;
  enum LTO_tags tag;
  gimple **stmts;
  basic_block bb;
  struct cgraph_node *node;

  tag = streamer_read_record_start (ib);
  lto_tag_check (tag, LTO_function);

  /* Read decls for parameters and args.  */
  DECL_RESULT (fn_decl) = stream_read_tree (ib, data_in);
  DECL_ARGUMENTS (fn_decl) = streamer_read_chain (ib, data_in);

  /* Read debug args if available.  */
  unsigned n_debugargs = streamer_read_uhwi (ib);
  if (n_debugargs)
    {
      vec<tree, va_gc> **debugargs = decl_debug_args_insert (fn_decl);
      vec_safe_grow (*debugargs, n_debugargs);
      for (unsigned i = 0; i < n_debugargs; ++i)
	(**debugargs)[i] = stream_read_tree (ib, data_in);
    }

  /* Read the tree of lexical scopes for the function.  */
  DECL_INITIAL (fn_decl) = stream_read_tree (ib, data_in);
  /* Read the leaf BLOCKs of the scope tree.  The returned trees are
     unused here; presumably reading them registers them with the
     reader cache -- TODO confirm.  */
  unsigned block_leaf_count = streamer_read_uhwi (ib);
  while (block_leaf_count--)
    stream_read_tree (ib, data_in);

  /* If the next word is zero, there is no more data to read for this
     function.  */
  if (!streamer_read_uhwi (ib))
    return;

  push_struct_function (fn_decl);
  fn = DECL_STRUCT_FUNCTION (fn_decl);
  init_tree_ssa (fn);
  /* We input IL in SSA form.  */
  cfun->gimple_df->in_ssa_p = true;

  gimple_register_cfg_hooks ();

  /* Make sure a cgraph node exists for FN_DECL.  */
  node = cgraph_node::get (fn_decl);
  if (!node)
    node = cgraph_node::create (fn_decl);
  input_struct_function_base (fn, data_in, ib);
  input_cfg (ib_cfg, data_in, fn);

  /* Read all the SSA names.  */
  input_ssa_names (ib, data_in, fn);

  /* Read the exception handling regions in the function.  */
  input_eh_regions (ib, data_in, fn);

  gcc_assert (DECL_INITIAL (fn_decl));
  DECL_SAVED_TREE (fn_decl) = NULL_TREE;

  /* Read all the basic blocks; the sequence is terminated by a zero
     (LTO_null) record tag.  */
  tag = streamer_read_record_start (ib);
  while (tag)
    {
      input_bb (ib, tag, data_in, fn,
		node->count_materialization_scale);
      tag = streamer_read_record_start (ib);
    }

  /* Fix up the call statements that are mentioned in the callgraph
     edges.  */
  /* First assign fresh UIDs to all PHIs and statements in block
     order, so that the UID-indexed STMTS array built below lines up
     with the statement indices used by the edge fixup code.  */
  set_gimple_stmt_max_uid (cfun, 0);
  FOR_ALL_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}
    }
  /* Map from statement UID to statement.  */
  stmts = (gimple **) xcalloc (gimple_stmt_max_uid (fn), sizeof (gimple *));
  FOR_ALL_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator bsi = gsi_start_phis (bb);
      while (!gsi_end_p (bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  gsi_next (&bsi);
	  stmts[gimple_uid (stmt)] = stmt;
	}
      bsi = gsi_start_bb (bb);
      while (!gsi_end_p (bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  bool remove = false;
	  /* If we're recompiling LTO objects with debug stmts but
	     we're not supposed to have debug stmts, remove them now.
	     We can't remove them earlier because this would cause uid
	     mismatches in fixups, but we can do it at this point, as
	     long as debug stmts don't require fixups.
	     Similarly remove all IFN_*SAN_* internal calls */
	  if (!flag_wpa)
	    {
	      if (is_gimple_debug (stmt)
		  && (gimple_debug_nonbind_marker_p (stmt)
		      ? !MAY_HAVE_DEBUG_MARKER_STMTS
		      : !MAY_HAVE_DEBUG_BIND_STMTS))
		remove = true;
	      /* In case the linemap overflows locations can be dropped
		 to zero.  Thus do not keep nonsensical inline entry markers
		 we'd later ICE on.  */
	      tree block;
	      if (gimple_debug_inline_entry_p (stmt)
		  && (block = gimple_block (stmt))
		  && !inlined_function_outer_scope_p (block))
		remove = true;
	      if (is_gimple_call (stmt)
		  && gimple_call_internal_p (stmt))
		{
		  /* Sanitizer internal calls whose sanitizer is not
		     enabled in this compilation are neutralized to
		     IFN_NOP rather than removed, so the statement (and
		     its UID) stays in place.  */
		  bool replace = false;
		  switch (gimple_call_internal_fn (stmt))
		    {
		    case IFN_UBSAN_NULL:
		      if ((flag_sanitize
			   & (SANITIZE_NULL | SANITIZE_ALIGNMENT)) == 0)
			replace = true;
		      break;
		    case IFN_UBSAN_BOUNDS:
		      if ((flag_sanitize & SANITIZE_BOUNDS) == 0)
			replace = true;
		      break;
		    case IFN_UBSAN_VPTR:
		      if ((flag_sanitize & SANITIZE_VPTR) == 0)
			replace = true;
		      break;
		    case IFN_UBSAN_OBJECT_SIZE:
		      if ((flag_sanitize & SANITIZE_OBJECT_SIZE) == 0)
			replace = true;
		      break;
		    case IFN_UBSAN_PTR:
		      if ((flag_sanitize & SANITIZE_POINTER_OVERFLOW) == 0)
			replace = true;
		      break;
		    case IFN_ASAN_MARK:
		      if ((flag_sanitize & SANITIZE_ADDRESS) == 0)
			replace = true;
		      break;
		    case IFN_TSAN_FUNC_EXIT:
		      if ((flag_sanitize & SANITIZE_THREAD) == 0)
			replace = true;
		      break;
		    default:
		      break;
		    }
		  if (replace)
		    {
		      gimple_call_set_internal_fn (as_a <gcall *> (stmt),
						   IFN_NOP);
		      update_stmt (stmt);
		    }
		}
	    }
	  if (remove)
	    {
	      /* Advance BSI past STMT before removing it so the
		 iterator stays valid.  */
	      gimple_stmt_iterator gsi = bsi;
	      gsi_next (&bsi);
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	      gsi_remove (&gsi, true);
	    }
	  else
	    {
	      gsi_next (&bsi);
	      stmts[gimple_uid (stmt)] = stmt;

	      /* Remember that the input function has begin stmt
		 markers, so that we know to expect them when emitting
		 debug info.  */
	      if (!cfun->debug_nonbind_markers
		  && gimple_debug_nonbind_marker_p (stmt))
		cfun->debug_nonbind_markers = true;
	    }
	}
    }

  /* Set the gimple body to the statement sequence in the entry
     basic block.  FIXME lto, this is fairly hacky.  The existence
     of a gimple body is used by the cgraph routines, but we should
     really use the presence of the CFG.  */
  {
    edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
    gimple_set_body (fn_decl, bb_seq (ei_edge (ei)->dest));
  }

  update_max_bb_count ();
  fixup_call_stmt_edges (node, stmts);
  execute_all_ipa_stmt_fixups (node, stmts);

  /* Release dominance info; it was not part of the stream.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  free (stmts);
  pop_cfun ();
}
1231
/* Read the initializer of variable VAR from DATA_IN using input block IB.  */
1233
1234 static void
1235 input_constructor (tree var, class data_in *data_in,
1236 class lto_input_block *ib)
1237 {
1238 DECL_INITIAL (var) = stream_read_tree (ib, data_in);
1239 }
1240
1241
1242 /* Read the body from DATA for function NODE and fill it in.
1243 FILE_DATA are the global decls and types. SECTION_TYPE is either
1244 LTO_section_function_body or LTO_section_static_initializer. If
1245 section type is LTO_section_function_body, FN must be the decl for
1246 that function. */
1247
static void
lto_read_body_or_constructor (struct lto_file_decl_data *file_data, struct symtab_node *node,
			      const char *data, enum lto_section_type section_type)
{
  const struct lto_function_header *header;
  class data_in *data_in;
  int cfg_offset;
  int main_offset;
  int string_offset;
  tree fn_decl = node->decl;

  /* Compute the offsets of the section parts.  The layout is: header,
     CFG stream (functions only), main stream, string table.  */
  header = (const struct lto_function_header *) data;
  if (TREE_CODE (node->decl) == FUNCTION_DECL)
    {
      cfg_offset = sizeof (struct lto_function_header);
      main_offset = cfg_offset + header->cfg_size;
      string_offset = main_offset + header->main_size;
    }
  else
    {
      /* Variable initializers have no CFG part; CFG_OFFSET stays
	 unused on this path.  */
      main_offset = sizeof (struct lto_function_header);
      string_offset = main_offset + header->main_size;
    }

  data_in = lto_data_in_create (file_data, data + string_offset,
				header->string_size, vNULL);

  if (section_type == LTO_section_function_body)
    {
      struct lto_in_decl_state *decl_state;
      unsigned from;

      gcc_checking_assert (node);

      /* Use the function's decl state.  */
      decl_state = lto_get_function_in_decl_state (file_data, fn_decl);
      gcc_assert (decl_state);
      file_data->current_decl_state = decl_state;


      /* Set up the struct function.  */
      /* Remember the current length of the reader cache: everything
	 appended beyond FROM was streamed locally for this body and is
	 fixed up after input.  */
      from = data_in->reader_cache->nodes.length ();
      lto_input_block ib_main (data + main_offset, header->main_size,
			       file_data->mode_table);
      if (TREE_CODE (node->decl) == FUNCTION_DECL)
	{
	  lto_input_block ib_cfg (data + cfg_offset, header->cfg_size,
				  file_data->mode_table);
	  input_function (fn_decl, data_in, &ib_main, &ib_cfg);
	}
      else
	input_constructor (fn_decl, data_in, &ib_main);
      data_in->location_cache.apply_location_cache ();
      /* And fixup types we streamed locally.  */
      {
	struct streamer_tree_cache_d *cache = data_in->reader_cache;
	unsigned len = cache->nodes.length ();
	unsigned i;
	for (i = len; i-- > from;)
	  {
	    tree t = streamer_tree_cache_get_tree (cache, i);
	    if (t == NULL_TREE)
	      continue;

	    if (TYPE_P (t))
	      {
		/* Set TYPE_CANONICAL to the main variant for types
		   that participate in alias analysis.  */
		gcc_assert (TYPE_CANONICAL (t) == NULL_TREE);
		if (type_with_alias_set_p (t)
		    && canonical_type_used_p (t))
		  TYPE_CANONICAL (t) = TYPE_MAIN_VARIANT (t);
		if (TYPE_MAIN_VARIANT (t) != t)
		  {
		    /* Link the variant into its main variant's variant
		       chain.  */
		    gcc_assert (TYPE_NEXT_VARIANT (t) == NULL_TREE);
		    TYPE_NEXT_VARIANT (t)
		      = TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t));
		    TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t)) = t;
		  }
	      }
	  }
      }

      /* Restore decl state */
      file_data->current_decl_state = file_data->global_decl_state;
    }

  lto_data_in_delete (data_in);
}
1335
1336
1337 /* Read the body of NODE using DATA. FILE_DATA holds the global
1338 decls and types. */
1339
void
lto_input_function_body (struct lto_file_decl_data *file_data,
			 struct cgraph_node *node, const char *data)
{
  /* Function bodies live in sections of type LTO_section_function_body.  */
  lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
}
1346
/* Read the constructor (initializer) of variable NODE using DATA.
   FILE_DATA holds the global decls and types.  */
1349
void
lto_input_variable_constructor (struct lto_file_decl_data *file_data,
				struct varpool_node *node, const char *data)
{
  /* Variable initializers are streamed into LTO_section_function_body
     sections as well; lto_read_body_or_constructor dispatches on the
     TREE_CODE of NODE->decl.  */
  lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
}
1356
1357
/* Queue of accumulated decl -> DIE mappings.  Like streamed locations,
   these are only applied to prevailing tree nodes during tree merging.  */
1360 vec<dref_entry> dref_queue;
1361
1362 /* Read the physical representation of a tree node EXPR from
1363 input block IB using the per-file context in DATA_IN. */
1364
static void
lto_read_tree_1 (class lto_input_block *ib, class data_in *data_in, tree expr)
{
  /* Read all the bitfield values in EXPR.  Note that for LTO, we
     only write language-independent bitfields, so no more unpacking is
     needed.  */
  streamer_read_tree_bitfields (ib, data_in, expr);

  /* Read all the pointer fields in EXPR.  */
  streamer_read_tree_body (ib, data_in, expr);

  /* Read any LTO-specific data not read by the tree streamer.  For
     decls (other than functions and translation units) the initial
     value is streamed here, after the body.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    DECL_INITIAL (expr) = stream_read_tree (ib, data_in);

  /* Stream references to early generated DIEs.  Keep in sync with the
     trees handled in dwarf2out_register_external_die.  */
  if ((DECL_P (expr)
       && TREE_CODE (expr) != FIELD_DECL
       && TREE_CODE (expr) != DEBUG_EXPR_DECL
       && TREE_CODE (expr) != TYPE_DECL)
      || TREE_CODE (expr) == BLOCK)
    {
      /* A non-NULL string names the symbol of the early DIE; queue
	 the mapping, it is applied later (see dref_queue comment).  */
      const char *str = streamer_read_string (data_in, ib);
      if (str)
	{
	  unsigned HOST_WIDE_INT off = streamer_read_uhwi (ib);
	  dref_entry e = { expr, str, off };
	  dref_queue.safe_push (e);
	}
    }
}
1399
1400 /* Read the physical representation of a tree node with tag TAG from
1401 input block IB using the per-file context in DATA_IN. */
1402
1403 static tree
1404 lto_read_tree (class lto_input_block *ib, class data_in *data_in,
1405 enum LTO_tags tag, hashval_t hash)
1406 {
1407 /* Instantiate a new tree node. */
1408 tree result = streamer_alloc_tree (ib, data_in, tag);
1409
1410 /* Enter RESULT in the reader cache. This will make RESULT
1411 available so that circular references in the rest of the tree
1412 structure can be resolved in subsequent calls to stream_read_tree. */
1413 streamer_tree_cache_append (data_in->reader_cache, result, hash);
1414
1415 lto_read_tree_1 (ib, data_in, result);
1416
1417 /* end_marker = */ streamer_read_uchar (ib);
1418
1419 return result;
1420 }
1421
1422
1423 /* Populate the reader cache with trees materialized from the SCC
1424 following in the IB, DATA_IN stream. */
1425
hashval_t
lto_input_scc (class lto_input_block *ib, class data_in *data_in,
	       unsigned *len, unsigned *entry_len)
{
  /* A blob of unnamed tree nodes, fill the cache from it and
     recurse.  */
  unsigned size = streamer_read_uhwi (ib);
  hashval_t scc_hash = streamer_read_uhwi (ib);
  unsigned scc_entry_len = 1;

  if (size == 1)
    {
      /* Singleton SCC: read the one tree directly.  */
      enum LTO_tags tag = streamer_read_record_start (ib);
      lto_input_tree_1 (ib, data_in, tag, scc_hash);
    }
  else
    {
      /* Index of the first SCC member in the reader cache.  */
      unsigned int first = data_in->reader_cache->nodes.length ();
      tree result;

      scc_entry_len = streamer_read_uhwi (ib);

      /* Materialize size trees by reading their headers.  */
      for (unsigned i = 0; i < size; ++i)
	{
	  enum LTO_tags tag = streamer_read_record_start (ib);
	  /* Members of a multi-entry SCC must be full tree headers;
	     references and singleton-only tags cannot appear here.  */
	  if (tag == LTO_null
	      || (tag >= LTO_field_decl_ref && tag <= LTO_global_decl_ref)
	      || tag == LTO_tree_pickle_reference
	      || tag == LTO_integer_cst
	      || tag == LTO_tree_scc)
	    gcc_unreachable ();

	  result = streamer_alloc_tree (ib, data_in, tag);
	  streamer_tree_cache_append (data_in->reader_cache, result, 0);
	}

      /* Read the tree bitpacks and references.  Every member is in
	 the cache already, so circular references within the SCC
	 resolve.  */
      for (unsigned i = 0; i < size; ++i)
	{
	  result = streamer_tree_cache_get_tree (data_in->reader_cache,
						 first + i);
	  lto_read_tree_1 (ib, data_in, result);
	  /* end_marker = */ streamer_read_uchar (ib);
	}
    }

  *len = size;
  *entry_len = scc_entry_len;
  return scc_hash;
}
1477
1478
1479 /* Read a tree from input block IB using the per-file context in
1480 DATA_IN. This context is used, for example, to resolve references
1481 to previously read nodes. */
1482
tree
lto_input_tree_1 (class lto_input_block *ib, class data_in *data_in,
		  enum LTO_tags tag, hashval_t hash)
{
  tree result;

  gcc_assert ((unsigned) tag < (unsigned) LTO_NUM_TAGS);

  if (tag == LTO_null)
    result = NULL_TREE;
  else if (tag >= LTO_field_decl_ref && tag <= LTO_namelist_decl_ref)
    {
      /* If TAG is a reference to an indexable tree, the next value
	 in IB is the index into the table where we expect to find
	 that tree.  */
      result = lto_input_tree_ref (ib, data_in, cfun, tag);
    }
  else if (tag == LTO_tree_pickle_reference)
    {
      /* If TAG is a reference to a previously read tree, look it up in
	 the reader cache.  */
      result = streamer_get_pickled_tree (ib, data_in);
    }
  else if (tag == LTO_integer_cst)
    {
      /* For shared integer constants in singletons we can use the
	 existing tree integer constant merging code.  The constant is
	 streamed as its type followed by LEN host-wide-int words.  */
      tree type = stream_read_tree (ib, data_in);
      unsigned HOST_WIDE_INT len = streamer_read_uhwi (ib);
      unsigned HOST_WIDE_INT i;
      HOST_WIDE_INT a[WIDE_INT_MAX_ELTS];

      for (i = 0; i < len; i++)
	a[i] = streamer_read_hwi (ib);
      gcc_assert (TYPE_PRECISION (type) <= MAX_BITSIZE_MODE_ANY_INT);
      result = wide_int_to_tree (type, wide_int::from_array
				 (a, len, TYPE_PRECISION (type)));
      streamer_tree_cache_append (data_in->reader_cache, result, hash);
    }
  else if (tag == LTO_tree_scc)
    /* SCCs are handled by lto_input_scc (see lto_input_tree).  */
    gcc_unreachable ();
  else
    {
      /* Otherwise, materialize a new node from IB.  */
      result = lto_read_tree (ib, data_in, tag, hash);
    }

  return result;
}
1532
1533 tree
1534 lto_input_tree (class lto_input_block *ib, class data_in *data_in)
1535 {
1536 enum LTO_tags tag;
1537
1538 /* Input and skip SCCs. */
1539 while ((tag = streamer_read_record_start (ib)) == LTO_tree_scc)
1540 {
1541 unsigned len, entry_len;
1542 lto_input_scc (ib, data_in, &len, &entry_len);
1543
1544 /* Register DECLs with the debuginfo machinery. */
1545 while (!dref_queue.is_empty ())
1546 {
1547 dref_entry e = dref_queue.pop ();
1548 debug_hooks->register_external_die (e.decl, e.sym, e.off);
1549 }
1550 }
1551 return lto_input_tree_1 (ib, data_in, tag, 0);
1552 }
1553
1554
1555 /* Input toplevel asms. */
1556
void
lto_input_toplevel_asms (struct lto_file_decl_data *file_data, int order_base)
{
  size_t len;
  const char *data
    = lto_get_summary_section_data (file_data, LTO_section_asm, &len);
  const struct lto_simple_header_with_strings *header
    = (const struct lto_simple_header_with_strings *) data;
  int string_offset;
  class data_in *data_in;
  tree str;

  /* Nothing to do when the file has no asm section.  */
  if (! data)
    return;

  /* Section layout: header, main stream, string table.  */
  string_offset = sizeof (*header) + header->main_size;

  lto_input_block ib (data + sizeof (*header), header->main_size,
		      file_data->mode_table);

  data_in = lto_data_in_create (file_data, data + string_offset,
				header->string_size, vNULL);

  /* Each record is an asm string followed by its symbol order; a NULL
     string terminates the stream.  */
  while ((str = streamer_read_string_cst (data_in, &ib)))
    {
      asm_node *node = symtab->finalize_toplevel_asm (str);
      /* Rebase the streamed order into this unit's order space and
	 keep the global order counter beyond it.  */
      node->order = streamer_read_hwi (&ib) + order_base;
      if (node->order >= symtab->order)
	symtab->order = node->order + 1;
    }

  lto_data_in_delete (data_in);

  lto_free_section_data (file_data, LTO_section_asm, NULL, data, len);
}
1592
1593
1594 /* Input mode table. */
1595
void
lto_input_mode_table (struct lto_file_decl_data *file_data)
{
  size_t len;
  const char *data
    = lto_get_summary_section_data (file_data, LTO_section_mode_table, &len);
  if (! data)
    {
      internal_error ("cannot read LTO mode table from %s",
		      file_data->file_name);
      return;
    }

  /* Table translating the 8-bit mode numbers used in the stream to
     this host's machine modes.  */
  unsigned char *table = ggc_cleared_vec_alloc<unsigned char> (1 << 8);
  file_data->mode_table = table;
  const struct lto_simple_header_with_strings *header
    = (const struct lto_simple_header_with_strings *) data;
  int string_offset;
  class data_in *data_in;
  string_offset = sizeof (*header) + header->main_size;

  lto_input_block ib (data + sizeof (*header), header->main_size, NULL);
  data_in = lto_data_in_create (file_data, data + string_offset,
				header->string_size, vNULL);
  bitpack_d bp = streamer_read_bitpack (&ib);

  table[VOIDmode] = VOIDmode;
  table[BLKmode] = BLKmode;
  unsigned int m;
  /* One record per streamed mode; a VOIDmode entry terminates the
     stream.  */
  while ((m = bp_unpack_value (&bp, 8)) != VOIDmode)
    {
      enum mode_class mclass
	= bp_unpack_enum (&bp, mode_class, MAX_MODE_CLASS);
      poly_uint16 size = bp_unpack_poly_value (&bp, 16);
      poly_uint16 prec = bp_unpack_poly_value (&bp, 16);
      machine_mode inner = (machine_mode) bp_unpack_value (&bp, 8);
      poly_uint16 nunits = bp_unpack_poly_value (&bp, 16);
      unsigned int ibit = 0, fbit = 0;
      unsigned int real_fmt_len = 0;
      const char *real_fmt_name = NULL;
      switch (mclass)
	{
	case MODE_FRACT:
	case MODE_UFRACT:
	case MODE_ACCUM:
	case MODE_UACCUM:
	  /* Fixed-point modes additionally stream their integral and
	     fractional bit counts.  */
	  ibit = bp_unpack_value (&bp, 8);
	  fbit = bp_unpack_value (&bp, 8);
	  break;
	case MODE_FLOAT:
	case MODE_DECIMAL_FLOAT:
	  /* Float modes additionally stream their real format name.  */
	  real_fmt_name = bp_unpack_indexed_string (data_in, &bp,
						    &real_fmt_len);
	  break;
	default:
	  break;
	}
      /* First search just the GET_CLASS_NARROWEST_MODE to wider modes,
	 if not found, fallback to all modes.  */
      int pass;
      for (pass = 0; pass < 2; pass++)
	for (machine_mode mr = pass ? VOIDmode
				    : GET_CLASS_NARROWEST_MODE (mclass);
	     pass ? mr < MAX_MACHINE_MODE : mr != VOIDmode;
	     pass ? mr = (machine_mode) (mr + 1)
		  : mr = GET_MODE_WIDER_MODE (mr).else_void ())
	  if (GET_MODE_CLASS (mr) != mclass
	      || maybe_ne (GET_MODE_SIZE (mr), size)
	      || maybe_ne (GET_MODE_PRECISION (mr), prec)
	      || (inner == m
		  ? GET_MODE_INNER (mr) != mr
		  : GET_MODE_INNER (mr) != table[(int) inner])
	      || GET_MODE_IBIT (mr) != ibit
	      || GET_MODE_FBIT (mr) != fbit
	      || maybe_ne (GET_MODE_NUNITS (mr), nunits))
	    continue;
	  else if ((mclass == MODE_FLOAT || mclass == MODE_DECIMAL_FLOAT)
		   && strcmp (REAL_MODE_FORMAT (mr)->name, real_fmt_name) != 0)
	    continue;
	  else
	    {
	      /* Found a matching host mode.  Setting PASS to 2 here
		 (bumped to 3 by the outer loop's increment after the
		 break) both terminates the outer loop and marks
		 success for the check below.  */
	      table[m] = mr;
	      pass = 2;
	      break;
	    }
      unsigned int mname_len;
      const char *mname = bp_unpack_indexed_string (data_in, &bp, &mname_len);
      /* PASS == 2 here means both search passes ran to completion
	 without a match (success leaves PASS at 3, see above).  */
      if (pass == 2)
	{
	  switch (mclass)
	    {
	    case MODE_VECTOR_BOOL:
	    case MODE_VECTOR_INT:
	    case MODE_VECTOR_FLOAT:
	    case MODE_VECTOR_FRACT:
	    case MODE_VECTOR_UFRACT:
	    case MODE_VECTOR_ACCUM:
	    case MODE_VECTOR_UACCUM:
	      /* For unsupported vector modes just use BLKmode,
		 if the scalar mode is supported.  */
	      if (table[(int) inner] != VOIDmode)
		{
		  table[m] = BLKmode;
		  break;
		}
	      /* FALLTHRU */
	    default:
	      fatal_error (UNKNOWN_LOCATION, "unsupported mode %qs", mname);
	      break;
	    }
	}
    }
  lto_data_in_delete (data_in);

  lto_free_section_data (file_data, LTO_section_mode_table, NULL, data, len);
}
1712
1713
1714 /* Initialization for the LTO reader. */
1715
1716 void
1717 lto_reader_init (void)
1718 {
1719 lto_streamer_init ();
1720 file_name_hash_table
1721 = new hash_table<string_slot_hasher> (37);
1722 string_slot_allocator = new object_allocator <struct string_slot>
1723 ("line map file name hash");
1724 gcc_obstack_init (&file_name_obstack);
1725 }
1726
1727 /* Free hash table used to stream in location file names. */
1728
1729 void
1730 lto_free_file_name_hash (void)
1731 {
1732 delete file_name_hash_table;
1733 file_name_hash_table = NULL;
1734 delete string_slot_allocator;
1735 string_slot_allocator = NULL;
1736 /* file_name_obstack must stay allocated since it is referred to by
1737 line map table. */
1738 }
1739
1740
1741 /* Create a new data_in object for FILE_DATA. STRINGS is the string
1742 table to use with LEN strings. RESOLUTIONS is the vector of linker
1743 resolutions (NULL if not using a linker plugin). */
1744
1745 class data_in *
1746 lto_data_in_create (struct lto_file_decl_data *file_data, const char *strings,
1747 unsigned len,
1748 vec<ld_plugin_symbol_resolution_t> resolutions)
1749 {
1750 class data_in *data_in = new (class data_in);
1751 data_in->file_data = file_data;
1752 data_in->strings = strings;
1753 data_in->strings_len = len;
1754 data_in->globals_resolution = resolutions;
1755 data_in->reader_cache = streamer_tree_cache_create (false, false, true);
1756 return data_in;
1757 }
1758
1759
1760 /* Remove DATA_IN. */
1761
1762 void
1763 lto_data_in_delete (class data_in *data_in)
1764 {
1765 data_in->globals_resolution.release ();
1766 streamer_tree_cache_delete (data_in->reader_cache);
1767 delete data_in;
1768 }