Fix stack overflow with autofdo (PR83355)
[gcc.git] / gcc / lto-streamer-in.c
1 /* Read the GIMPLE representation from a file stream.
2
3 Copyright (C) 2009-2017 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "cfghooks.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "gimple-streamer.h"
35 #include "toplev.h"
36 #include "gimple-iterator.h"
37 #include "tree-cfg.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "tree-ssa.h"
41 #include "except.h"
42 #include "cgraph.h"
43 #include "cfgloop.h"
44 #include "debug.h"
45
46
/* A string_slot hasher whose REMOVE hook also frees the slot structure
   itself when the hash table discards an entry.  Note that only the
   string_slot is freed, not the string it points to — the canonical
   file-name strings registered by canon_file_name stay live for the
   whole compilation.  */

struct freeing_string_slot_hasher : string_slot_hasher
{
  static inline void remove (value_type *);
};

/* Free slot V (the string_slot structure, not the contained string).  */

inline void
freeing_string_slot_hasher::remove (value_type *v)
{
  free (v);
}

/* The table to hold the file names.  Maps each distinct file-name string
   to its single canonical copy; see canon_file_name.  */
static hash_table<freeing_string_slot_hasher> *file_name_hash_table;
60
61
62 /* Check that tag ACTUAL has one of the given values. NUM_TAGS is the
63 number of valid tag values to check. */
64
65 void
66 lto_tag_check_set (enum LTO_tags actual, int ntags, ...)
67 {
68 va_list ap;
69 int i;
70
71 va_start (ap, ntags);
72 for (i = 0; i < ntags; i++)
73 if ((unsigned) actual == va_arg (ap, unsigned))
74 {
75 va_end (ap);
76 return;
77 }
78
79 va_end (ap);
80 internal_error ("bytecode stream: unexpected tag %s", lto_tag_name (actual));
81 }
82
83
84 /* Read LENGTH bytes from STREAM to ADDR. */
85
/* Copy LENGTH bytes from input block IB into the buffer at ADDR,
   reading them one byte at a time from the stream.  */

void
lto_input_data_block (struct lto_input_block *ib, void *addr, size_t length)
{
  unsigned char *p = (unsigned char *) addr;
  unsigned char *const end = p + length;

  while (p < end)
    *p++ = streamer_read_uchar (ib);
}
95
96
97 /* Lookup STRING in file_name_hash_table. If found, return the existing
98 string, otherwise insert STRING as the canonical version. */
99
100 static const char *
101 canon_file_name (const char *string)
102 {
103 string_slot **slot;
104 struct string_slot s_slot;
105 size_t len = strlen (string);
106
107 s_slot.s = string;
108 s_slot.len = len;
109
110 slot = file_name_hash_table->find_slot (&s_slot, INSERT);
111 if (*slot == NULL)
112 {
113 char *saved_string;
114 struct string_slot *new_slot;
115
116 saved_string = (char *) xmalloc (len + 1);
117 new_slot = XCNEW (struct string_slot);
118 memcpy (saved_string, string, len + 1);
119 new_slot->s = saved_string;
120 new_slot->len = len;
121 *slot = new_slot;
122 return saved_string;
123 }
124 else
125 {
126 struct string_slot *old_slot = *slot;
127 return old_slot->s;
128 }
129 }
130
/* Pointer to the currently alive instance of lto_location_cache.  Only
   one cache is active at a time; cmp_loc reads the current file/line
   through this pointer because qsort callbacks take no context
   argument.  */

lto_location_cache *lto_location_cache::current_cache;
134
135 /* Sort locations in source order. Start with file from last application. */
136
137 int
138 lto_location_cache::cmp_loc (const void *pa, const void *pb)
139 {
140 const cached_location *a = ((const cached_location *)pa);
141 const cached_location *b = ((const cached_location *)pb);
142 const char *current_file = current_cache->current_file;
143 int current_line = current_cache->current_line;
144
145 if (a->file == current_file && b->file != current_file)
146 return -1;
147 if (a->file != current_file && b->file == current_file)
148 return 1;
149 if (a->file == current_file && b->file == current_file)
150 {
151 if (a->line == current_line && b->line != current_line)
152 return -1;
153 if (a->line != current_line && b->line == current_line)
154 return 1;
155 }
156 if (a->file != b->file)
157 return strcmp (a->file, b->file);
158 if (a->sysp != b->sysp)
159 return a->sysp ? 1 : -1;
160 if (a->line != b->line)
161 return a->line - b->line;
162 return a->col - b->col;
163 }
164
165 /* Apply all changes in location cache. Add locations into linemap and patch
166 trees. */
167
/* Apply all changes accumulated in the location cache: sort the cached
   entries into source order, enter them into the line table, and patch
   the location_t slots they point at.  Returns true if any entry was
   applied.  */

bool
lto_location_cache::apply_location_cache ()
{
  /* Persists across calls so the first entry of a later invocation can
     use LC_RENAME rather than LC_ENTER once any file has been seen.  */
  static const char *prev_file;
  if (!loc_cache.length ())
    return false;
  /* Sorting minimizes the number of linemap switches.  */
  if (loc_cache.length () > 1)
    loc_cache.qsort (cmp_loc);

  for (unsigned int i = 0; i < loc_cache.length (); i++)
    {
      struct cached_location loc = loc_cache[i];

      if (current_file != loc.file)
	linemap_add (line_table, prev_file ? LC_RENAME : LC_ENTER,
		     loc.sysp, loc.file, loc.line);
      else if (current_line != loc.line)
	{
	  int max = loc.col;

	  /* Look ahead for the widest column used on this line so
	     linemap_line_start can reserve enough column bits.  */
	  for (unsigned int j = i + 1; j < loc_cache.length (); j++)
	    if (loc.file != loc_cache[j].file
		|| loc.line != loc_cache[j].line)
	      break;
	    else if (max < loc_cache[j].col)
	      max = loc_cache[j].col;
	  linemap_line_start (line_table, loc.line, max + 1);
	}
      /* Unpatched slots hold the sentinel BUILTINS_LOCATION + 1; anything
	 else means the slot was already resolved or never cached.  */
      gcc_assert (*loc.loc == BUILTINS_LOCATION + 1);
      if (current_file == loc.file && current_line == loc.line
	  && current_col == loc.col)
	*loc.loc = current_loc;
      else
	current_loc = *loc.loc = linemap_position_for_column (line_table,
							      loc.col);
      current_line = loc.line;
      prev_file = current_file = loc.file;
      current_col = loc.col;
    }
  /* Everything applied; empty the cache.  */
  loc_cache.truncate (0);
  accepted_length = 0;
  return true;
}
211
/* Tree merging did not succeed; mark all changes currently in the cache
   as accepted so a later revert_location_cache will not discard them.  */

void
lto_location_cache::accept_location_cache ()
{
  gcc_assert (current_cache == this);
  accepted_length = loc_cache.length ();
}
220
/* Tree merging did succeed; throw away location entries cached since the
   last accept_location_cache (they referred to trees that were dropped
   in favor of existing merged copies).  */

void
lto_location_cache::revert_location_cache ()
{
  loc_cache.truncate (accepted_length);
}
228
229 /* Read a location bitpack from input block IB and either update *LOC directly
230 or add it to the location cache.
231 It is neccesary to call apply_location_cache to get *LOC updated. */
232
/* Read a location bitpack from bitpack BP (streamed via DATA_IN) and
   either update *LOC directly (for reserved locations or repeats of the
   previous location) or record it in the location cache.  It is
   necessary to call apply_location_cache to get *LOC updated in the
   cached case.  */

void
lto_location_cache::input_location (location_t *loc, struct bitpack_d *bp,
				    struct data_in *data_in)
{
  /* Stream-side state: the last file/line/column/sysp read from the
     bitpack.  Static because the writer only streams deltas, so these
     must persist across calls.  */
  static const char *stream_file;
  static int stream_line;
  static int stream_col;
  static bool stream_sysp;
  bool file_change, line_change, column_change;

  gcc_assert (current_cache == this);

  *loc = bp_unpack_int_in_range (bp, "location", 0, RESERVED_LOCATION_COUNT);

  if (*loc < RESERVED_LOCATION_COUNT)
    return;

  /* Keep value RESERVED_LOCATION_COUNT in *loc as linemap lookups will
     ICE on it.  */

  /* The writer emits three change bits followed by only the components
     that changed; the unpack order here must mirror the output side.  */
  file_change = bp_unpack_value (bp, 1);
  line_change = bp_unpack_value (bp, 1);
  column_change = bp_unpack_value (bp, 1);

  if (file_change)
    {
      stream_file = canon_file_name (bp_unpack_string (data_in, bp));
      stream_sysp = bp_unpack_value (bp, 1);
    }

  if (line_change)
    stream_line = bp_unpack_var_len_unsigned (bp);

  if (column_change)
    stream_col = bp_unpack_var_len_unsigned (bp);

  /* This optimization saves location cache operations during gimple
     streaming: a location identical to the last applied one can reuse
     current_loc without going through the cache.  */

  if (current_file == stream_file && current_line == stream_line
      && current_col == stream_col && current_sysp == stream_sysp)
    {
      *loc = current_loc;
      return;
    }

  /* Defer the linemap update; *loc keeps its sentinel value until
     apply_location_cache patches it.  */
  struct cached_location entry
    = {stream_file, loc, stream_line, stream_col, stream_sysp};
  loc_cache.safe_push (entry);
}
283
284 /* Read a location bitpack from input block IB and either update *LOC directly
285 or add it to the location cache.
286 It is neccesary to call apply_location_cache to get *LOC updated. */
287
/* Read a location bitpack from bitpack BP and either update *LOC
   directly or add it to DATA_IN's location cache.  It is necessary to
   call apply_location_cache to get *LOC updated.  Thin wrapper around
   lto_location_cache::input_location.  */

void
lto_input_location (location_t *loc, struct bitpack_d *bp,
		    struct data_in *data_in)
{
  data_in->location_cache.input_location (loc, bp, data_in);
}
294
295 /* Read location and return it instead of going through location caching.
296 This should be used only when the resulting location is not going to be
297 discarded. */
298
/* Read a location from BP/DATA_IN and return it immediately, flushing
   the location cache so the result is final.  This should be used only
   when the resulting location is not going to be discarded (flushing
   also commits every other pending cached location).  */

location_t
stream_input_location_now (struct bitpack_d *bp, struct data_in *data_in)
{
  location_t loc;
  stream_input_location (&loc, bp, data_in);
  data_in->location_cache.apply_location_cache ();
  return loc;
}
307
308 /* Read a reference to a tree node from DATA_IN using input block IB.
309 TAG is the expected node that should be found in IB, if TAG belongs
310 to one of the indexable trees, expect to read a reference index to
311 be looked up in one of the symbol tables, otherwise read the pysical
312 representation of the tree using stream_read_tree. FN is the
313 function scope for the read tree. */
314
315 tree
316 lto_input_tree_ref (struct lto_input_block *ib, struct data_in *data_in,
317 struct function *fn, enum LTO_tags tag)
318 {
319 unsigned HOST_WIDE_INT ix_u;
320 tree result = NULL_TREE;
321
322 lto_tag_check_range (tag, LTO_field_decl_ref, LTO_namelist_decl_ref);
323
324 switch (tag)
325 {
326 case LTO_type_ref:
327 ix_u = streamer_read_uhwi (ib);
328 result = lto_file_decl_data_get_type (data_in->file_data, ix_u);
329 break;
330
331 case LTO_ssa_name_ref:
332 ix_u = streamer_read_uhwi (ib);
333 result = (*SSANAMES (fn))[ix_u];
334 break;
335
336 case LTO_field_decl_ref:
337 ix_u = streamer_read_uhwi (ib);
338 result = lto_file_decl_data_get_field_decl (data_in->file_data, ix_u);
339 break;
340
341 case LTO_function_decl_ref:
342 ix_u = streamer_read_uhwi (ib);
343 result = lto_file_decl_data_get_fn_decl (data_in->file_data, ix_u);
344 break;
345
346 case LTO_type_decl_ref:
347 ix_u = streamer_read_uhwi (ib);
348 result = lto_file_decl_data_get_type_decl (data_in->file_data, ix_u);
349 break;
350
351 case LTO_namespace_decl_ref:
352 ix_u = streamer_read_uhwi (ib);
353 result = lto_file_decl_data_get_namespace_decl (data_in->file_data, ix_u);
354 break;
355
356 case LTO_global_decl_ref:
357 case LTO_result_decl_ref:
358 case LTO_const_decl_ref:
359 case LTO_imported_decl_ref:
360 case LTO_label_decl_ref:
361 case LTO_translation_unit_decl_ref:
362 case LTO_namelist_decl_ref:
363 if (!data_in->file_data->current_decl_state)
364 printf("tag %d\n", tag);
365 ix_u = streamer_read_uhwi (ib);
366 result = lto_file_decl_data_get_var_decl (data_in->file_data, ix_u);
367 break;
368
369 default:
370 gcc_unreachable ();
371 }
372
373 gcc_assert (result);
374
375 return result;
376 }
377
378
379 /* Read and return a double-linked list of catch handlers from input
380 block IB, using descriptors in DATA_IN. */
381
/* Read and return a double-linked list of catch handlers from input
   block IB, using descriptors in DATA_IN.  On return *LAST_P points to
   the final handler (or NULL for an empty list); the head of the list
   is the return value.  The list is terminated in the stream by an
   LTO_null tag.  */

static struct eh_catch_d *
lto_input_eh_catch_list (struct lto_input_block *ib, struct data_in *data_in,
			 eh_catch *last_p)
{
  eh_catch first;
  enum LTO_tags tag;

  *last_p = first = NULL;
  tag = streamer_read_record_start (ib);
  while (tag)
    {
      tree list;
      eh_catch n;

      lto_tag_check_range (tag, LTO_eh_catch, LTO_eh_catch);

      /* Read the catch node.  Field order must match the writer.  */
      n = ggc_cleared_alloc<eh_catch_d> ();
      n->type_list = stream_read_tree (ib, data_in);
      n->filter_list = stream_read_tree (ib, data_in);
      n->label = stream_read_tree (ib, data_in);

      /* Register all the types in N->FILTER_LIST.  */
      for (list = n->filter_list; list; list = TREE_CHAIN (list))
	add_type_for_runtime (TREE_VALUE (list));

      /* Chain N to the end of the list.  */
      if (*last_p)
	(*last_p)->next_catch = n;
      n->prev_catch = *last_p;
      *last_p = n;

      /* Set the head of the list the first time through the loop.  */
      if (first == NULL)
	first = n;

      tag = streamer_read_record_start (ib);
    }

  return first;
}
423
424
425 /* Read and return EH region IX from input block IB, using descriptors
426 in DATA_IN. */
427
/* Read and return EH region IX from input block IB, using descriptors
   in DATA_IN.  Returns NULL if the stream carries an LTO_null tag at
   this position.  Region cross-references are read as raw index numbers
   smuggled through the pointer fields; fixup_eh_region_pointers turns
   them into real pointers once the whole array is in.  */

static eh_region
input_eh_region (struct lto_input_block *ib, struct data_in *data_in, int ix)
{
  enum LTO_tags tag;
  eh_region r;

  /* Read the region header.  */
  tag = streamer_read_record_start (ib);
  if (tag == LTO_null)
    return NULL;

  r = ggc_cleared_alloc<eh_region_d> ();
  r->index = streamer_read_hwi (ib);

  /* The streamed index must agree with the slot we are filling.  */
  gcc_assert (r->index == ix);

  /* Read all the region pointers as region numbers.  We'll fix up
     the pointers once the whole array has been read.  */
  r->outer = (eh_region) (intptr_t) streamer_read_hwi (ib);
  r->inner = (eh_region) (intptr_t) streamer_read_hwi (ib);
  r->next_peer = (eh_region) (intptr_t) streamer_read_hwi (ib);

  /* The record tag doubles as the region type discriminator.  */
  switch (tag)
    {
    case LTO_ert_cleanup:
      r->type = ERT_CLEANUP;
      break;

    case LTO_ert_try:
      {
	struct eh_catch_d *last_catch;
	r->type = ERT_TRY;
	r->u.eh_try.first_catch = lto_input_eh_catch_list (ib, data_in,
							   &last_catch);
	r->u.eh_try.last_catch = last_catch;
	break;
      }

    case LTO_ert_allowed_exceptions:
      {
	tree l;

	r->type = ERT_ALLOWED_EXCEPTIONS;
	r->u.allowed.type_list = stream_read_tree (ib, data_in);
	r->u.allowed.label = stream_read_tree (ib, data_in);
	r->u.allowed.filter = streamer_read_uhwi (ib);

	/* Register the allowed types with the runtime.  */
	for (l = r->u.allowed.type_list; l ; l = TREE_CHAIN (l))
	  add_type_for_runtime (TREE_VALUE (l));
      }
      break;

    case LTO_ert_must_not_throw:
      {
	r->type = ERT_MUST_NOT_THROW;
	r->u.must_not_throw.failure_decl = stream_read_tree (ib, data_in);
	bitpack_d bp = streamer_read_bitpack (ib);
	r->u.must_not_throw.failure_loc
	  = stream_input_location_now (&bp, data_in);
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Landing pads are also read as an index for later fixup.  */
  r->landing_pads = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);

  return r;
}
498
499
500 /* Read and return EH landing pad IX from input block IB, using descriptors
501 in DATA_IN. */
502
/* Read and return EH landing pad IX from input block IB, using
   descriptors in DATA_IN.  Returns NULL on an LTO_null tag.  The
   next_lp and region fields are read as raw index numbers; see
   fixup_eh_region_pointers.  */

static eh_landing_pad
input_eh_lp (struct lto_input_block *ib, struct data_in *data_in, int ix)
{
  enum LTO_tags tag;
  eh_landing_pad lp;

  /* Read the landing pad header.  */
  tag = streamer_read_record_start (ib);
  if (tag == LTO_null)
    return NULL;

  lto_tag_check_range (tag, LTO_eh_landing_pad, LTO_eh_landing_pad);

  lp = ggc_cleared_alloc<eh_landing_pad_d> ();
  lp->index = streamer_read_hwi (ib);
  /* The streamed index must agree with the slot we are filling.  */
  gcc_assert (lp->index == ix);
  lp->next_lp = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);
  lp->region = (eh_region) (intptr_t) streamer_read_hwi (ib);
  lp->post_landing_pad = stream_read_tree (ib, data_in);

  return lp;
}
525
526
527 /* After reading the EH regions, pointers to peer and children regions
528 are region numbers. This converts all these region numbers into
529 real pointers into the rematerialized regions for FN. ROOT_REGION
530 is the region number for the root EH region in FN. */
531
/* After reading the EH regions, pointers to peer and children regions
   are region numbers.  This converts all these region numbers into
   real pointers into the rematerialized regions for FN.  ROOT_REGION
   is the region number for the root EH region in FN.  */

static void
fixup_eh_region_pointers (struct function *fn, HOST_WIDE_INT root_region)
{
  unsigned i;
  vec<eh_region, va_gc> *eh_array = fn->eh->region_array;
  vec<eh_landing_pad, va_gc> *lp_array = fn->eh->lp_array;
  eh_region r;
  eh_landing_pad lp;

  gcc_assert (eh_array && lp_array);

  gcc_assert (root_region >= 0);
  fn->eh->region_tree = (*eh_array)[root_region];

/* Reinterpret the pointer field as the array index it really holds and
   replace it with the element at that index.  NULL stays NULL since
   index 0 maps to the (unused) slot 0.  */
#define FIXUP_EH_REGION(r) (r) = (*eh_array)[(HOST_WIDE_INT) (intptr_t) (r)]
#define FIXUP_EH_LP(p) (p) = (*lp_array)[(HOST_WIDE_INT) (intptr_t) (p)]

  /* Convert all the index numbers stored in pointer fields into
     pointers to the corresponding slots in the EH region array.  */
  FOR_EACH_VEC_ELT (*eh_array, i, r)
    {
      /* The array may contain NULL regions.  */
      if (r == NULL)
	continue;

      gcc_assert (i == (unsigned) r->index);
      FIXUP_EH_REGION (r->outer);
      FIXUP_EH_REGION (r->inner);
      FIXUP_EH_REGION (r->next_peer);
      FIXUP_EH_LP (r->landing_pads);
    }

  /* Convert all the index numbers stored in pointer fields into
     pointers to the corresponding slots in the EH landing pad array.  */
  FOR_EACH_VEC_ELT (*lp_array, i, lp)
    {
      /* The array may contain NULL landing pads.  */
      if (lp == NULL)
	continue;

      gcc_assert (i == (unsigned) lp->index);
      FIXUP_EH_LP (lp->next_lp);
      FIXUP_EH_REGION (lp->region);
    }

#undef FIXUP_EH_REGION
#undef FIXUP_EH_LP
}
580
581
582 /* Initialize EH support. */
583
584 void
585 lto_init_eh (void)
586 {
587 static bool eh_initialized_p = false;
588
589 if (eh_initialized_p)
590 return;
591
592 /* Contrary to most other FEs, we only initialize EH support when at
593 least one of the files in the set contains exception regions in
594 it. Since this happens much later than the call to init_eh in
595 lang_dependent_init, we have to set flag_exceptions and call
596 init_eh again to initialize the EH tables. */
597 flag_exceptions = 1;
598 init_eh ();
599
600 eh_initialized_p = true;
601 }
602
603
604 /* Read the exception table for FN from IB using the data descriptors
605 in DATA_IN. */
606
/* Read the exception table for FN from IB using the data descriptors
   in DATA_IN.  The table consists of the root region index, the region
   array, the landing-pad array, the runtime type table and the
   action-chain table, each preceded by its length; the record is
   closed by an LTO_null tag.  A leading LTO_null means FN has no EH
   data at all.  */

static void
input_eh_regions (struct lto_input_block *ib, struct data_in *data_in,
		  struct function *fn)
{
  HOST_WIDE_INT i, root_region, len;
  enum LTO_tags tag;

  tag = streamer_read_record_start (ib);
  if (tag == LTO_null)
    return;

  lto_tag_check_range (tag, LTO_eh_table, LTO_eh_table);

  /* If the file contains EH regions, then it was compiled with
     -fexceptions.  In that case, initialize the backend EH
     machinery.  */
  lto_init_eh ();

  gcc_assert (fn->eh);

  root_region = streamer_read_hwi (ib);
  gcc_assert (root_region == (int) root_region);

  /* Read the EH region array.  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      vec_safe_grow_cleared (fn->eh->region_array, len);
      for (i = 0; i < len; i++)
	{
	  eh_region r = input_eh_region (ib, data_in, i);
	  (*fn->eh->region_array)[i] = r;
	}
    }

  /* Read the landing pads.  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      vec_safe_grow_cleared (fn->eh->lp_array, len);
      for (i = 0; i < len; i++)
	{
	  eh_landing_pad lp = input_eh_lp (ib, data_in, i);
	  (*fn->eh->lp_array)[i] = lp;
	}
    }

  /* Read the runtime type data.  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      vec_safe_grow_cleared (fn->eh->ttype_data, len);
      for (i = 0; i < len; i++)
	{
	  tree ttype = stream_read_tree (ib, data_in);
	  (*fn->eh->ttype_data)[i] = ttype;
	}
    }

  /* Read the table of action chains.  The representation differs for
     the ARM EABI unwinder (trees) vs. everything else (bytes).  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      if (targetm.arm_eabi_unwinder)
	{
	  vec_safe_grow_cleared (fn->eh->ehspec_data.arm_eabi, len);
	  for (i = 0; i < len; i++)
	    {
	      tree t = stream_read_tree (ib, data_in);
	      (*fn->eh->ehspec_data.arm_eabi)[i] = t;
	    }
	}
      else
	{
	  vec_safe_grow_cleared (fn->eh->ehspec_data.other, len);
	  for (i = 0; i < len; i++)
	    {
	      uchar c = streamer_read_uchar (ib);
	      (*fn->eh->ehspec_data.other)[i] = c;
	    }
	}
    }

  /* Reconstruct the EH region tree by fixing up the peer/children
     pointers.  */
  fixup_eh_region_pointers (fn, root_region);

  /* The EH table record must be properly terminated.  */
  tag = streamer_read_record_start (ib);
  lto_tag_check_range (tag, LTO_null, LTO_null);
}
701
702
703 /* Make a new basic block with index INDEX in function FN. */
704
705 static basic_block
706 make_new_block (struct function *fn, unsigned int index)
707 {
708 basic_block bb = alloc_block ();
709 bb->index = index;
710 SET_BASIC_BLOCK_FOR_FN (fn, index, bb);
711 n_basic_blocks_for_fn (fn)++;
712 return bb;
713 }
714
715
716 /* Read the CFG for function FN from input block IB. */
717
/* Read the CFG for function FN from input block IB: profile status,
   block count, per-block edge lists, the linear next/prev block chain,
   and finally the loop tree.  The read order mirrors the writer
   exactly.  */

static void
input_cfg (struct lto_input_block *ib, struct data_in *data_in,
	   struct function *fn)
{
  unsigned int bb_count;
  basic_block p_bb;
  unsigned int i;
  int index;

  init_empty_tree_cfg_for_function (fn);
  init_ssa_operands (fn);

  profile_status_for_fn (fn) = streamer_read_enum (ib, profile_status_d,
						   PROFILE_LAST);

  bb_count = streamer_read_uhwi (ib);

  last_basic_block_for_fn (fn) = bb_count;
  if (bb_count > basic_block_info_for_fn (fn)->length ())
    vec_safe_grow_cleared (basic_block_info_for_fn (fn), bb_count);

  if (bb_count > label_to_block_map_for_fn (fn)->length ())
    vec_safe_grow_cleared (label_to_block_map_for_fn (fn), bb_count);

  /* Read the blocks with their outgoing edges; the list is terminated
     by index -1.  Destination blocks may be created before their own
     record is read.  */
  index = streamer_read_hwi (ib);
  while (index != -1)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
      unsigned int edge_count;

      if (bb == NULL)
	bb = make_new_block (fn, index);

      edge_count = streamer_read_uhwi (ib);

      /* Connect up the CFG.  */
      for (i = 0; i < edge_count; i++)
	{
	  unsigned int dest_index;
	  unsigned int edge_flags;
	  basic_block dest;
	  profile_probability probability;
	  edge e;

	  dest_index = streamer_read_uhwi (ib);
	  probability = profile_probability::stream_in (ib);
	  edge_flags = streamer_read_uhwi (ib);

	  dest = BASIC_BLOCK_FOR_FN (fn, dest_index);

	  if (dest == NULL)
	    dest = make_new_block (fn, dest_index);

	  e = make_edge (bb, dest, edge_flags);
	  e->probability = probability;
	}

      index = streamer_read_hwi (ib);
    }

  /* Rebuild the linear prev_bb/next_bb chain, again terminated by -1.  */
  p_bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
  index = streamer_read_hwi (ib);
  while (index != -1)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
      bb->prev_bb = p_bb;
      p_bb->next_bb = bb;
      p_bb = bb;
      index = streamer_read_hwi (ib);
    }

  /* ??? The cfgloop interface is tied to cfun.  */
  gcc_assert (cfun == fn);

  /* Input the loop tree.  */
  unsigned n_loops = streamer_read_uhwi (ib);
  if (n_loops == 0)
    return;

  struct loops *loops = ggc_cleared_alloc<struct loops> ();
  init_loops_structure (fn, loops, n_loops);
  set_loops_for_fn (fn, loops);

  /* Input each loop and associate it with its loop header so
     flow_loops_find can rebuild the loop tree.  */
  for (unsigned i = 1; i < n_loops; ++i)
    {
      int header_index = streamer_read_hwi (ib);
      /* -1 marks a loop slot that was removed on the writer side.  */
      if (header_index == -1)
	{
	  loops->larray->quick_push (NULL);
	  continue;
	}

      struct loop *loop = alloc_loop ();
      loop->header = BASIC_BLOCK_FOR_FN (fn, header_index);
      loop->header->loop_father = loop;

      /* Read everything copy_loop_info copies.  */
      loop->estimate_state = streamer_read_enum (ib, loop_estimation, EST_LAST);
      loop->any_upper_bound = streamer_read_hwi (ib);
      if (loop->any_upper_bound)
	loop->nb_iterations_upper_bound = streamer_read_widest_int (ib);
      loop->any_likely_upper_bound = streamer_read_hwi (ib);
      if (loop->any_likely_upper_bound)
	loop->nb_iterations_likely_upper_bound = streamer_read_widest_int (ib);
      loop->any_estimate = streamer_read_hwi (ib);
      if (loop->any_estimate)
	loop->nb_iterations_estimate = streamer_read_widest_int (ib);

      /* Read OMP SIMD related info.  */
      loop->safelen = streamer_read_hwi (ib);
      loop->unroll = streamer_read_hwi (ib);
      loop->dont_vectorize = streamer_read_hwi (ib);
      loop->force_vectorize = streamer_read_hwi (ib);
      loop->simduid = stream_read_tree (ib, data_in);

      place_new_loop (fn, loop);

      /* flow_loops_find doesn't like loops not in the tree, hook them
	 all as siblings of the tree root temporarily.  */
      flow_loop_tree_node_add (loops->tree_root, loop);
    }

  /* Rebuild the loop tree.  */
  flow_loops_find (loops);
}
845
846
847 /* Read the SSA names array for function FN from DATA_IN using input
848 block IB. */
849
/* Read the SSA names array for function FN from DATA_IN using input
   block IB.  Names are streamed as (version, default-def flag, var)
   records; a version of 0 terminates the list.  Versions that were
   freed on the writer side leave NULL_TREE gaps in the array.  */

static void
input_ssa_names (struct lto_input_block *ib, struct data_in *data_in,
		 struct function *fn)
{
  unsigned int i, size;

  size = streamer_read_uhwi (ib);
  init_ssanames (fn, size);

  i = streamer_read_uhwi (ib);
  while (i)
    {
      tree ssa_name, name;
      bool is_default_def;

      /* Skip over the elements that had been freed.  */
      while (SSANAMES (fn)->length () < i)
	SSANAMES (fn)->quick_push (NULL_TREE);

      is_default_def = (streamer_read_uchar (ib) != 0);
      name = stream_read_tree (ib, data_in);
      ssa_name = make_ssa_name_fn (fn, name, NULL);

      if (is_default_def)
	{
	  /* Default defs have no real defining statement; give them a
	     nop so SSA_NAME_DEF_STMT is never NULL.  */
	  set_ssa_default_def (cfun, SSA_NAME_VAR (ssa_name), ssa_name);
	  SSA_NAME_DEF_STMT (ssa_name) = gimple_build_nop ();
	}

      i = streamer_read_uhwi (ib);
    }
}
882
883
884 /* Go through all NODE edges and fixup call_stmt pointers
885 so they point to STMTS. */
886
/* Go through all NODE's callgraph edges and IPA references and fix up
   their statement pointers so they point into STMTS, the uid-indexed
   statement array built for function FN.  Streamed uids are 1-based,
   hence the -1 when indexing; uid 0 means "no statement".  */

static void
fixup_call_stmt_edges_1 (struct cgraph_node *node, gimple **stmts,
			 struct function *fn)
{
/* True if UID cannot index STMTS: zero (unset) or beyond FN's max uid.  */
#define STMT_UID_NOT_IN_RANGE(uid) \
  (gimple_stmt_max_uid (fn) < uid || uid == 0)

  struct cgraph_edge *cedge;
  struct ipa_ref *ref = NULL;
  unsigned int i;

  for (cedge = node->callees; cedge; cedge = cedge->next_callee)
    {
      if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
	fatal_error (input_location,
		     "Cgraph edge statement index out of range");
      cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
      if (!cedge->call_stmt)
	fatal_error (input_location,
		     "Cgraph edge statement index not found");
    }
  for (cedge = node->indirect_calls; cedge; cedge = cedge->next_callee)
    {
      if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
	fatal_error (input_location,
		     "Cgraph edge statement index out of range");
      cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
      if (!cedge->call_stmt)
	fatal_error (input_location, "Cgraph edge statement index not found");
    }
  /* References with uid 0 are not attached to any statement and are
     left alone.  */
  for (i = 0; node->iterate_reference (i, ref); i++)
    if (ref->lto_stmt_uid)
      {
	if (STMT_UID_NOT_IN_RANGE (ref->lto_stmt_uid))
	  fatal_error (input_location,
		       "Reference statement index out of range");
	ref->stmt = stmts[ref->lto_stmt_uid - 1];
	if (!ref->stmt)
	  fatal_error (input_location, "Reference statement index not found");
      }
}
928
929
930 /* Fixup call_stmt pointers in NODE and all clones. */
931
/* Fixup call_stmt pointers in ORIG and all its clones.  Walks up to the
   root of the clone tree first, then traverses the whole clone tree
   iteratively (depth-first via clones / next_sibling_clone / clone_of),
   applying fixup_call_stmt_edges_1 to every non-thunk node.  */

static void
fixup_call_stmt_edges (struct cgraph_node *orig, gimple **stmts)
{
  struct cgraph_node *node;
  struct function *fn;

  /* Statement uids belong to the root clone's function body.  */
  while (orig->clone_of)
    orig = orig->clone_of;
  fn = DECL_STRUCT_FUNCTION (orig->decl);

  /* Thunks have no gimple body, so nothing to fix up for them.  */
  if (!orig->thunk.thunk_p)
    fixup_call_stmt_edges_1 (orig, stmts, fn);
  if (orig->clones)
    for (node = orig->clones; node != orig;)
      {
	if (!node->thunk.thunk_p)
	  fixup_call_stmt_edges_1 (node, stmts, fn);
	/* Depth-first: descend into clones, else move to the next
	   sibling, else climb until a sibling exists or we are back
	   at ORIG.  */
	if (node->clones)
	  node = node->clones;
	else if (node->next_sibling_clone)
	  node = node->next_sibling_clone;
	else
	  {
	    while (node != orig && !node->next_sibling_clone)
	      node = node->clone_of;
	    if (node != orig)
	      node = node->next_sibling_clone;
	  }
      }
}
962
963
964 /* Input the base body of struct function FN from DATA_IN
965 using input block IB. */
966
/* Input the base body of struct function FN from DATA_IN using input
   block IB: static chain, non-local goto save area, local decls, IL
   properties, the flag bitpack and the start/end loci.  The read order
   (and the bit order inside the bitpack) must mirror the writer
   exactly.  */

static void
input_struct_function_base (struct function *fn, struct data_in *data_in,
			    struct lto_input_block *ib)
{
  struct bitpack_d bp;
  int len;

  /* Read the static chain and non-local goto save area.  */
  fn->static_chain_decl = stream_read_tree (ib, data_in);
  fn->nonlocal_goto_save_area = stream_read_tree (ib, data_in);

  /* Read all the local symbols.  */
  len = streamer_read_hwi (ib);
  if (len > 0)
    {
      int i;
      vec_safe_grow_cleared (fn->local_decls, len);
      for (i = 0; i < len; i++)
	{
	  tree t = stream_read_tree (ib, data_in);
	  (*fn->local_decls)[i] = t;
	}
    }

  /* Input the current IL state of the function.  */
  fn->curr_properties = streamer_read_uhwi (ib);

  /* Read all the attributes for FN, one bit each except where noted.  */
  bp = streamer_read_bitpack (ib);
  fn->is_thunk = bp_unpack_value (&bp, 1);
  fn->has_local_explicit_reg_vars = bp_unpack_value (&bp, 1);
  fn->returns_pcc_struct = bp_unpack_value (&bp, 1);
  fn->returns_struct = bp_unpack_value (&bp, 1);
  fn->can_throw_non_call_exceptions = bp_unpack_value (&bp, 1);
  fn->can_delete_dead_exceptions = bp_unpack_value (&bp, 1);
  fn->always_inline_functions_inlined = bp_unpack_value (&bp, 1);
  fn->after_inlining = bp_unpack_value (&bp, 1);
  fn->stdarg = bp_unpack_value (&bp, 1);
  fn->has_nonlocal_label = bp_unpack_value (&bp, 1);
  fn->has_forced_label_in_static = bp_unpack_value (&bp, 1);
  fn->calls_alloca = bp_unpack_value (&bp, 1);
  fn->calls_setjmp = bp_unpack_value (&bp, 1);
  fn->has_force_vectorize_loops = bp_unpack_value (&bp, 1);
  fn->has_simduid_loops = bp_unpack_value (&bp, 1);
  fn->va_list_fpr_size = bp_unpack_value (&bp, 8);
  fn->va_list_gpr_size = bp_unpack_value (&bp, 8);
  fn->last_clique = bp_unpack_value (&bp, sizeof (short) * 8);

  /* Input the function start and end loci.  */
  fn->function_start_locus = stream_input_location_now (&bp, data_in);
  fn->function_end_locus = stream_input_location_now (&bp, data_in);
}
1019
1020
1021 /* Read the body of function FN_DECL from DATA_IN using input block IB. */
1022
1023 static void
1024 input_function (tree fn_decl, struct data_in *data_in,
1025 struct lto_input_block *ib, struct lto_input_block *ib_cfg)
1026 {
1027 struct function *fn;
1028 enum LTO_tags tag;
1029 gimple **stmts;
1030 basic_block bb;
1031 struct cgraph_node *node;
1032
1033 tag = streamer_read_record_start (ib);
1034 lto_tag_check (tag, LTO_function);
1035
1036 /* Read decls for parameters and args. */
1037 DECL_RESULT (fn_decl) = stream_read_tree (ib, data_in);
1038 DECL_ARGUMENTS (fn_decl) = streamer_read_chain (ib, data_in);
1039
1040 /* Read debug args if available. */
1041 unsigned n_debugargs = streamer_read_uhwi (ib);
1042 if (n_debugargs)
1043 {
1044 vec<tree, va_gc> **debugargs = decl_debug_args_insert (fn_decl);
1045 vec_safe_grow (*debugargs, n_debugargs);
1046 for (unsigned i = 0; i < n_debugargs; ++i)
1047 (**debugargs)[i] = stream_read_tree (ib, data_in);
1048 }
1049
1050 /* Read the tree of lexical scopes for the function. */
1051 DECL_INITIAL (fn_decl) = stream_read_tree (ib, data_in);
1052 unsigned block_leaf_count = streamer_read_uhwi (ib);
1053 while (block_leaf_count--)
1054 stream_read_tree (ib, data_in);
1055
1056 if (!streamer_read_uhwi (ib))
1057 return;
1058
1059 push_struct_function (fn_decl);
1060 fn = DECL_STRUCT_FUNCTION (fn_decl);
1061 init_tree_ssa (fn);
1062 /* We input IL in SSA form. */
1063 cfun->gimple_df->in_ssa_p = true;
1064
1065 gimple_register_cfg_hooks ();
1066
1067 node = cgraph_node::get (fn_decl);
1068 if (!node)
1069 node = cgraph_node::create (fn_decl);
1070 input_struct_function_base (fn, data_in, ib);
1071 input_cfg (ib_cfg, data_in, fn);
1072
1073 /* Read all the SSA names. */
1074 input_ssa_names (ib, data_in, fn);
1075
1076 /* Read the exception handling regions in the function. */
1077 input_eh_regions (ib, data_in, fn);
1078
1079 gcc_assert (DECL_INITIAL (fn_decl));
1080 DECL_SAVED_TREE (fn_decl) = NULL_TREE;
1081
1082 /* Read all the basic blocks. */
1083 tag = streamer_read_record_start (ib);
1084 while (tag)
1085 {
1086 input_bb (ib, tag, data_in, fn,
1087 node->count_materialization_scale);
1088 tag = streamer_read_record_start (ib);
1089 }
1090
1091 /* Fix up the call statements that are mentioned in the callgraph
1092 edges. */
1093 set_gimple_stmt_max_uid (cfun, 0);
1094 FOR_ALL_BB_FN (bb, cfun)
1095 {
1096 gimple_stmt_iterator gsi;
1097 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1098 {
1099 gimple *stmt = gsi_stmt (gsi);
1100 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1101 }
1102 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1103 {
1104 gimple *stmt = gsi_stmt (gsi);
1105 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1106 }
1107 }
1108 stmts = (gimple **) xcalloc (gimple_stmt_max_uid (fn), sizeof (gimple *));
1109 FOR_ALL_BB_FN (bb, cfun)
1110 {
1111 gimple_stmt_iterator bsi = gsi_start_phis (bb);
1112 while (!gsi_end_p (bsi))
1113 {
1114 gimple *stmt = gsi_stmt (bsi);
1115 gsi_next (&bsi);
1116 stmts[gimple_uid (stmt)] = stmt;
1117 }
1118 bsi = gsi_start_bb (bb);
1119 while (!gsi_end_p (bsi))
1120 {
1121 gimple *stmt = gsi_stmt (bsi);
1122 bool remove = false;
1123 /* If we're recompiling LTO objects with debug stmts but
1124 we're not supposed to have debug stmts, remove them now.
1125 We can't remove them earlier because this would cause uid
1126 mismatches in fixups, but we can do it at this point, as
1127 long as debug stmts don't require fixups.
1128 Similarly remove all IFN_*SAN_* internal calls */
1129 if (!flag_wpa)
1130 {
1131 if (!MAY_HAVE_DEBUG_STMTS && is_gimple_debug (stmt))
1132 remove = true;
1133 if (is_gimple_call (stmt)
1134 && gimple_call_internal_p (stmt))
1135 {
1136 switch (gimple_call_internal_fn (stmt))
1137 {
1138 case IFN_UBSAN_NULL:
1139 if ((flag_sanitize
1140 & (SANITIZE_NULL | SANITIZE_ALIGNMENT)) == 0)
1141 remove = true;
1142 break;
1143 case IFN_UBSAN_BOUNDS:
1144 if ((flag_sanitize & SANITIZE_BOUNDS) == 0)
1145 remove = true;
1146 break;
1147 case IFN_UBSAN_VPTR:
1148 if ((flag_sanitize & SANITIZE_VPTR) == 0)
1149 remove = true;
1150 break;
1151 case IFN_UBSAN_OBJECT_SIZE:
1152 if ((flag_sanitize & SANITIZE_OBJECT_SIZE) == 0)
1153 remove = true;
1154 break;
1155 case IFN_UBSAN_PTR:
1156 if ((flag_sanitize & SANITIZE_POINTER_OVERFLOW) == 0)
1157 remove = true;
1158 break;
1159 case IFN_ASAN_MARK:
1160 if ((flag_sanitize & SANITIZE_ADDRESS) == 0)
1161 remove = true;
1162 break;
1163 case IFN_TSAN_FUNC_EXIT:
1164 if ((flag_sanitize & SANITIZE_THREAD) == 0)
1165 remove = true;
1166 break;
1167 default:
1168 break;
1169 }
1170 gcc_assert (!remove || gimple_call_lhs (stmt) == NULL_TREE);
1171 }
1172 }
1173 if (remove)
1174 {
1175 gimple_stmt_iterator gsi = bsi;
1176 gsi_next (&bsi);
1177 unlink_stmt_vdef (stmt);
1178 release_defs (stmt);
1179 gsi_remove (&gsi, true);
1180 }
1181 else
1182 {
1183 gsi_next (&bsi);
1184 stmts[gimple_uid (stmt)] = stmt;
1185 }
1186 }
1187 }
1188
1189 /* Set the gimple body to the statement sequence in the entry
1190 basic block. FIXME lto, this is fairly hacky. The existence
1191 of a gimple body is used by the cgraph routines, but we should
1192 really use the presence of the CFG. */
1193 {
1194 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
1195 gimple_set_body (fn_decl, bb_seq (ei_edge (ei)->dest));
1196 }
1197
1198 update_max_bb_count ();
1199 fixup_call_stmt_edges (node, stmts);
1200 execute_all_ipa_stmt_fixups (node, stmts);
1201
1202 update_ssa (TODO_update_ssa_only_virtuals);
1203 free_dominance_info (CDI_DOMINATORS);
1204 free_dominance_info (CDI_POST_DOMINATORS);
1205 free (stmts);
1206 pop_cfun ();
1207 }
1208
/* Read the initializer (static constructor) for variable VAR from
   DATA_IN using input block IB and store it in DECL_INITIAL.  Unlike
   function bodies, a variable's section carries just this one tree.  */

static void
input_constructor (tree var, struct data_in *data_in,
		   struct lto_input_block *ib)
{
  DECL_INITIAL (var) = stream_read_tree (ib, data_in);
}
1217
1218
/* Read the body from DATA for function NODE and fill it in.
   FILE_DATA are the global decls and types.  SECTION_TYPE is either
   LTO_section_function_body or LTO_section_static_initializer.  If
   section type is LTO_section_function_body, FN must be the decl for
   that function.  */

static void
lto_read_body_or_constructor (struct lto_file_decl_data *file_data, struct symtab_node *node,
			      const char *data, enum lto_section_type section_type)
{
  const struct lto_function_header *header;
  struct data_in *data_in;
  int cfg_offset;
  int main_offset;
  int string_offset;
  tree fn_decl = node->decl;

  /* Section layout: header, [CFG stream — functions only], main tree
     stream, string table.  Compute the offsets of each part.  */
  header = (const struct lto_function_header *) data;
  if (TREE_CODE (node->decl) == FUNCTION_DECL)
    {
      cfg_offset = sizeof (struct lto_function_header);
      main_offset = cfg_offset + header->cfg_size;
      string_offset = main_offset + header->main_size;
    }
  else
    {
      /* Variables have no CFG stream.  CFG_OFFSET is deliberately left
	 unset here; it is only read on the FUNCTION_DECL path below.  */
      main_offset = sizeof (struct lto_function_header);
      string_offset = main_offset + header->main_size;
    }

  data_in = lto_data_in_create (file_data, data + string_offset,
				header->string_size, vNULL);

  if (section_type == LTO_section_function_body)
    {
      struct lto_in_decl_state *decl_state;
      unsigned from;

      gcc_checking_assert (node);

      /* Use the function's decl state so indexable-tree references in
	 the body resolve against the per-function tables.  Restored to
	 the global state at the end of this block.  */
      decl_state = lto_get_function_in_decl_state (file_data, fn_decl);
      gcc_assert (decl_state);
      file_data->current_decl_state = decl_state;


      /* Set up the struct function.  Remember the cache position FROM
	 so the fixup loop below only touches trees streamed locally
	 for this body.  */
      from = data_in->reader_cache->nodes.length ();
      lto_input_block ib_main (data + main_offset, header->main_size,
			       file_data->mode_table);
      if (TREE_CODE (node->decl) == FUNCTION_DECL)
	{
	  lto_input_block ib_cfg (data + cfg_offset, header->cfg_size,
				  file_data->mode_table);
	  input_function (fn_decl, data_in, &ib_main, &ib_cfg);
	}
      else
	input_constructor (fn_decl, data_in, &ib_main);
      data_in->location_cache.apply_location_cache ();
      /* And fixup types we streamed locally.  Locally streamed types
	 were not merged, so compute TYPE_CANONICAL and chain each
	 variant onto its main variant's variant list here.  */
      {
	struct streamer_tree_cache_d *cache = data_in->reader_cache;
	unsigned len = cache->nodes.length ();
	unsigned i;
	for (i = len; i-- > from;)
	  {
	    tree t = streamer_tree_cache_get_tree (cache, i);
	    if (t == NULL_TREE)
	      continue;

	    if (TYPE_P (t))
	      {
		gcc_assert (TYPE_CANONICAL (t) == NULL_TREE);
		if (type_with_alias_set_p (t)
		    && canonical_type_used_p (t))
		  TYPE_CANONICAL (t) = TYPE_MAIN_VARIANT (t);
		if (TYPE_MAIN_VARIANT (t) != t)
		  {
		    gcc_assert (TYPE_NEXT_VARIANT (t) == NULL_TREE);
		    TYPE_NEXT_VARIANT (t)
		      = TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t));
		    TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t)) = t;
		  }
	      }
	  }
      }

      /* Restore decl state */
      file_data->current_decl_state = file_data->global_decl_state;
    }

  lto_data_in_delete (data_in);
}
1312
1313
1314 /* Read the body of NODE using DATA. FILE_DATA holds the global
1315 decls and types. */
1316
1317 void
1318 lto_input_function_body (struct lto_file_decl_data *file_data,
1319 struct cgraph_node *node, const char *data)
1320 {
1321 lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
1322 }
1323
1324 /* Read the body of NODE using DATA. FILE_DATA holds the global
1325 decls and types. */
1326
1327 void
1328 lto_input_variable_constructor (struct lto_file_decl_data *file_data,
1329 struct varpool_node *node, const char *data)
1330 {
1331 lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
1332 }
1333
1334
/* Queue of accumulated decl -> DIE mappings.  As with locations, these
   are only applied to prevailing tree nodes during tree merging.  */
1337 vec<dref_entry> dref_queue;
1338
/* Read the physical representation of a tree node EXPR from
   input block IB using the per-file context in DATA_IN.  EXPR must
   already be allocated (by streamer_alloc_tree) and registered in the
   reader cache so back-references can resolve to it.  */

static void
lto_read_tree_1 (struct lto_input_block *ib, struct data_in *data_in, tree expr)
{
  /* Read all the bitfield values in EXPR.  Note that for LTO, we
     only write language-independent bitfields, so no more unpacking is
     needed.  */
  streamer_read_tree_bitfields (ib, data_in, expr);

  /* Read all the pointer fields in EXPR.  */
  streamer_read_tree_body (ib, data_in, expr);

  /* Read any LTO-specific data not read by the tree streamer.
     FUNCTION_DECL and TRANSLATION_UNIT_DECL initializers are streamed
     separately, hence the exclusions.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    DECL_INITIAL (expr) = stream_read_tree (ib, data_in);

  /* Stream references to early generated DIEs.  Keep in sync with the
     trees handled in dwarf2out_register_external_die.  */
  if ((DECL_P (expr)
       && TREE_CODE (expr) != FIELD_DECL
       && TREE_CODE (expr) != DEBUG_EXPR_DECL
       && TREE_CODE (expr) != TYPE_DECL)
      || TREE_CODE (expr) == BLOCK)
    {
      const char *str = streamer_read_string (data_in, ib);
      if (str)
	{
	  unsigned HOST_WIDE_INT off = streamer_read_uhwi (ib);
	  dref_entry e = { expr, str, off };
	  /* Queue the mapping; it is applied later, only for
	     prevailing tree nodes (see dref_queue's comment).  */
	  dref_queue.safe_push (e);
	}
    }
}
1376
1377 /* Read the physical representation of a tree node with tag TAG from
1378 input block IB using the per-file context in DATA_IN. */
1379
1380 static tree
1381 lto_read_tree (struct lto_input_block *ib, struct data_in *data_in,
1382 enum LTO_tags tag, hashval_t hash)
1383 {
1384 /* Instantiate a new tree node. */
1385 tree result = streamer_alloc_tree (ib, data_in, tag);
1386
1387 /* Enter RESULT in the reader cache. This will make RESULT
1388 available so that circular references in the rest of the tree
1389 structure can be resolved in subsequent calls to stream_read_tree. */
1390 streamer_tree_cache_append (data_in->reader_cache, result, hash);
1391
1392 lto_read_tree_1 (ib, data_in, result);
1393
1394 /* end_marker = */ streamer_read_uchar (ib);
1395
1396 return result;
1397 }
1398
1399
/* Populate the reader cache with trees materialized from the SCC
   following in the IB, DATA_IN stream.  Return the SCC's hash and
   store its size in *LEN and its entry length in *ENTRY_LEN.  */

hashval_t
lto_input_scc (struct lto_input_block *ib, struct data_in *data_in,
	       unsigned *len, unsigned *entry_len)
{
  /* A blob of unnamed tree nodes, fill the cache from it and
     recurse.  */
  unsigned size = streamer_read_uhwi (ib);
  hashval_t scc_hash = streamer_read_uhwi (ib);
  unsigned scc_entry_len = 1;

  if (size == 1)
    {
      /* Singleton SCC: read it via the regular single-tree path.  */
      enum LTO_tags tag = streamer_read_record_start (ib);
      lto_input_tree_1 (ib, data_in, tag, scc_hash);
    }
  else
    {
      unsigned int first = data_in->reader_cache->nodes.length ();
      tree result;

      scc_entry_len = streamer_read_uhwi (ib);

      /* Materialize size trees by reading their headers.  All nodes
	 must be allocated and cached before any body is read, because
	 SCC members reference each other cyclically.  */
      for (unsigned i = 0; i < size; ++i)
	{
	  enum LTO_tags tag = streamer_read_record_start (ib);
	  /* References and nested SCCs cannot appear as SCC member
	     headers; seeing one means the stream is corrupt.  */
	  if (tag == LTO_null
	      || (tag >= LTO_field_decl_ref && tag <= LTO_global_decl_ref)
	      || tag == LTO_tree_pickle_reference
	      || tag == LTO_integer_cst
	      || tag == LTO_tree_scc)
	    gcc_unreachable ();

	  result = streamer_alloc_tree (ib, data_in, tag);
	  streamer_tree_cache_append (data_in->reader_cache, result, 0);
	}

      /* Read the tree bitpacks and references.  */
      for (unsigned i = 0; i < size; ++i)
	{
	  result = streamer_tree_cache_get_tree (data_in->reader_cache,
						 first + i);
	  lto_read_tree_1 (ib, data_in, result);
	  /* end_marker = */ streamer_read_uchar (ib);
	}
    }

  *len = size;
  *entry_len = scc_entry_len;
  return scc_hash;
}
1454
1455
1456 /* Read a tree from input block IB using the per-file context in
1457 DATA_IN. This context is used, for example, to resolve references
1458 to previously read nodes. */
1459
1460 tree
1461 lto_input_tree_1 (struct lto_input_block *ib, struct data_in *data_in,
1462 enum LTO_tags tag, hashval_t hash)
1463 {
1464 tree result;
1465
1466 gcc_assert ((unsigned) tag < (unsigned) LTO_NUM_TAGS);
1467
1468 if (tag == LTO_null)
1469 result = NULL_TREE;
1470 else if (tag >= LTO_field_decl_ref && tag <= LTO_namelist_decl_ref)
1471 {
1472 /* If TAG is a reference to an indexable tree, the next value
1473 in IB is the index into the table where we expect to find
1474 that tree. */
1475 result = lto_input_tree_ref (ib, data_in, cfun, tag);
1476 }
1477 else if (tag == LTO_tree_pickle_reference)
1478 {
1479 /* If TAG is a reference to a previously read tree, look it up in
1480 the reader cache. */
1481 result = streamer_get_pickled_tree (ib, data_in);
1482 }
1483 else if (tag == LTO_integer_cst)
1484 {
1485 /* For shared integer constants in singletons we can use the
1486 existing tree integer constant merging code. */
1487 tree type = stream_read_tree (ib, data_in);
1488 unsigned HOST_WIDE_INT len = streamer_read_uhwi (ib);
1489 unsigned HOST_WIDE_INT i;
1490 HOST_WIDE_INT a[WIDE_INT_MAX_ELTS];
1491
1492 for (i = 0; i < len; i++)
1493 a[i] = streamer_read_hwi (ib);
1494 gcc_assert (TYPE_PRECISION (type) <= MAX_BITSIZE_MODE_ANY_INT);
1495 result = wide_int_to_tree (type, wide_int::from_array
1496 (a, len, TYPE_PRECISION (type)));
1497 streamer_tree_cache_append (data_in->reader_cache, result, hash);
1498 }
1499 else if (tag == LTO_tree_scc)
1500 gcc_unreachable ();
1501 else
1502 {
1503 /* Otherwise, materialize a new node from IB. */
1504 result = lto_read_tree (ib, data_in, tag, hash);
1505 }
1506
1507 return result;
1508 }
1509
1510 tree
1511 lto_input_tree (struct lto_input_block *ib, struct data_in *data_in)
1512 {
1513 enum LTO_tags tag;
1514
1515 /* Input and skip SCCs. */
1516 while ((tag = streamer_read_record_start (ib)) == LTO_tree_scc)
1517 {
1518 unsigned len, entry_len;
1519 lto_input_scc (ib, data_in, &len, &entry_len);
1520
1521 /* Register DECLs with the debuginfo machinery. */
1522 while (!dref_queue.is_empty ())
1523 {
1524 dref_entry e = dref_queue.pop ();
1525 debug_hooks->register_external_die (e.decl, e.sym, e.off);
1526 }
1527 }
1528 return lto_input_tree_1 (ib, data_in, tag, 0);
1529 }
1530
1531
/* Input toplevel asms.  Read every top-level asm string from
   FILE_DATA's asm section, finalize each as an asm_node, and assign it
   an order offset by ORDER_BASE.  A missing section is not an error
   (the object simply has no toplevel asms).  */

void
lto_input_toplevel_asms (struct lto_file_decl_data *file_data, int order_base)
{
  size_t len;
  const char *data = lto_get_section_data (file_data, LTO_section_asm,
					   NULL, &len);
  const struct lto_simple_header_with_strings *header
    = (const struct lto_simple_header_with_strings *) data;
  int string_offset;
  struct data_in *data_in;
  tree str;

  if (! data)
    return;

  /* The string table follows the header and the main stream.  */
  string_offset = sizeof (*header) + header->main_size;

  lto_input_block ib (data + sizeof (*header), header->main_size,
		      file_data->mode_table);

  data_in = lto_data_in_create (file_data, data + string_offset,
				header->string_size, vNULL);

  /* Each record is a string constant followed by its original order;
     a NULL string terminates the list.  */
  while ((str = streamer_read_string_cst (data_in, &ib)))
    {
      asm_node *node = symtab->finalize_toplevel_asm (str);
      node->order = streamer_read_hwi (&ib) + order_base;
      /* Keep the global order counter ahead of every assigned order.  */
      if (node->order >= symtab->order)
	symtab->order = node->order + 1;
    }

  lto_data_in_delete (data_in);

  lto_free_section_data (file_data, LTO_section_asm, NULL, data, len);
}
1569
1570
1571 /* Input mode table. */
1572
1573 void
1574 lto_input_mode_table (struct lto_file_decl_data *file_data)
1575 {
1576 size_t len;
1577 const char *data = lto_get_section_data (file_data, LTO_section_mode_table,
1578 NULL, &len);
1579 if (! data)
1580 {
1581 internal_error ("cannot read LTO mode table from %s",
1582 file_data->file_name);
1583 return;
1584 }
1585
1586 unsigned char *table = ggc_cleared_vec_alloc<unsigned char> (1 << 8);
1587 file_data->mode_table = table;
1588 const struct lto_simple_header_with_strings *header
1589 = (const struct lto_simple_header_with_strings *) data;
1590 int string_offset;
1591 struct data_in *data_in;
1592 string_offset = sizeof (*header) + header->main_size;
1593
1594 lto_input_block ib (data + sizeof (*header), header->main_size, NULL);
1595 data_in = lto_data_in_create (file_data, data + string_offset,
1596 header->string_size, vNULL);
1597 bitpack_d bp = streamer_read_bitpack (&ib);
1598
1599 table[VOIDmode] = VOIDmode;
1600 table[BLKmode] = BLKmode;
1601 unsigned int m;
1602 while ((m = bp_unpack_value (&bp, 8)) != VOIDmode)
1603 {
1604 enum mode_class mclass
1605 = bp_unpack_enum (&bp, mode_class, MAX_MODE_CLASS);
1606 unsigned int size = bp_unpack_value (&bp, 8);
1607 unsigned int prec = bp_unpack_value (&bp, 16);
1608 machine_mode inner = (machine_mode) bp_unpack_value (&bp, 8);
1609 unsigned int nunits = bp_unpack_value (&bp, 8);
1610 unsigned int ibit = 0, fbit = 0;
1611 unsigned int real_fmt_len = 0;
1612 const char *real_fmt_name = NULL;
1613 switch (mclass)
1614 {
1615 case MODE_FRACT:
1616 case MODE_UFRACT:
1617 case MODE_ACCUM:
1618 case MODE_UACCUM:
1619 ibit = bp_unpack_value (&bp, 8);
1620 fbit = bp_unpack_value (&bp, 8);
1621 break;
1622 case MODE_FLOAT:
1623 case MODE_DECIMAL_FLOAT:
1624 real_fmt_name = bp_unpack_indexed_string (data_in, &bp,
1625 &real_fmt_len);
1626 break;
1627 default:
1628 break;
1629 }
1630 /* First search just the GET_CLASS_NARROWEST_MODE to wider modes,
1631 if not found, fallback to all modes. */
1632 int pass;
1633 for (pass = 0; pass < 2; pass++)
1634 for (machine_mode mr = pass ? VOIDmode
1635 : GET_CLASS_NARROWEST_MODE (mclass);
1636 pass ? mr < MAX_MACHINE_MODE : mr != VOIDmode;
1637 pass ? mr = (machine_mode) (mr + 1)
1638 : mr = GET_MODE_WIDER_MODE (mr).else_void ())
1639 if (GET_MODE_CLASS (mr) != mclass
1640 || GET_MODE_SIZE (mr) != size
1641 || GET_MODE_PRECISION (mr) != prec
1642 || (inner == m
1643 ? GET_MODE_INNER (mr) != mr
1644 : GET_MODE_INNER (mr) != table[(int) inner])
1645 || GET_MODE_IBIT (mr) != ibit
1646 || GET_MODE_FBIT (mr) != fbit
1647 || GET_MODE_NUNITS (mr) != nunits)
1648 continue;
1649 else if ((mclass == MODE_FLOAT || mclass == MODE_DECIMAL_FLOAT)
1650 && strcmp (REAL_MODE_FORMAT (mr)->name, real_fmt_name) != 0)
1651 continue;
1652 else
1653 {
1654 table[m] = mr;
1655 pass = 2;
1656 break;
1657 }
1658 unsigned int mname_len;
1659 const char *mname = bp_unpack_indexed_string (data_in, &bp, &mname_len);
1660 if (pass == 2)
1661 {
1662 switch (mclass)
1663 {
1664 case MODE_VECTOR_INT:
1665 case MODE_VECTOR_FLOAT:
1666 case MODE_VECTOR_FRACT:
1667 case MODE_VECTOR_UFRACT:
1668 case MODE_VECTOR_ACCUM:
1669 case MODE_VECTOR_UACCUM:
1670 /* For unsupported vector modes just use BLKmode,
1671 if the scalar mode is supported. */
1672 if (table[(int) inner] != VOIDmode)
1673 {
1674 table[m] = BLKmode;
1675 break;
1676 }
1677 /* FALLTHRU */
1678 default:
1679 fatal_error (UNKNOWN_LOCATION, "unsupported mode %s\n", mname);
1680 break;
1681 }
1682 }
1683 }
1684 lto_data_in_delete (data_in);
1685
1686 lto_free_section_data (file_data, LTO_section_mode_table, NULL, data, len);
1687 }
1688
1689
1690 /* Initialization for the LTO reader. */
1691
1692 void
1693 lto_reader_init (void)
1694 {
1695 lto_streamer_init ();
1696 file_name_hash_table
1697 = new hash_table<freeing_string_slot_hasher> (37);
1698 }
1699
1700
1701 /* Create a new data_in object for FILE_DATA. STRINGS is the string
1702 table to use with LEN strings. RESOLUTIONS is the vector of linker
1703 resolutions (NULL if not using a linker plugin). */
1704
1705 struct data_in *
1706 lto_data_in_create (struct lto_file_decl_data *file_data, const char *strings,
1707 unsigned len,
1708 vec<ld_plugin_symbol_resolution_t> resolutions)
1709 {
1710 struct data_in *data_in = new (struct data_in);
1711 data_in->file_data = file_data;
1712 data_in->strings = strings;
1713 data_in->strings_len = len;
1714 data_in->globals_resolution = resolutions;
1715 data_in->reader_cache = streamer_tree_cache_create (false, false, true);
1716 return data_in;
1717 }
1718
1719
1720 /* Remove DATA_IN. */
1721
1722 void
1723 lto_data_in_delete (struct data_in *data_in)
1724 {
1725 data_in->globals_resolution.release ();
1726 streamer_tree_cache_delete (data_in->reader_cache);
1727 delete data_in;
1728 }