lto-streamer-in.c (lto_input_data_block): Adjust T const cast to avoid warning.
[gcc.git] / gcc / lto-streamer-in.c
1 /* Read the GIMPLE representation from a file stream.
2
3 Copyright (C) 2009-2017 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "cfghooks.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "gimple-streamer.h"
35 #include "toplev.h"
36 #include "gimple-iterator.h"
37 #include "tree-cfg.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "tree-ssa.h"
41 #include "except.h"
42 #include "cgraph.h"
43 #include "cfgloop.h"
44
45
/* A string_slot hasher whose remove hook releases the slot object
   itself; used for the file-name table below so slot entries are
   reclaimed when the table is destroyed.  */

struct freeing_string_slot_hasher : string_slot_hasher
{
  static inline void remove (value_type *);
};

/* Free the string_slot object V when it is removed from the table.
   NOTE(review): only the slot struct is freed here; the string it
   points to is presumably owned elsewhere — confirm against callers.  */

inline void
freeing_string_slot_hasher::remove (value_type *v)
{
  free (v);
}
56
/* The table holding canonicalized file names; slot objects are freed
   by freeing_string_slot_hasher::remove when the table is destroyed.  */
static hash_table<freeing_string_slot_hasher> *file_name_hash_table;
59
60
61 /* Check that tag ACTUAL has one of the given values. NUM_TAGS is the
62 number of valid tag values to check. */
63
64 void
65 lto_tag_check_set (enum LTO_tags actual, int ntags, ...)
66 {
67 va_list ap;
68 int i;
69
70 va_start (ap, ntags);
71 for (i = 0; i < ntags; i++)
72 if ((unsigned) actual == va_arg (ap, unsigned))
73 {
74 va_end (ap);
75 return;
76 }
77
78 va_end (ap);
79 internal_error ("bytecode stream: unexpected tag %s", lto_tag_name (actual));
80 }
81
82
83 /* Read LENGTH bytes from STREAM to ADDR. */
84
85 void
86 lto_input_data_block (struct lto_input_block *ib, void *addr, size_t length)
87 {
88 size_t i;
89 unsigned char *const buffer = (unsigned char *) addr;
90
91 for (i = 0; i < length; i++)
92 buffer[i] = streamer_read_uchar (ib);
93 }
94
95
96 /* Lookup STRING in file_name_hash_table. If found, return the existing
97 string, otherwise insert STRING as the canonical version. */
98
99 static const char *
100 canon_file_name (const char *string)
101 {
102 string_slot **slot;
103 struct string_slot s_slot;
104 size_t len = strlen (string);
105
106 s_slot.s = string;
107 s_slot.len = len;
108
109 slot = file_name_hash_table->find_slot (&s_slot, INSERT);
110 if (*slot == NULL)
111 {
112 char *saved_string;
113 struct string_slot *new_slot;
114
115 saved_string = (char *) xmalloc (len + 1);
116 new_slot = XCNEW (struct string_slot);
117 memcpy (saved_string, string, len + 1);
118 new_slot->s = saved_string;
119 new_slot->len = len;
120 *slot = new_slot;
121 return saved_string;
122 }
123 else
124 {
125 struct string_slot *old_slot = *slot;
126 return old_slot->s;
127 }
128 }
129
130 /* Pointer to currently alive instance of lto_location_cache. */
131
132 lto_location_cache *lto_location_cache::current_cache;
133
134 /* Sort locations in source order. Start with file from last application. */
135
136 int
137 lto_location_cache::cmp_loc (const void *pa, const void *pb)
138 {
139 const cached_location *a = ((const cached_location *)pa);
140 const cached_location *b = ((const cached_location *)pb);
141 const char *current_file = current_cache->current_file;
142 int current_line = current_cache->current_line;
143
144 if (a->file == current_file && b->file != current_file)
145 return -1;
146 if (a->file != current_file && b->file == current_file)
147 return 1;
148 if (a->file == current_file && b->file == current_file)
149 {
150 if (a->line == current_line && b->line != current_line)
151 return -1;
152 if (a->line != current_line && b->line == current_line)
153 return 1;
154 }
155 if (a->file != b->file)
156 return strcmp (a->file, b->file);
157 if (a->sysp != b->sysp)
158 return a->sysp ? 1 : -1;
159 if (a->line != b->line)
160 return a->line - b->line;
161 return a->col - b->col;
162 }
163
164 /* Apply all changes in location cache. Add locations into linemap and patch
165 trees. */
166
bool
lto_location_cache::apply_location_cache ()
{
  /* Last file passed to linemap_add; static so that the LC_RENAME vs
     LC_ENTER decision survives across calls.  */
  static const char *prev_file;
  if (!loc_cache.length ())
    return false;
  if (loc_cache.length () > 1)
    loc_cache.qsort (cmp_loc);

  for (unsigned int i = 0; i < loc_cache.length (); i++)
    {
      struct cached_location loc = loc_cache[i];

      if (current_file != loc.file)
	linemap_add (line_table, prev_file ? LC_RENAME : LC_ENTER,
		     loc.sysp, loc.file, loc.line);
      else if (current_line != loc.line)
	{
	  int max = loc.col;

	  /* Scan ahead over the (sorted) entries sharing this file and
	     line to find the widest column, so linemap_line_start can
	     reserve a large-enough column range up front.  */
	  for (unsigned int j = i + 1; j < loc_cache.length (); j++)
	    if (loc.file != loc_cache[j].file
		|| loc.line != loc_cache[j].line)
	      break;
	    else if (max < loc_cache[j].col)
	      max = loc_cache[j].col;
	  linemap_line_start (line_table, loc.line, max + 1);
	}
      /* Unresolved cache entries carry this sentinel in *loc.loc.  */
      gcc_assert (*loc.loc == BUILTINS_LOCATION + 1);
      if (current_file == loc.file && current_line == loc.line
	  && current_col == loc.col)
	*loc.loc = current_loc;
      else
	current_loc = *loc.loc = linemap_position_for_column (line_table,
							      loc.col);
      current_line = loc.line;
      prev_file = current_file = loc.file;
      current_col = loc.col;
    }
  /* Everything has been materialized; empty the cache.  */
  loc_cache.truncate (0);
  accepted_length = 0;
  return true;
}
210
/* Tree merging did not succeed; mark all changes in the cache as accepted.  */
212
void
lto_location_cache::accept_location_cache ()
{
  /* Only the currently active cache may be manipulated.  */
  gcc_assert (current_cache == this);
  /* Everything cached so far becomes permanent, so a later
     revert_location_cache will not discard it.  */
  accepted_length = loc_cache.length ();
}
219
/* Tree merging did succeed; throw away recent changes.  */
221
void
lto_location_cache::revert_location_cache ()
{
  /* Drop every entry pushed since the last accept_location_cache.  */
  loc_cache.truncate (accepted_length);
}
227
/* Read a location bitpack from input block IB and either update *LOC directly
   or add it to the location cache.
   It is necessary to call apply_location_cache to get *LOC updated.  */
231
void
lto_location_cache::input_location (location_t *loc, struct bitpack_d *bp,
				    struct data_in *data_in)
{
  /* Stream state is static: the writer emits only deltas against the
     previously streamed location, so the reader must remember it
     across calls.  */
  static const char *stream_file;
  static int stream_line;
  static int stream_col;
  static bool stream_sysp;
  bool file_change, line_change, column_change;

  gcc_assert (current_cache == this);

  *loc = bp_unpack_int_in_range (bp, "location", 0, RESERVED_LOCATION_COUNT);

  /* Reserved locations stream as themselves; nothing to cache.  */
  if (*loc < RESERVED_LOCATION_COUNT)
    return;

  /* Keep value RESERVED_LOCATION_COUNT in *loc as linemap lookups will
     ICE on it.  */

  /* Three delta flags, in the same order the writer packed them.  */
  file_change = bp_unpack_value (bp, 1);
  line_change = bp_unpack_value (bp, 1);
  column_change = bp_unpack_value (bp, 1);

  if (file_change)
    {
      stream_file = canon_file_name (bp_unpack_string (data_in, bp));
      stream_sysp = bp_unpack_value (bp, 1);
    }

  if (line_change)
    stream_line = bp_unpack_var_len_unsigned (bp);

  if (column_change)
    stream_col = bp_unpack_var_len_unsigned (bp);

  /* This optimization saves location cache operations during gimple
     streaming.  */

  if (current_file == stream_file && current_line == stream_line
      && current_col == stream_col && current_sysp == stream_sysp)
    {
      *loc = current_loc;
      return;
    }

  /* Defer resolution: remember where *LOC lives so apply_location_cache
     can patch it once all locations are known.  */
  struct cached_location entry
    = {stream_file, loc, stream_line, stream_col, stream_sysp};
  loc_cache.safe_push (entry);
}
282
/* Read a location bitpack from input block IB and either update *LOC directly
   or add it to the location cache.
   It is necessary to call apply_location_cache to get *LOC updated.  */
286
void
lto_input_location (location_t *loc, struct bitpack_d *bp,
		    struct data_in *data_in)
{
  /* Delegate to DATA_IN's location cache; *LOC may only be patched
     later, when apply_location_cache runs.  */
  data_in->location_cache.input_location (loc, bp, data_in);
}
293
294 /* Read location and return it instead of going through location caching.
295 This should be used only when the resulting location is not going to be
296 discarded. */
297
298 location_t
299 stream_input_location_now (struct bitpack_d *bp, struct data_in *data_in)
300 {
301 location_t loc;
302 stream_input_location (&loc, bp, data_in);
303 data_in->location_cache.apply_location_cache ();
304 return loc;
305 }
306
/* Read a reference to a tree node from DATA_IN using input block IB.
   TAG is the expected node that should be found in IB, if TAG belongs
   to one of the indexable trees, expect to read a reference index to
   be looked up in one of the symbol tables, otherwise read the physical
   representation of the tree using stream_read_tree.  FN is the
   function scope for the read tree.  */
313
tree
lto_input_tree_ref (struct lto_input_block *ib, struct data_in *data_in,
		    struct function *fn, enum LTO_tags tag)
{
  unsigned HOST_WIDE_INT ix_u;
  tree result = NULL_TREE;

  /* Only indexable-tree reference tags are handled here.  */
  lto_tag_check_range (tag, LTO_field_decl_ref, LTO_namelist_decl_ref);

  /* Each case reads its index from the stream itself; the read must
     stay inside the case so nothing is consumed for an unexpected
     tag.  */
  switch (tag)
    {
    case LTO_type_ref:
      ix_u = streamer_read_uhwi (ib);
      result = lto_file_decl_data_get_type (data_in->file_data, ix_u);
      break;

    case LTO_ssa_name_ref:
      /* SSA names are indexed per-function FN, not per-file.  */
      ix_u = streamer_read_uhwi (ib);
      result = (*SSANAMES (fn))[ix_u];
      break;

    case LTO_field_decl_ref:
      ix_u = streamer_read_uhwi (ib);
      result = lto_file_decl_data_get_field_decl (data_in->file_data, ix_u);
      break;

    case LTO_function_decl_ref:
      ix_u = streamer_read_uhwi (ib);
      result = lto_file_decl_data_get_fn_decl (data_in->file_data, ix_u);
      break;

    case LTO_type_decl_ref:
      ix_u = streamer_read_uhwi (ib);
      result = lto_file_decl_data_get_type_decl (data_in->file_data, ix_u);
      break;

    case LTO_namespace_decl_ref:
      ix_u = streamer_read_uhwi (ib);
      result = lto_file_decl_data_get_namespace_decl (data_in->file_data, ix_u);
      break;

    case LTO_global_decl_ref:
    case LTO_result_decl_ref:
    case LTO_const_decl_ref:
    case LTO_imported_decl_ref:
    case LTO_label_decl_ref:
    case LTO_translation_unit_decl_ref:
    case LTO_namelist_decl_ref:
      /* All remaining reference kinds share the var-decl index space.  */
      ix_u = streamer_read_uhwi (ib);
      result = lto_file_decl_data_get_var_decl (data_in->file_data, ix_u);
      break;

    default:
      gcc_unreachable ();
    }

  /* A reference must always resolve.  */
  gcc_assert (result);

  return result;
}
374
375
376 /* Read and return a double-linked list of catch handlers from input
377 block IB, using descriptors in DATA_IN. */
378
379 static struct eh_catch_d *
380 lto_input_eh_catch_list (struct lto_input_block *ib, struct data_in *data_in,
381 eh_catch *last_p)
382 {
383 eh_catch first;
384 enum LTO_tags tag;
385
386 *last_p = first = NULL;
387 tag = streamer_read_record_start (ib);
388 while (tag)
389 {
390 tree list;
391 eh_catch n;
392
393 lto_tag_check_range (tag, LTO_eh_catch, LTO_eh_catch);
394
395 /* Read the catch node. */
396 n = ggc_cleared_alloc<eh_catch_d> ();
397 n->type_list = stream_read_tree (ib, data_in);
398 n->filter_list = stream_read_tree (ib, data_in);
399 n->label = stream_read_tree (ib, data_in);
400
401 /* Register all the types in N->FILTER_LIST. */
402 for (list = n->filter_list; list; list = TREE_CHAIN (list))
403 add_type_for_runtime (TREE_VALUE (list));
404
405 /* Chain N to the end of the list. */
406 if (*last_p)
407 (*last_p)->next_catch = n;
408 n->prev_catch = *last_p;
409 *last_p = n;
410
411 /* Set the head of the list the first time through the loop. */
412 if (first == NULL)
413 first = n;
414
415 tag = streamer_read_record_start (ib);
416 }
417
418 return first;
419 }
420
421
422 /* Read and return EH region IX from input block IB, using descriptors
423 in DATA_IN. */
424
static eh_region
input_eh_region (struct lto_input_block *ib, struct data_in *data_in, int ix)
{
  enum LTO_tags tag;
  eh_region r;

  /* Read the region header.  */
  tag = streamer_read_record_start (ib);
  if (tag == LTO_null)
    return NULL;

  r = ggc_cleared_alloc<eh_region_d> ();
  r->index = streamer_read_hwi (ib);

  /* Regions must be streamed in array order.  */
  gcc_assert (r->index == ix);

  /* Read all the region pointers as region numbers.  We'll fix up
     the pointers once the whole array has been read.  */
  r->outer = (eh_region) (intptr_t) streamer_read_hwi (ib);
  r->inner = (eh_region) (intptr_t) streamer_read_hwi (ib);
  r->next_peer = (eh_region) (intptr_t) streamer_read_hwi (ib);

  /* The tag encodes the region kind; each kind reads its own payload,
     so read order here must match the writer exactly.  */
  switch (tag)
    {
    case LTO_ert_cleanup:
      r->type = ERT_CLEANUP;
      break;

    case LTO_ert_try:
      {
	struct eh_catch_d *last_catch;
	r->type = ERT_TRY;
	r->u.eh_try.first_catch = lto_input_eh_catch_list (ib, data_in,
							   &last_catch);
	r->u.eh_try.last_catch = last_catch;
	break;
      }

    case LTO_ert_allowed_exceptions:
      {
	tree l;

	r->type = ERT_ALLOWED_EXCEPTIONS;
	r->u.allowed.type_list = stream_read_tree (ib, data_in);
	r->u.allowed.label = stream_read_tree (ib, data_in);
	r->u.allowed.filter = streamer_read_uhwi (ib);

	/* Register the allowed types for the runtime tables.  */
	for (l = r->u.allowed.type_list; l ; l = TREE_CHAIN (l))
	  add_type_for_runtime (TREE_VALUE (l));
      }
      break;

    case LTO_ert_must_not_throw:
      {
	r->type = ERT_MUST_NOT_THROW;
	r->u.must_not_throw.failure_decl = stream_read_tree (ib, data_in);
	bitpack_d bp = streamer_read_bitpack (ib);
	r->u.must_not_throw.failure_loc
	  = stream_input_location_now (&bp, data_in);
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Landing pads are likewise read as an index and fixed up later.  */
  r->landing_pads = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);

  return r;
}
495
496
497 /* Read and return EH landing pad IX from input block IB, using descriptors
498 in DATA_IN. */
499
static eh_landing_pad
input_eh_lp (struct lto_input_block *ib, struct data_in *data_in, int ix)
{
  enum LTO_tags tag;
  eh_landing_pad lp;

  /* Read the landing pad header.  */
  tag = streamer_read_record_start (ib);
  if (tag == LTO_null)
    return NULL;

  lto_tag_check_range (tag, LTO_eh_landing_pad, LTO_eh_landing_pad);

  lp = ggc_cleared_alloc<eh_landing_pad_d> ();
  lp->index = streamer_read_hwi (ib);
  /* Landing pads must be streamed in array order.  */
  gcc_assert (lp->index == ix);
  /* Pointer fields are streamed as indices and resolved later by
     fixup_eh_region_pointers.  */
  lp->next_lp = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);
  lp->region = (eh_region) (intptr_t) streamer_read_hwi (ib);
  lp->post_landing_pad = stream_read_tree (ib, data_in);

  return lp;
}
522
523
524 /* After reading the EH regions, pointers to peer and children regions
525 are region numbers. This converts all these region numbers into
526 real pointers into the rematerialized regions for FN. ROOT_REGION
527 is the region number for the root EH region in FN. */
528
static void
fixup_eh_region_pointers (struct function *fn, HOST_WIDE_INT root_region)
{
  unsigned i;
  vec<eh_region, va_gc> *eh_array = fn->eh->region_array;
  vec<eh_landing_pad, va_gc> *lp_array = fn->eh->lp_array;
  eh_region r;
  eh_landing_pad lp;

  gcc_assert (eh_array && lp_array);

  gcc_assert (root_region >= 0);
  fn->eh->region_tree = (*eh_array)[root_region];

  /* The pointer fields currently hold array indices smuggled through
     intptr_t casts (see input_eh_region/input_eh_lp); translate an
     index back into the real pointer.  */
#define FIXUP_EH_REGION(r) (r) = (*eh_array)[(HOST_WIDE_INT) (intptr_t) (r)]
#define FIXUP_EH_LP(p) (p) = (*lp_array)[(HOST_WIDE_INT) (intptr_t) (p)]

  /* Convert all the index numbers stored in pointer fields into
     pointers to the corresponding slots in the EH region array.  */
  FOR_EACH_VEC_ELT (*eh_array, i, r)
    {
      /* The array may contain NULL regions.  */
      if (r == NULL)
	continue;

      gcc_assert (i == (unsigned) r->index);
      FIXUP_EH_REGION (r->outer);
      FIXUP_EH_REGION (r->inner);
      FIXUP_EH_REGION (r->next_peer);
      FIXUP_EH_LP (r->landing_pads);
    }

  /* Convert all the index numbers stored in pointer fields into
     pointers to the corresponding slots in the EH landing pad array.  */
  FOR_EACH_VEC_ELT (*lp_array, i, lp)
    {
      /* The array may contain NULL landing pads.  */
      if (lp == NULL)
	continue;

      gcc_assert (i == (unsigned) lp->index);
      FIXUP_EH_LP (lp->next_lp);
      FIXUP_EH_REGION (lp->region);
    }

#undef FIXUP_EH_REGION
#undef FIXUP_EH_LP
}
577
578
579 /* Initialize EH support. */
580
581 void
582 lto_init_eh (void)
583 {
584 static bool eh_initialized_p = false;
585
586 if (eh_initialized_p)
587 return;
588
589 /* Contrary to most other FEs, we only initialize EH support when at
590 least one of the files in the set contains exception regions in
591 it. Since this happens much later than the call to init_eh in
592 lang_dependent_init, we have to set flag_exceptions and call
593 init_eh again to initialize the EH tables. */
594 flag_exceptions = 1;
595 init_eh ();
596
597 eh_initialized_p = true;
598 }
599
600
601 /* Read the exception table for FN from IB using the data descriptors
602 in DATA_IN. */
603
static void
input_eh_regions (struct lto_input_block *ib, struct data_in *data_in,
		  struct function *fn)
{
  HOST_WIDE_INT i, root_region, len;
  enum LTO_tags tag;

  /* A null tag means FN has no EH table at all.  */
  tag = streamer_read_record_start (ib);
  if (tag == LTO_null)
    return;

  lto_tag_check_range (tag, LTO_eh_table, LTO_eh_table);

  /* If the file contains EH regions, then it was compiled with
     -fexceptions.  In that case, initialize the backend EH
     machinery.  */
  lto_init_eh ();

  gcc_assert (fn->eh);

  root_region = streamer_read_hwi (ib);
  /* Guard against values that would be truncated below.  */
  gcc_assert (root_region == (int) root_region);

  /* Read the EH region array.  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      vec_safe_grow_cleared (fn->eh->region_array, len);
      for (i = 0; i < len; i++)
	{
	  eh_region r = input_eh_region (ib, data_in, i);
	  (*fn->eh->region_array)[i] = r;
	}
    }

  /* Read the landing pads.  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      vec_safe_grow_cleared (fn->eh->lp_array, len);
      for (i = 0; i < len; i++)
	{
	  eh_landing_pad lp = input_eh_lp (ib, data_in, i);
	  (*fn->eh->lp_array)[i] = lp;
	}
    }

  /* Read the runtime type data.  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      vec_safe_grow_cleared (fn->eh->ttype_data, len);
      for (i = 0; i < len; i++)
	{
	  tree ttype = stream_read_tree (ib, data_in);
	  (*fn->eh->ttype_data)[i] = ttype;
	}
    }

  /* Read the table of action chains.  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      /* The ARM EABI unwinder streams exception specs as trees;
	 everything else as raw bytes.  */
      if (targetm.arm_eabi_unwinder)
	{
	  vec_safe_grow_cleared (fn->eh->ehspec_data.arm_eabi, len);
	  for (i = 0; i < len; i++)
	    {
	      tree t = stream_read_tree (ib, data_in);
	      (*fn->eh->ehspec_data.arm_eabi)[i] = t;
	    }
	}
      else
	{
	  vec_safe_grow_cleared (fn->eh->ehspec_data.other, len);
	  for (i = 0; i < len; i++)
	    {
	      uchar c = streamer_read_uchar (ib);
	      (*fn->eh->ehspec_data.other)[i] = c;
	    }
	}
    }

  /* Reconstruct the EH region tree by fixing up the peer/children
     pointers.  */
  fixup_eh_region_pointers (fn, root_region);

  /* The EH table must be terminated by a null record.  */
  tag = streamer_read_record_start (ib);
  lto_tag_check_range (tag, LTO_null, LTO_null);
}
698
699
700 /* Make a new basic block with index INDEX in function FN. */
701
702 static basic_block
703 make_new_block (struct function *fn, unsigned int index)
704 {
705 basic_block bb = alloc_block ();
706 bb->index = index;
707 SET_BASIC_BLOCK_FOR_FN (fn, index, bb);
708 n_basic_blocks_for_fn (fn)++;
709 return bb;
710 }
711
712
713 /* Read the CFG for function FN from input block IB. */
714
static void
input_cfg (struct lto_input_block *ib, struct data_in *data_in,
	   struct function *fn,
	   int count_materialization_scale)
{
  unsigned int bb_count;
  basic_block p_bb;
  unsigned int i;
  int index;

  init_empty_tree_cfg_for_function (fn);
  init_ssa_operands (fn);

  profile_status_for_fn (fn) = streamer_read_enum (ib, profile_status_d,
						   PROFILE_LAST);

  bb_count = streamer_read_uhwi (ib);

  last_basic_block_for_fn (fn) = bb_count;
  if (bb_count > basic_block_info_for_fn (fn)->length ())
    vec_safe_grow_cleared (basic_block_info_for_fn (fn), bb_count);

  if (bb_count > label_to_block_map_for_fn (fn)->length ())
    vec_safe_grow_cleared (label_to_block_map_for_fn (fn), bb_count);

  /* Read each block and its successor edges; an index of -1
     terminates the block list.  */
  index = streamer_read_hwi (ib);
  while (index != -1)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
      unsigned int edge_count;

      if (bb == NULL)
	bb = make_new_block (fn, index);

      edge_count = streamer_read_uhwi (ib);

      /* Connect up the CFG.  */
      for (i = 0; i < edge_count; i++)
	{
	  unsigned int dest_index;
	  unsigned int edge_flags;
	  basic_block dest;
	  int probability;
	  gcov_type count;
	  edge e;

	  dest_index = streamer_read_uhwi (ib);
	  probability = (int) streamer_read_hwi (ib);
	  /* Profile counts are rescaled for the materialized unit.  */
	  count = apply_scale ((gcov_type) streamer_read_gcov_count (ib),
			       count_materialization_scale);
	  edge_flags = streamer_read_uhwi (ib);

	  dest = BASIC_BLOCK_FOR_FN (fn, dest_index);

	  /* Destination may not have been materialized yet.  */
	  if (dest == NULL)
	    dest = make_new_block (fn, dest_index);

	  e = make_edge (bb, dest, edge_flags);
	  e->probability = probability;
	  e->count = count;
	}

      index = streamer_read_hwi (ib);
    }

  /* Rebuild the prev_bb/next_bb chain in program order; again
     terminated by an index of -1.  */
  p_bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
  index = streamer_read_hwi (ib);
  while (index != -1)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
      bb->prev_bb = p_bb;
      p_bb->next_bb = bb;
      p_bb = bb;
      index = streamer_read_hwi (ib);
    }

  /* ??? The cfgloop interface is tied to cfun.  */
  gcc_assert (cfun == fn);

  /* Input the loop tree.  */
  unsigned n_loops = streamer_read_uhwi (ib);
  if (n_loops == 0)
    return;

  struct loops *loops = ggc_cleared_alloc<struct loops> ();
  init_loops_structure (fn, loops, n_loops);
  set_loops_for_fn (fn, loops);

  /* Input each loop and associate it with its loop header so
     flow_loops_find can rebuild the loop tree.  */
  for (unsigned i = 1; i < n_loops; ++i)
    {
      int header_index = streamer_read_hwi (ib);
      if (header_index == -1)
	{
	  /* Placeholder for a deleted loop; keeps loop numbers stable.  */
	  loops->larray->quick_push (NULL);
	  continue;
	}

      struct loop *loop = alloc_loop ();
      loop->header = BASIC_BLOCK_FOR_FN (fn, header_index);
      loop->header->loop_father = loop;

      /* Read everything copy_loop_info copies.  */
      loop->estimate_state = streamer_read_enum (ib, loop_estimation, EST_LAST);
      loop->any_upper_bound = streamer_read_hwi (ib);
      if (loop->any_upper_bound)
	loop->nb_iterations_upper_bound = streamer_read_widest_int (ib);
      loop->any_likely_upper_bound = streamer_read_hwi (ib);
      if (loop->any_likely_upper_bound)
	loop->nb_iterations_likely_upper_bound = streamer_read_widest_int (ib);
      loop->any_estimate = streamer_read_hwi (ib);
      if (loop->any_estimate)
	loop->nb_iterations_estimate = streamer_read_widest_int (ib);

      /* Read OMP SIMD related info.  */
      loop->safelen = streamer_read_hwi (ib);
      loop->dont_vectorize = streamer_read_hwi (ib);
      loop->force_vectorize = streamer_read_hwi (ib);
      loop->simduid = stream_read_tree (ib, data_in);

      place_new_loop (fn, loop);

      /* flow_loops_find doesn't like loops not in the tree, hook them
	 all as siblings of the tree root temporarily.  */
      flow_loop_tree_node_add (loops->tree_root, loop);
    }

  /* Rebuild the loop tree.  */
  flow_loops_find (loops);
}
846
847
848 /* Read the SSA names array for function FN from DATA_IN using input
849 block IB. */
850
851 static void
852 input_ssa_names (struct lto_input_block *ib, struct data_in *data_in,
853 struct function *fn)
854 {
855 unsigned int i, size;
856
857 size = streamer_read_uhwi (ib);
858 init_ssanames (fn, size);
859
860 i = streamer_read_uhwi (ib);
861 while (i)
862 {
863 tree ssa_name, name;
864 bool is_default_def;
865
866 /* Skip over the elements that had been freed. */
867 while (SSANAMES (fn)->length () < i)
868 SSANAMES (fn)->quick_push (NULL_TREE);
869
870 is_default_def = (streamer_read_uchar (ib) != 0);
871 name = stream_read_tree (ib, data_in);
872 ssa_name = make_ssa_name_fn (fn, name, NULL);
873
874 if (is_default_def)
875 {
876 set_ssa_default_def (cfun, SSA_NAME_VAR (ssa_name), ssa_name);
877 SSA_NAME_DEF_STMT (ssa_name) = gimple_build_nop ();
878 }
879
880 i = streamer_read_uhwi (ib);
881 }
882 }
883
884
885 /* Go through all NODE edges and fixup call_stmt pointers
886 so they point to STMTS. */
887
888 static void
889 fixup_call_stmt_edges_1 (struct cgraph_node *node, gimple **stmts,
890 struct function *fn)
891 {
892 #define STMT_UID_NOT_IN_RANGE(uid) \
893 (gimple_stmt_max_uid (fn) < uid || uid == 0)
894
895 struct cgraph_edge *cedge;
896 struct ipa_ref *ref = NULL;
897 unsigned int i;
898
899 for (cedge = node->callees; cedge; cedge = cedge->next_callee)
900 {
901 if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
902 fatal_error (input_location,
903 "Cgraph edge statement index out of range");
904 cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
905 if (!cedge->call_stmt)
906 fatal_error (input_location,
907 "Cgraph edge statement index not found");
908 }
909 for (cedge = node->indirect_calls; cedge; cedge = cedge->next_callee)
910 {
911 if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
912 fatal_error (input_location,
913 "Cgraph edge statement index out of range");
914 cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
915 if (!cedge->call_stmt)
916 fatal_error (input_location, "Cgraph edge statement index not found");
917 }
918 for (i = 0; node->iterate_reference (i, ref); i++)
919 if (ref->lto_stmt_uid)
920 {
921 if (STMT_UID_NOT_IN_RANGE (ref->lto_stmt_uid))
922 fatal_error (input_location,
923 "Reference statement index out of range");
924 ref->stmt = stmts[ref->lto_stmt_uid - 1];
925 if (!ref->stmt)
926 fatal_error (input_location, "Reference statement index not found");
927 }
928 }
929
930
931 /* Fixup call_stmt pointers in NODE and all clones. */
932
static void
fixup_call_stmt_edges (struct cgraph_node *orig, gimple **stmts)
{
  struct cgraph_node *node;
  struct function *fn;

  /* All clones share the body of the root original node; climb to it
     first so FN is the function whose statements STMTS indexes.  */
  while (orig->clone_of)
    orig = orig->clone_of;
  fn = DECL_STRUCT_FUNCTION (orig->decl);

  /* Thunks have no call statements of their own to fix up.  */
  if (!orig->thunk.thunk_p)
    fixup_call_stmt_edges_1 (orig, stmts, fn);
  if (orig->clones)
    for (node = orig->clones; node != orig;)
      {
	if (!node->thunk.thunk_p)
	  fixup_call_stmt_edges_1 (node, stmts, fn);
	/* Pre-order walk of the clone tree: descend into children
	   first, else move to the next sibling, else climb back up
	   until a sibling exists or we return to ORIG.  */
	if (node->clones)
	  node = node->clones;
	else if (node->next_sibling_clone)
	  node = node->next_sibling_clone;
	else
	  {
	    while (node != orig && !node->next_sibling_clone)
	      node = node->clone_of;
	    if (node != orig)
	      node = node->next_sibling_clone;
	  }
      }
}
963
964
965 /* Input the base body of struct function FN from DATA_IN
966 using input block IB. */
967
static void
input_struct_function_base (struct function *fn, struct data_in *data_in,
			    struct lto_input_block *ib)
{
  struct bitpack_d bp;
  int len;

  /* Read the static chain and non-local goto save area.  */
  fn->static_chain_decl = stream_read_tree (ib, data_in);
  fn->nonlocal_goto_save_area = stream_read_tree (ib, data_in);

  /* Read all the local symbols.  */
  len = streamer_read_hwi (ib);
  if (len > 0)
    {
      int i;
      vec_safe_grow_cleared (fn->local_decls, len);
      for (i = 0; i < len; i++)
	{
	  tree t = stream_read_tree (ib, data_in);
	  (*fn->local_decls)[i] = t;
	}
    }

  /* Input the current IL state of the function.  */
  fn->curr_properties = streamer_read_uhwi (ib);

  /* Read all the attributes for FN.  The unpack order and widths must
     exactly mirror the pack order on the writer side.  */
  bp = streamer_read_bitpack (ib);
  fn->is_thunk = bp_unpack_value (&bp, 1);
  fn->has_local_explicit_reg_vars = bp_unpack_value (&bp, 1);
  fn->returns_pcc_struct = bp_unpack_value (&bp, 1);
  fn->returns_struct = bp_unpack_value (&bp, 1);
  fn->can_throw_non_call_exceptions = bp_unpack_value (&bp, 1);
  fn->can_delete_dead_exceptions = bp_unpack_value (&bp, 1);
  fn->always_inline_functions_inlined = bp_unpack_value (&bp, 1);
  fn->after_inlining = bp_unpack_value (&bp, 1);
  fn->stdarg = bp_unpack_value (&bp, 1);
  fn->has_nonlocal_label = bp_unpack_value (&bp, 1);
  fn->has_forced_label_in_static = bp_unpack_value (&bp, 1);
  fn->calls_alloca = bp_unpack_value (&bp, 1);
  fn->calls_setjmp = bp_unpack_value (&bp, 1);
  fn->has_force_vectorize_loops = bp_unpack_value (&bp, 1);
  fn->has_simduid_loops = bp_unpack_value (&bp, 1);
  fn->va_list_fpr_size = bp_unpack_value (&bp, 8);
  fn->va_list_gpr_size = bp_unpack_value (&bp, 8);
  fn->last_clique = bp_unpack_value (&bp, sizeof (short) * 8);

  /* Input the function start and end loci.  */
  fn->function_start_locus = stream_input_location_now (&bp, data_in);
  fn->function_end_locus = stream_input_location_now (&bp, data_in);
}
1020
1021
1022 /* Read the body of function FN_DECL from DATA_IN using input block IB. */
1023
1024 static void
1025 input_function (tree fn_decl, struct data_in *data_in,
1026 struct lto_input_block *ib, struct lto_input_block *ib_cfg)
1027 {
1028 struct function *fn;
1029 enum LTO_tags tag;
1030 gimple **stmts;
1031 basic_block bb;
1032 struct cgraph_node *node;
1033
1034 tag = streamer_read_record_start (ib);
1035 lto_tag_check (tag, LTO_function);
1036
1037 /* Read decls for parameters and args. */
1038 DECL_RESULT (fn_decl) = stream_read_tree (ib, data_in);
1039 DECL_ARGUMENTS (fn_decl) = streamer_read_chain (ib, data_in);
1040
1041 /* Read the tree of lexical scopes for the function. */
1042 DECL_INITIAL (fn_decl) = stream_read_tree (ib, data_in);
1043 unsigned block_leaf_count = streamer_read_uhwi (ib);
1044 while (block_leaf_count--)
1045 stream_read_tree (ib, data_in);
1046
1047 if (!streamer_read_uhwi (ib))
1048 return;
1049
1050 push_struct_function (fn_decl);
1051 fn = DECL_STRUCT_FUNCTION (fn_decl);
1052 init_tree_ssa (fn);
1053 /* We input IL in SSA form. */
1054 cfun->gimple_df->in_ssa_p = true;
1055
1056 gimple_register_cfg_hooks ();
1057
1058 node = cgraph_node::get (fn_decl);
1059 if (!node)
1060 node = cgraph_node::create (fn_decl);
1061 input_struct_function_base (fn, data_in, ib);
1062 input_cfg (ib_cfg, data_in, fn, node->count_materialization_scale);
1063
1064 /* Read all the SSA names. */
1065 input_ssa_names (ib, data_in, fn);
1066
1067 /* Read the exception handling regions in the function. */
1068 input_eh_regions (ib, data_in, fn);
1069
1070 gcc_assert (DECL_INITIAL (fn_decl));
1071 DECL_SAVED_TREE (fn_decl) = NULL_TREE;
1072
1073 /* Read all the basic blocks. */
1074 tag = streamer_read_record_start (ib);
1075 while (tag)
1076 {
1077 input_bb (ib, tag, data_in, fn,
1078 node->count_materialization_scale);
1079 tag = streamer_read_record_start (ib);
1080 }
1081
1082 /* Fix up the call statements that are mentioned in the callgraph
1083 edges. */
1084 set_gimple_stmt_max_uid (cfun, 0);
1085 FOR_ALL_BB_FN (bb, cfun)
1086 {
1087 gimple_stmt_iterator gsi;
1088 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1089 {
1090 gimple *stmt = gsi_stmt (gsi);
1091 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1092 }
1093 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1094 {
1095 gimple *stmt = gsi_stmt (gsi);
1096 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1097 }
1098 }
1099 stmts = (gimple **) xcalloc (gimple_stmt_max_uid (fn), sizeof (gimple *));
1100 FOR_ALL_BB_FN (bb, cfun)
1101 {
1102 gimple_stmt_iterator bsi = gsi_start_phis (bb);
1103 while (!gsi_end_p (bsi))
1104 {
1105 gimple *stmt = gsi_stmt (bsi);
1106 gsi_next (&bsi);
1107 stmts[gimple_uid (stmt)] = stmt;
1108 }
1109 bsi = gsi_start_bb (bb);
1110 while (!gsi_end_p (bsi))
1111 {
1112 gimple *stmt = gsi_stmt (bsi);
1113 bool remove = false;
1114 /* If we're recompiling LTO objects with debug stmts but
1115 we're not supposed to have debug stmts, remove them now.
1116 We can't remove them earlier because this would cause uid
1117 mismatches in fixups, but we can do it at this point, as
1118 long as debug stmts don't require fixups.
1119 Similarly remove all IFN_*SAN_* internal calls */
1120 if (!flag_wpa)
1121 {
1122 if (!MAY_HAVE_DEBUG_STMTS && is_gimple_debug (stmt))
1123 remove = true;
1124 if (is_gimple_call (stmt)
1125 && gimple_call_internal_p (stmt))
1126 {
1127 switch (gimple_call_internal_fn (stmt))
1128 {
1129 case IFN_UBSAN_NULL:
1130 if ((flag_sanitize
1131 & (SANITIZE_NULL | SANITIZE_ALIGNMENT)) == 0)
1132 remove = true;
1133 break;
1134 case IFN_UBSAN_BOUNDS:
1135 if ((flag_sanitize & SANITIZE_BOUNDS) == 0)
1136 remove = true;
1137 break;
1138 case IFN_UBSAN_VPTR:
1139 if ((flag_sanitize & SANITIZE_VPTR) == 0)
1140 remove = true;
1141 break;
1142 case IFN_UBSAN_OBJECT_SIZE:
1143 if ((flag_sanitize & SANITIZE_OBJECT_SIZE) == 0)
1144 remove = true;
1145 break;
1146 case IFN_ASAN_MARK:
1147 if ((flag_sanitize & SANITIZE_ADDRESS) == 0)
1148 remove = true;
1149 break;
1150 case IFN_TSAN_FUNC_EXIT:
1151 if ((flag_sanitize & SANITIZE_THREAD) == 0)
1152 remove = true;
1153 break;
1154 default:
1155 break;
1156 }
1157 gcc_assert (!remove || gimple_call_lhs (stmt) == NULL_TREE);
1158 }
1159 }
1160 if (remove)
1161 {
1162 gimple_stmt_iterator gsi = bsi;
1163 gsi_next (&bsi);
1164 unlink_stmt_vdef (stmt);
1165 release_defs (stmt);
1166 gsi_remove (&gsi, true);
1167 }
1168 else
1169 {
1170 gsi_next (&bsi);
1171 stmts[gimple_uid (stmt)] = stmt;
1172 }
1173 }
1174 }
1175
1176 /* Set the gimple body to the statement sequence in the entry
1177 basic block. FIXME lto, this is fairly hacky. The existence
1178 of a gimple body is used by the cgraph routines, but we should
1179 really use the presence of the CFG. */
1180 {
1181 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
1182 gimple_set_body (fn_decl, bb_seq (ei_edge (ei)->dest));
1183 }
1184
1185 fixup_call_stmt_edges (node, stmts);
1186 execute_all_ipa_stmt_fixups (node, stmts);
1187
1188 update_ssa (TODO_update_ssa_only_virtuals);
1189 free_dominance_info (CDI_DOMINATORS);
1190 free_dominance_info (CDI_POST_DOMINATORS);
1191 free (stmts);
1192 pop_cfun ();
1193 }
1194
/* Read the constructor of variable VAR from DATA_IN using input block IB.  */
1196
1197 static void
1198 input_constructor (tree var, struct data_in *data_in,
1199 struct lto_input_block *ib)
1200 {
1201 DECL_INITIAL (var) = stream_read_tree (ib, data_in);
1202 }
1203
1204
/* Read the body from DATA for function NODE and fill it in.
   FILE_DATA are the global decls and types.  SECTION_TYPE is either
   LTO_section_function_body or LTO_section_static_initializer.  If
   section type is LTO_section_function_body, NODE must be the symtab
   node for that function.  */
1210
static void
lto_read_body_or_constructor (struct lto_file_decl_data *file_data, struct symtab_node *node,
			      const char *data, enum lto_section_type section_type)
{
  const struct lto_function_header *header;
  struct data_in *data_in;
  int cfg_offset;
  int main_offset;
  int string_offset;
  tree fn_decl = node->decl;

  /* The section layout is: lto_function_header, then (for functions
     only) the CFG stream, then the main tree stream, then the string
     table.  Variable initializers have no CFG stream.  */
  header = (const struct lto_function_header *) data;
  if (TREE_CODE (node->decl) == FUNCTION_DECL)
    {
      cfg_offset = sizeof (struct lto_function_header);
      main_offset = cfg_offset + header->cfg_size;
      string_offset = main_offset + header->main_size;
    }
  else
    {
      main_offset = sizeof (struct lto_function_header);
      string_offset = main_offset + header->main_size;
    }

  data_in = lto_data_in_create (file_data, data + string_offset,
				header->string_size, vNULL);

  if (section_type == LTO_section_function_body)
    {
      struct lto_in_decl_state *decl_state;
      unsigned from;

      gcc_checking_assert (node);

      /* Use the function's decl state while reading its body so that
	 decl/type references resolve against the per-function tables;
	 restored to the global state below.  */
      decl_state = lto_get_function_in_decl_state (file_data, fn_decl);
      gcc_assert (decl_state);
      file_data->current_decl_state = decl_state;


      /* Remember how many trees were already in the reader cache so
	 that the fixup loop below only touches trees streamed for
	 this body.  */
      from = data_in->reader_cache->nodes.length ();
      lto_input_block ib_main (data + main_offset, header->main_size,
			       file_data->mode_table);
      if (TREE_CODE (node->decl) == FUNCTION_DECL)
	{
	  lto_input_block ib_cfg (data + cfg_offset, header->cfg_size,
				  file_data->mode_table);
	  input_function (fn_decl, data_in, &ib_main, &ib_cfg);
	}
      else
	input_constructor (fn_decl, data_in, &ib_main);
      data_in->location_cache.apply_location_cache ();
      /* And fixup types we streamed locally.  */
	{
	  struct streamer_tree_cache_d *cache = data_in->reader_cache;
	  unsigned len = cache->nodes.length ();
	  unsigned i;
	  /* Walk only the entries appended while reading this body.  */
	  for (i = len; i-- > from;)
	    {
	      tree t = streamer_tree_cache_get_tree (cache, i);
	      if (t == NULL_TREE)
		continue;

	      if (TYPE_P (t))
		{
		  /* Locally streamed types have no canonical type yet;
		     make the main variant canonical where applicable and
		     link the type into its main variant's variant list.  */
		  gcc_assert (TYPE_CANONICAL (t) == NULL_TREE);
		  if (type_with_alias_set_p (t)
		      && canonical_type_used_p (t))
		    TYPE_CANONICAL (t) = TYPE_MAIN_VARIANT (t);
		  if (TYPE_MAIN_VARIANT (t) != t)
		    {
		      gcc_assert (TYPE_NEXT_VARIANT (t) == NULL_TREE);
		      TYPE_NEXT_VARIANT (t)
			= TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t));
		      TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t)) = t;
		    }
		}
	    }
	}

      /* Restore decl state */
      file_data->current_decl_state = file_data->global_decl_state;
    }

  lto_data_in_delete (data_in);
}
1298
1299
1300 /* Read the body of NODE using DATA. FILE_DATA holds the global
1301 decls and types. */
1302
void
lto_input_function_body (struct lto_file_decl_data *file_data,
			 struct cgraph_node *node, const char *data)
{
  /* Thin wrapper over the common reader; it distinguishes functions
     from variables by TREE_CODE (node->decl).  */
  lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
}
1309
/* Read the initializer (constructor) of variable NODE using DATA.
   FILE_DATA holds the global decls and types.  */
1312
void
lto_input_variable_constructor (struct lto_file_decl_data *file_data,
				struct varpool_node *node, const char *data)
{
  /* Thin wrapper over the common reader; it distinguishes functions
     from variables by TREE_CODE (node->decl), so the function-body
     section type is passed here as well.  */
  lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
}
1319
1320
1321 /* Read the physical representation of a tree node EXPR from
1322 input block IB using the per-file context in DATA_IN. */
1323
1324 static void
1325 lto_read_tree_1 (struct lto_input_block *ib, struct data_in *data_in, tree expr)
1326 {
1327 /* Read all the bitfield values in EXPR. Note that for LTO, we
1328 only write language-independent bitfields, so no more unpacking is
1329 needed. */
1330 streamer_read_tree_bitfields (ib, data_in, expr);
1331
1332 /* Read all the pointer fields in EXPR. */
1333 streamer_read_tree_body (ib, data_in, expr);
1334
1335 /* Read any LTO-specific data not read by the tree streamer. */
1336 if (DECL_P (expr)
1337 && TREE_CODE (expr) != FUNCTION_DECL
1338 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
1339 DECL_INITIAL (expr) = stream_read_tree (ib, data_in);
1340 }
1341
1342 /* Read the physical representation of a tree node with tag TAG from
1343 input block IB using the per-file context in DATA_IN. */
1344
1345 static tree
1346 lto_read_tree (struct lto_input_block *ib, struct data_in *data_in,
1347 enum LTO_tags tag, hashval_t hash)
1348 {
1349 /* Instantiate a new tree node. */
1350 tree result = streamer_alloc_tree (ib, data_in, tag);
1351
1352 /* Enter RESULT in the reader cache. This will make RESULT
1353 available so that circular references in the rest of the tree
1354 structure can be resolved in subsequent calls to stream_read_tree. */
1355 streamer_tree_cache_append (data_in->reader_cache, result, hash);
1356
1357 lto_read_tree_1 (ib, data_in, result);
1358
1359 /* end_marker = */ streamer_read_uchar (ib);
1360
1361 return result;
1362 }
1363
1364
1365 /* Populate the reader cache with trees materialized from the SCC
1366 following in the IB, DATA_IN stream. */
1367
1368 hashval_t
1369 lto_input_scc (struct lto_input_block *ib, struct data_in *data_in,
1370 unsigned *len, unsigned *entry_len)
1371 {
1372 /* A blob of unnamed tree nodes, fill the cache from it and
1373 recurse. */
1374 unsigned size = streamer_read_uhwi (ib);
1375 hashval_t scc_hash = streamer_read_uhwi (ib);
1376 unsigned scc_entry_len = 1;
1377
1378 if (size == 1)
1379 {
1380 enum LTO_tags tag = streamer_read_record_start (ib);
1381 lto_input_tree_1 (ib, data_in, tag, scc_hash);
1382 }
1383 else
1384 {
1385 unsigned int first = data_in->reader_cache->nodes.length ();
1386 tree result;
1387
1388 scc_entry_len = streamer_read_uhwi (ib);
1389
1390 /* Materialize size trees by reading their headers. */
1391 for (unsigned i = 0; i < size; ++i)
1392 {
1393 enum LTO_tags tag = streamer_read_record_start (ib);
1394 if (tag == LTO_null
1395 || (tag >= LTO_field_decl_ref && tag <= LTO_global_decl_ref)
1396 || tag == LTO_tree_pickle_reference
1397 || tag == LTO_integer_cst
1398 || tag == LTO_tree_scc)
1399 gcc_unreachable ();
1400
1401 result = streamer_alloc_tree (ib, data_in, tag);
1402 streamer_tree_cache_append (data_in->reader_cache, result, 0);
1403 }
1404
1405 /* Read the tree bitpacks and references. */
1406 for (unsigned i = 0; i < size; ++i)
1407 {
1408 result = streamer_tree_cache_get_tree (data_in->reader_cache,
1409 first + i);
1410 lto_read_tree_1 (ib, data_in, result);
1411 /* end_marker = */ streamer_read_uchar (ib);
1412 }
1413 }
1414
1415 *len = size;
1416 *entry_len = scc_entry_len;
1417 return scc_hash;
1418 }
1419
1420
1421 /* Read a tree from input block IB using the per-file context in
1422 DATA_IN. This context is used, for example, to resolve references
1423 to previously read nodes. */
1424
1425 tree
1426 lto_input_tree_1 (struct lto_input_block *ib, struct data_in *data_in,
1427 enum LTO_tags tag, hashval_t hash)
1428 {
1429 tree result;
1430
1431 gcc_assert ((unsigned) tag < (unsigned) LTO_NUM_TAGS);
1432
1433 if (tag == LTO_null)
1434 result = NULL_TREE;
1435 else if (tag >= LTO_field_decl_ref && tag <= LTO_namelist_decl_ref)
1436 {
1437 /* If TAG is a reference to an indexable tree, the next value
1438 in IB is the index into the table where we expect to find
1439 that tree. */
1440 result = lto_input_tree_ref (ib, data_in, cfun, tag);
1441 }
1442 else if (tag == LTO_tree_pickle_reference)
1443 {
1444 /* If TAG is a reference to a previously read tree, look it up in
1445 the reader cache. */
1446 result = streamer_get_pickled_tree (ib, data_in);
1447 }
1448 else if (tag == LTO_integer_cst)
1449 {
1450 /* For shared integer constants in singletons we can use the
1451 existing tree integer constant merging code. */
1452 tree type = stream_read_tree (ib, data_in);
1453 unsigned HOST_WIDE_INT len = streamer_read_uhwi (ib);
1454 unsigned HOST_WIDE_INT i;
1455 HOST_WIDE_INT a[WIDE_INT_MAX_ELTS];
1456
1457 for (i = 0; i < len; i++)
1458 a[i] = streamer_read_hwi (ib);
1459 gcc_assert (TYPE_PRECISION (type) <= MAX_BITSIZE_MODE_ANY_INT);
1460 result = wide_int_to_tree (type, wide_int::from_array
1461 (a, len, TYPE_PRECISION (type)));
1462 streamer_tree_cache_append (data_in->reader_cache, result, hash);
1463 }
1464 else if (tag == LTO_tree_scc)
1465 gcc_unreachable ();
1466 else
1467 {
1468 /* Otherwise, materialize a new node from IB. */
1469 result = lto_read_tree (ib, data_in, tag, hash);
1470 }
1471
1472 return result;
1473 }
1474
1475 tree
1476 lto_input_tree (struct lto_input_block *ib, struct data_in *data_in)
1477 {
1478 enum LTO_tags tag;
1479
1480 /* Input and skip SCCs. */
1481 while ((tag = streamer_read_record_start (ib)) == LTO_tree_scc)
1482 {
1483 unsigned len, entry_len;
1484 lto_input_scc (ib, data_in, &len, &entry_len);
1485 }
1486 return lto_input_tree_1 (ib, data_in, tag, 0);
1487 }
1488
1489
1490 /* Input toplevel asms. */
1491
1492 void
1493 lto_input_toplevel_asms (struct lto_file_decl_data *file_data, int order_base)
1494 {
1495 size_t len;
1496 const char *data = lto_get_section_data (file_data, LTO_section_asm,
1497 NULL, &len);
1498 const struct lto_simple_header_with_strings *header
1499 = (const struct lto_simple_header_with_strings *) data;
1500 int string_offset;
1501 struct data_in *data_in;
1502 tree str;
1503
1504 if (! data)
1505 return;
1506
1507 string_offset = sizeof (*header) + header->main_size;
1508
1509 lto_input_block ib (data + sizeof (*header), header->main_size,
1510 file_data->mode_table);
1511
1512 data_in = lto_data_in_create (file_data, data + string_offset,
1513 header->string_size, vNULL);
1514
1515 while ((str = streamer_read_string_cst (data_in, &ib)))
1516 {
1517 asm_node *node = symtab->finalize_toplevel_asm (str);
1518 node->order = streamer_read_hwi (&ib) + order_base;
1519 if (node->order >= symtab->order)
1520 symtab->order = node->order + 1;
1521 }
1522
1523 lto_data_in_delete (data_in);
1524
1525 lto_free_section_data (file_data, LTO_section_asm, NULL, data, len);
1526 }
1527
1528
1529 /* Input mode table. */
1530
void
lto_input_mode_table (struct lto_file_decl_data *file_data)
{
  size_t len;
  const char *data = lto_get_section_data (file_data, LTO_section_mode_table,
					   NULL, &len);
  if (! data)
    {
      internal_error ("cannot read LTO mode table from %s",
		      file_data->file_name);
      return;
    }

  /* TABLE maps the writer's mode numbers (8 bits, so at most 256
     modes) to this host's machine_mode values; entries not filled in
     below stay VOIDmode (zero) from the cleared allocation.  */
  unsigned char *table = ggc_cleared_vec_alloc<unsigned char> (1 << 8);
  file_data->mode_table = table;
  const struct lto_simple_header_with_strings *header
    = (const struct lto_simple_header_with_strings *) data;
  int string_offset;
  struct data_in *data_in;
  string_offset = sizeof (*header) + header->main_size;

  lto_input_block ib (data + sizeof (*header), header->main_size, NULL);
  data_in = lto_data_in_create (file_data, data + string_offset,
				header->string_size, vNULL);
  bitpack_d bp = streamer_read_bitpack (&ib);

  /* VOIDmode and BLKmode always map to themselves.  */
  table[VOIDmode] = VOIDmode;
  table[BLKmode] = BLKmode;
  unsigned int m;
  /* Each record starts with the writer's mode number M; a VOIDmode
     record terminates the table.  */
  while ((m = bp_unpack_value (&bp, 8)) != VOIDmode)
    {
      /* Unpack the streamed properties of mode M.  */
      enum mode_class mclass
	= bp_unpack_enum (&bp, mode_class, MAX_MODE_CLASS);
      unsigned int size = bp_unpack_value (&bp, 8);
      unsigned int prec = bp_unpack_value (&bp, 16);
      machine_mode inner = (machine_mode) bp_unpack_value (&bp, 8);
      unsigned int nunits = bp_unpack_value (&bp, 8);
      unsigned int ibit = 0, fbit = 0;
      unsigned int real_fmt_len = 0;
      const char *real_fmt_name = NULL;
      /* Some mode classes stream extra attributes.  */
      switch (mclass)
	{
	case MODE_FRACT:
	case MODE_UFRACT:
	case MODE_ACCUM:
	case MODE_UACCUM:
	  ibit = bp_unpack_value (&bp, 8);
	  fbit = bp_unpack_value (&bp, 8);
	  break;
	case MODE_FLOAT:
	case MODE_DECIMAL_FLOAT:
	  real_fmt_name = bp_unpack_indexed_string (data_in, &bp,
						    &real_fmt_len);
	  break;
	default:
	  break;
	}
      /* First search just the GET_CLASS_NARROWEST_MODE to wider modes,
	 if not found, fallback to all modes.  */
      int pass;
      for (pass = 0; pass < 2; pass++)
	for (machine_mode mr = pass ? VOIDmode
	     : GET_CLASS_NARROWEST_MODE (mclass);
	     pass ? mr < MAX_MACHINE_MODE : mr != VOIDmode;
	     pass ? mr = (machine_mode) (mr + 1)
	     : mr = GET_MODE_WIDER_MODE (mr))
	  if (GET_MODE_CLASS (mr) != mclass
	      || GET_MODE_SIZE (mr) != size
	      || GET_MODE_PRECISION (mr) != prec
	      || (inner == m
		  ? GET_MODE_INNER (mr) != mr
		  : GET_MODE_INNER (mr) != table[(int) inner])
	      || GET_MODE_IBIT (mr) != ibit
	      || GET_MODE_FBIT (mr) != fbit
	      || GET_MODE_NUNITS (mr) != nunits)
	    continue;
	  else if ((mclass == MODE_FLOAT || mclass == MODE_DECIMAL_FLOAT)
		   && strcmp (REAL_MODE_FORMAT (mr)->name, real_fmt_name) != 0)
	    continue;
	  else
	    {
	      /* Found a match.  Setting PASS to 2 stops the outer loop;
		 after its increment PASS ends at 3, so an exhausted
		 search (PASS == 2) is distinguishable below.  */
	      table[m] = mr;
	      pass = 2;
	      break;
	    }
      unsigned int mname_len;
      const char *mname = bp_unpack_indexed_string (data_in, &bp, &mname_len);
      /* PASS == 2 here means no host mode matched.  */
      if (pass == 2)
	{
	  switch (mclass)
	    {
	    case MODE_VECTOR_INT:
	    case MODE_VECTOR_FLOAT:
	    case MODE_VECTOR_FRACT:
	    case MODE_VECTOR_UFRACT:
	    case MODE_VECTOR_ACCUM:
	    case MODE_VECTOR_UACCUM:
	      /* For unsupported vector modes just use BLKmode,
		 if the scalar mode is supported.  */
	      if (table[(int) inner] != VOIDmode)
		{
		  table[m] = BLKmode;
		  break;
		}
	      /* FALLTHRU */
	    default:
	      fatal_error (UNKNOWN_LOCATION, "unsupported mode %s\n", mname);
	      break;
	    }
	}
    }
  lto_data_in_delete (data_in);

  lto_free_section_data (file_data, LTO_section_mode_table, NULL, data, len);
}
1646
1647
1648 /* Initialization for the LTO reader. */
1649
1650 void
1651 lto_reader_init (void)
1652 {
1653 lto_streamer_init ();
1654 file_name_hash_table
1655 = new hash_table<freeing_string_slot_hasher> (37);
1656 }
1657
1658
1659 /* Create a new data_in object for FILE_DATA. STRINGS is the string
1660 table to use with LEN strings. RESOLUTIONS is the vector of linker
1661 resolutions (NULL if not using a linker plugin). */
1662
1663 struct data_in *
1664 lto_data_in_create (struct lto_file_decl_data *file_data, const char *strings,
1665 unsigned len,
1666 vec<ld_plugin_symbol_resolution_t> resolutions)
1667 {
1668 struct data_in *data_in = new (struct data_in);
1669 data_in->file_data = file_data;
1670 data_in->strings = strings;
1671 data_in->strings_len = len;
1672 data_in->globals_resolution = resolutions;
1673 data_in->reader_cache = streamer_tree_cache_create (false, false, true);
1674 return data_in;
1675 }
1676
1677
1678 /* Remove DATA_IN. */
1679
void
lto_data_in_delete (struct data_in *data_in)
{
  /* Release the resources DATA_IN owns before freeing DATA_IN itself.  */
  data_in->globals_resolution.release ();
  streamer_tree_cache_delete (data_in->reader_cache);
  delete data_in;
}