1 /* Read the GIMPLE representation from a file stream.
2
3 Copyright (C) 2009-2016 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "cfghooks.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "gimple-streamer.h"
35 #include "toplev.h"
36 #include "gimple-iterator.h"
37 #include "tree-cfg.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "tree-ssa.h"
41 #include "except.h"
42 #include "cgraph.h"
43 #include "cfgloop.h"
44
45
46 struct freeing_string_slot_hasher : string_slot_hasher
47 {
48 static inline void remove (value_type *);
49 };
50
51 inline void
52 freeing_string_slot_hasher::remove (value_type *v)
53 {
54 free (v);
55 }
56
57 /* The table to hold the file names. */
58 static hash_table<freeing_string_slot_hasher> *file_name_hash_table;
59
60
61 /* Check that tag ACTUAL has one of the given values. NTAGS is the
62 number of valid tag values to check. */
63
64 void
65 lto_tag_check_set (enum LTO_tags actual, int ntags, ...)
66 {
67 va_list ap;
68 int i;
69
70 va_start (ap, ntags);
71 for (i = 0; i < ntags; i++)
72 if ((unsigned) actual == va_arg (ap, unsigned))
73 {
74 va_end (ap);
75 return;
76 }
77
78 va_end (ap);
79 internal_error ("bytecode stream: unexpected tag %s", lto_tag_name (actual));
80 }
81
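/* Illustrative usage sketch (comment only, not compiled): a reader that
   accepts either of two record tags could check them with

     enum LTO_tags tag = streamer_read_record_start (ib);
     lto_tag_check_set (tag, 2, LTO_bb0, LTO_bb1);

   The two tags above are only examples; any members of enum LTO_tags may
   be passed as the variadic arguments.  */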
82
83 /* Read LENGTH bytes from input block IB to ADDR. */
84
85 void
86 lto_input_data_block (struct lto_input_block *ib, void *addr, size_t length)
87 {
88 size_t i;
89 unsigned char *const buffer = (unsigned char *const) addr;
90
91 for (i = 0; i < length; i++)
92 buffer[i] = streamer_read_uchar (ib);
93 }
94
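/* Illustrative usage sketch (comment only, not compiled): reading a
   fixed-size chunk of raw bytes from the input block, e.g. a small
   buffer the producer wrote byte-wise:

     unsigned char buf[16];
     lto_input_data_block (ib, buf, sizeof (buf));  */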
95
96 /* Lookup STRING in file_name_hash_table. If found, return the existing
97 string, otherwise insert STRING as the canonical version. */
98
99 static const char *
100 canon_file_name (const char *string)
101 {
102 string_slot **slot;
103 struct string_slot s_slot;
104 size_t len = strlen (string);
105
106 s_slot.s = string;
107 s_slot.len = len;
108
109 slot = file_name_hash_table->find_slot (&s_slot, INSERT);
110 if (*slot == NULL)
111 {
112 char *saved_string;
113 struct string_slot *new_slot;
114
115 saved_string = (char *) xmalloc (len + 1);
116 new_slot = XCNEW (struct string_slot);
117 memcpy (saved_string, string, len + 1);
118 new_slot->s = saved_string;
119 new_slot->len = len;
120 *slot = new_slot;
121 return saved_string;
122 }
123 else
124 {
125 struct string_slot *old_slot = *slot;
126 return old_slot->s;
127 }
128 }
129
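/* Illustrative sketch (comment only, not compiled): because file names
   are interned here, looking up the same path twice yields the same
   pointer, which is what lets the location cache below compare files by
   pointer rather than by strcmp:

     const char *a = canon_file_name ("foo.c");
     const char *b = canon_file_name ("foo.c");
     gcc_checking_assert (a == b);  */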
130 /* Pointer to currently alive instance of lto_location_cache. */
131
132 lto_location_cache *lto_location_cache::current_cache;
133
134 /* Sort locations in source order. Start with the file and line from the last cache application. */
135
136 int
137 lto_location_cache::cmp_loc (const void *pa, const void *pb)
138 {
139 const cached_location *a = ((const cached_location *)pa);
140 const cached_location *b = ((const cached_location *)pb);
141 const char *current_file = current_cache->current_file;
142 int current_line = current_cache->current_line;
143
144 if (a->file == current_file && b->file != current_file)
145 return -1;
146 if (a->file != current_file && b->file == current_file)
147 return 1;
148 if (a->file == current_file && b->file == current_file)
149 {
150 if (a->line == current_line && b->line != current_line)
151 return -1;
152 if (a->line != current_line && b->line == current_line)
153 return 1;
154 }
155 if (a->file != b->file)
156 return strcmp (a->file, b->file);
157 if (a->sysp != b->sysp)
158 return a->sysp ? 1 : -1;
159 if (a->line != b->line)
160 return a->line - b->line;
161 return a->col - b->col;
162 }
163
164 /* Apply all changes in location cache. Add locations into linemap and patch
165 trees. */
166
167 bool
168 lto_location_cache::apply_location_cache ()
169 {
170 static const char *prev_file;
171 if (!loc_cache.length ())
172 return false;
173 if (loc_cache.length () > 1)
174 loc_cache.qsort (cmp_loc);
175
176 for (unsigned int i = 0; i < loc_cache.length (); i++)
177 {
178 struct cached_location loc = loc_cache[i];
179
180 if (current_file != loc.file)
181 linemap_add (line_table, prev_file ? LC_RENAME : LC_ENTER,
182 loc.sysp, loc.file, loc.line);
183 else if (current_line != loc.line)
184 {
185 int max = loc.col;
186
187 for (unsigned int j = i + 1; j < loc_cache.length (); j++)
188 if (loc.file != loc_cache[j].file
189 || loc.line != loc_cache[j].line)
190 break;
191 else if (max < loc_cache[j].col)
192 max = loc_cache[j].col;
193 linemap_line_start (line_table, loc.line, max + 1);
194 }
195 gcc_assert (*loc.loc == BUILTINS_LOCATION + 1);
196 if (current_file == loc.file && current_line == loc.line
197 && current_col == loc.col)
198 *loc.loc = current_loc;
199 else
200 current_loc = *loc.loc = linemap_position_for_column (line_table,
201 loc.col);
202 current_line = loc.line;
203 prev_file = current_file = loc.file;
204 current_col = loc.col;
205 }
206 loc_cache.truncate (0);
207 accepted_length = 0;
208 return true;
209 }
210
211 /* Tree merging did not succeed; mark all changes in the cache as accepted. */
212
213 void
214 lto_location_cache::accept_location_cache ()
215 {
216 gcc_assert (current_cache == this);
217 accepted_length = loc_cache.length ();
218 }
219
220 /* Tree merging did succeed; throw away recent changes. */
221
222 void
223 lto_location_cache::revert_location_cache ()
224 {
225 loc_cache.truncate (accepted_length);
226 }
227
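/* Illustrative sketch (comment only, not compiled) of how the three
   cache operations are meant to be combined by the tree-merging reader;
   the condition name below is hypothetical and stands for the caller's
   own bookkeeping:

     ... stream in a candidate tree; its locations land in the cache ...
     if (merged_with_existing_tree)
       data_in->location_cache.revert_location_cache ();
     else
       data_in->location_cache.accept_location_cache ();
     ... once the prevailing trees are known ...
     data_in->location_cache.apply_location_cache ();  */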
228 /* Read a location bitpack from input block IB and either update *LOC directly
229 or add it to the location cache.
230 It is necessary to call apply_location_cache to get *LOC updated. */
231
232 void
233 lto_location_cache::input_location (location_t *loc, struct bitpack_d *bp,
234 struct data_in *data_in)
235 {
236 static const char *stream_file;
237 static int stream_line;
238 static int stream_col;
239 static bool stream_sysp;
240 bool file_change, line_change, column_change;
241
242 gcc_assert (current_cache == this);
243
244 *loc = bp_unpack_int_in_range (bp, "location", 0, RESERVED_LOCATION_COUNT);
245
246 if (*loc < RESERVED_LOCATION_COUNT)
247 return;
248
249 /* Keep value RESERVED_LOCATION_COUNT in *loc as linemap lookups will
250 ICE on it. */
251
252 file_change = bp_unpack_value (bp, 1);
253 line_change = bp_unpack_value (bp, 1);
254 column_change = bp_unpack_value (bp, 1);
255
256 if (file_change)
257 {
258 stream_file = canon_file_name (bp_unpack_string (data_in, bp));
259 stream_sysp = bp_unpack_value (bp, 1);
260 }
261
262 if (line_change)
263 stream_line = bp_unpack_var_len_unsigned (bp);
264
265 if (column_change)
266 stream_col = bp_unpack_var_len_unsigned (bp);
267
268 /* This optimization saves location cache operations during gimple
269 streaming. */
270
271 if (current_file == stream_file && current_line == stream_line
272 && current_col == stream_col && current_sysp == stream_sysp)
273 {
274 *loc = current_loc;
275 return;
276 }
277
278 struct cached_location entry
279 = {stream_file, loc, stream_line, stream_col, stream_sysp};
280 loc_cache.safe_push (entry);
281 }
282
283 /* Read a location bitpack from input block IB and either update *LOC directly
284 or add it to the location cache.
285 It is necessary to call apply_location_cache to get *LOC updated. */
286
287 void
288 lto_input_location (location_t *loc, struct bitpack_d *bp,
289 struct data_in *data_in)
290 {
291 data_in->location_cache.input_location (loc, bp, data_in);
292 }
293
294 /* Read location and return it instead of going through location caching.
295 This should be used only when the resulting location is not going to be
296 discarded. */
297
298 location_t
299 stream_input_location_now (struct bitpack_d *bp, struct data_in *data_in)
300 {
301 location_t loc;
302 stream_input_location (&loc, bp, data_in);
303 data_in->location_cache.apply_location_cache ();
304 return loc;
305 }
306
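/* Illustrative usage sketch (comment only, not compiled): locations that
   are needed immediately, such as the failure location of a
   must-not-throw region further below, bypass the cache:

     bitpack_d bp = streamer_read_bitpack (ib);
     location_t loc = stream_input_location_now (&bp, data_in);  */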
307 /* Read a reference to a tree node from DATA_IN using input block IB.
308 TAG is the expected node that should be found in IB. If TAG belongs
309 to one of the indexable trees, expect to read a reference index to
310 be looked up in one of the symbol tables; otherwise read the physical
311 representation of the tree using stream_read_tree. FN is the
312 function scope for the read tree. */
313
314 tree
315 lto_input_tree_ref (struct lto_input_block *ib, struct data_in *data_in,
316 struct function *fn, enum LTO_tags tag)
317 {
318 unsigned HOST_WIDE_INT ix_u;
319 tree result = NULL_TREE;
320
321 lto_tag_check_range (tag, LTO_field_decl_ref, LTO_namelist_decl_ref);
322
323 switch (tag)
324 {
325 case LTO_type_ref:
326 ix_u = streamer_read_uhwi (ib);
327 result = lto_file_decl_data_get_type (data_in->file_data, ix_u);
328 break;
329
330 case LTO_ssa_name_ref:
331 ix_u = streamer_read_uhwi (ib);
332 result = (*SSANAMES (fn))[ix_u];
333 break;
334
335 case LTO_field_decl_ref:
336 ix_u = streamer_read_uhwi (ib);
337 result = lto_file_decl_data_get_field_decl (data_in->file_data, ix_u);
338 break;
339
340 case LTO_function_decl_ref:
341 ix_u = streamer_read_uhwi (ib);
342 result = lto_file_decl_data_get_fn_decl (data_in->file_data, ix_u);
343 break;
344
345 case LTO_type_decl_ref:
346 ix_u = streamer_read_uhwi (ib);
347 result = lto_file_decl_data_get_type_decl (data_in->file_data, ix_u);
348 break;
349
350 case LTO_namespace_decl_ref:
351 ix_u = streamer_read_uhwi (ib);
352 result = lto_file_decl_data_get_namespace_decl (data_in->file_data, ix_u);
353 break;
354
355 case LTO_global_decl_ref:
356 case LTO_result_decl_ref:
357 case LTO_const_decl_ref:
358 case LTO_imported_decl_ref:
359 case LTO_label_decl_ref:
360 case LTO_translation_unit_decl_ref:
361 case LTO_namelist_decl_ref:
362 ix_u = streamer_read_uhwi (ib);
363 result = lto_file_decl_data_get_var_decl (data_in->file_data, ix_u);
364 break;
365
366 default:
367 gcc_unreachable ();
368 }
369
370 gcc_assert (result);
371
372 return result;
373 }
374
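/* Illustrative usage sketch (comment only, not compiled): an indexable
   reference is streamed as its tag followed by an index into the
   corresponding per-file table, so a caller reads it back as

     enum LTO_tags tag = streamer_read_record_start (ib);
     tree t = lto_input_tree_ref (ib, data_in, cfun, tag);

   which is how lto_input_tree_1 below dispatches such tags.  */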
375
376 /* Read and return a double-linked list of catch handlers from input
377 block IB, using descriptors in DATA_IN. */
378
379 static struct eh_catch_d *
380 lto_input_eh_catch_list (struct lto_input_block *ib, struct data_in *data_in,
381 eh_catch *last_p)
382 {
383 eh_catch first;
384 enum LTO_tags tag;
385
386 *last_p = first = NULL;
387 tag = streamer_read_record_start (ib);
388 while (tag)
389 {
390 tree list;
391 eh_catch n;
392
393 lto_tag_check_range (tag, LTO_eh_catch, LTO_eh_catch);
394
395 /* Read the catch node. */
396 n = ggc_cleared_alloc<eh_catch_d> ();
397 n->type_list = stream_read_tree (ib, data_in);
398 n->filter_list = stream_read_tree (ib, data_in);
399 n->label = stream_read_tree (ib, data_in);
400
401 /* Register all the types in N->FILTER_LIST. */
402 for (list = n->filter_list; list; list = TREE_CHAIN (list))
403 add_type_for_runtime (TREE_VALUE (list));
404
405 /* Chain N to the end of the list. */
406 if (*last_p)
407 (*last_p)->next_catch = n;
408 n->prev_catch = *last_p;
409 *last_p = n;
410
411 /* Set the head of the list the first time through the loop. */
412 if (first == NULL)
413 first = n;
414
415 tag = streamer_read_record_start (ib);
416 }
417
418 return first;
419 }
420
421
422 /* Read and return EH region IX from input block IB, using descriptors
423 in DATA_IN. */
424
425 static eh_region
426 input_eh_region (struct lto_input_block *ib, struct data_in *data_in, int ix)
427 {
428 enum LTO_tags tag;
429 eh_region r;
430
431 /* Read the region header. */
432 tag = streamer_read_record_start (ib);
433 if (tag == LTO_null)
434 return NULL;
435
436 r = ggc_cleared_alloc<eh_region_d> ();
437 r->index = streamer_read_hwi (ib);
438
439 gcc_assert (r->index == ix);
440
441 /* Read all the region pointers as region numbers. We'll fix up
442 the pointers once the whole array has been read. */
443 r->outer = (eh_region) (intptr_t) streamer_read_hwi (ib);
444 r->inner = (eh_region) (intptr_t) streamer_read_hwi (ib);
445 r->next_peer = (eh_region) (intptr_t) streamer_read_hwi (ib);
446
447 switch (tag)
448 {
449 case LTO_ert_cleanup:
450 r->type = ERT_CLEANUP;
451 break;
452
453 case LTO_ert_try:
454 {
455 struct eh_catch_d *last_catch;
456 r->type = ERT_TRY;
457 r->u.eh_try.first_catch = lto_input_eh_catch_list (ib, data_in,
458 &last_catch);
459 r->u.eh_try.last_catch = last_catch;
460 break;
461 }
462
463 case LTO_ert_allowed_exceptions:
464 {
465 tree l;
466
467 r->type = ERT_ALLOWED_EXCEPTIONS;
468 r->u.allowed.type_list = stream_read_tree (ib, data_in);
469 r->u.allowed.label = stream_read_tree (ib, data_in);
470 r->u.allowed.filter = streamer_read_uhwi (ib);
471
472 for (l = r->u.allowed.type_list; l ; l = TREE_CHAIN (l))
473 add_type_for_runtime (TREE_VALUE (l));
474 }
475 break;
476
477 case LTO_ert_must_not_throw:
478 {
479 r->type = ERT_MUST_NOT_THROW;
480 r->u.must_not_throw.failure_decl = stream_read_tree (ib, data_in);
481 bitpack_d bp = streamer_read_bitpack (ib);
482 r->u.must_not_throw.failure_loc
483 = stream_input_location_now (&bp, data_in);
484 }
485 break;
486
487 default:
488 gcc_unreachable ();
489 }
490
491 r->landing_pads = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);
492
493 return r;
494 }
495
496
497 /* Read and return EH landing pad IX from input block IB, using descriptors
498 in DATA_IN. */
499
500 static eh_landing_pad
501 input_eh_lp (struct lto_input_block *ib, struct data_in *data_in, int ix)
502 {
503 enum LTO_tags tag;
504 eh_landing_pad lp;
505
506 /* Read the landing pad header. */
507 tag = streamer_read_record_start (ib);
508 if (tag == LTO_null)
509 return NULL;
510
511 lto_tag_check_range (tag, LTO_eh_landing_pad, LTO_eh_landing_pad);
512
513 lp = ggc_cleared_alloc<eh_landing_pad_d> ();
514 lp->index = streamer_read_hwi (ib);
515 gcc_assert (lp->index == ix);
516 lp->next_lp = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);
517 lp->region = (eh_region) (intptr_t) streamer_read_hwi (ib);
518 lp->post_landing_pad = stream_read_tree (ib, data_in);
519
520 return lp;
521 }
522
523
524 /* After reading the EH regions, pointers to peer and children regions
525 are region numbers. This converts all these region numbers into
526 real pointers into the rematerialized regions for FN. ROOT_REGION
527 is the region number for the root EH region in FN. */
528
529 static void
530 fixup_eh_region_pointers (struct function *fn, HOST_WIDE_INT root_region)
531 {
532 unsigned i;
533 vec<eh_region, va_gc> *eh_array = fn->eh->region_array;
534 vec<eh_landing_pad, va_gc> *lp_array = fn->eh->lp_array;
535 eh_region r;
536 eh_landing_pad lp;
537
538 gcc_assert (eh_array && lp_array);
539
540 gcc_assert (root_region >= 0);
541 fn->eh->region_tree = (*eh_array)[root_region];
542
543 #define FIXUP_EH_REGION(r) (r) = (*eh_array)[(HOST_WIDE_INT) (intptr_t) (r)]
544 #define FIXUP_EH_LP(p) (p) = (*lp_array)[(HOST_WIDE_INT) (intptr_t) (p)]
545
546 /* Convert all the index numbers stored in pointer fields into
547 pointers to the corresponding slots in the EH region array. */
548 FOR_EACH_VEC_ELT (*eh_array, i, r)
549 {
550 /* The array may contain NULL regions. */
551 if (r == NULL)
552 continue;
553
554 gcc_assert (i == (unsigned) r->index);
555 FIXUP_EH_REGION (r->outer);
556 FIXUP_EH_REGION (r->inner);
557 FIXUP_EH_REGION (r->next_peer);
558 FIXUP_EH_LP (r->landing_pads);
559 }
560
561 /* Convert all the index numbers stored in pointer fields into
562 pointers to the corresponding slots in the EH landing pad array. */
563 FOR_EACH_VEC_ELT (*lp_array, i, lp)
564 {
565 /* The array may contain NULL landing pads. */
566 if (lp == NULL)
567 continue;
568
569 gcc_assert (i == (unsigned) lp->index);
570 FIXUP_EH_LP (lp->next_lp);
571 FIXUP_EH_REGION (lp->region);
572 }
573
574 #undef FIXUP_EH_REGION
575 #undef FIXUP_EH_LP
576 }
577
578
579 /* Initialize EH support. */
580
581 void
582 lto_init_eh (void)
583 {
584 static bool eh_initialized_p = false;
585
586 if (eh_initialized_p)
587 return;
588
589 /* Contrary to most other FEs, we only initialize EH support when at
590 least one of the files in the set contains exception regions in
591 it. Since this happens much later than the call to init_eh in
592 lang_dependent_init, we have to set flag_exceptions and call
593 init_eh again to initialize the EH tables. */
594 flag_exceptions = 1;
595 init_eh ();
596
597 eh_initialized_p = true;
598 }
599
600
601 /* Read the exception table for FN from IB using the data descriptors
602 in DATA_IN. */
603
604 static void
605 input_eh_regions (struct lto_input_block *ib, struct data_in *data_in,
606 struct function *fn)
607 {
608 HOST_WIDE_INT i, root_region, len;
609 enum LTO_tags tag;
610
611 tag = streamer_read_record_start (ib);
612 if (tag == LTO_null)
613 return;
614
615 lto_tag_check_range (tag, LTO_eh_table, LTO_eh_table);
616
617 /* If the file contains EH regions, then it was compiled with
618 -fexceptions. In that case, initialize the backend EH
619 machinery. */
620 lto_init_eh ();
621
622 gcc_assert (fn->eh);
623
624 root_region = streamer_read_hwi (ib);
625 gcc_assert (root_region == (int) root_region);
626
627 /* Read the EH region array. */
628 len = streamer_read_hwi (ib);
629 gcc_assert (len == (int) len);
630 if (len > 0)
631 {
632 vec_safe_grow_cleared (fn->eh->region_array, len);
633 for (i = 0; i < len; i++)
634 {
635 eh_region r = input_eh_region (ib, data_in, i);
636 (*fn->eh->region_array)[i] = r;
637 }
638 }
639
640 /* Read the landing pads. */
641 len = streamer_read_hwi (ib);
642 gcc_assert (len == (int) len);
643 if (len > 0)
644 {
645 vec_safe_grow_cleared (fn->eh->lp_array, len);
646 for (i = 0; i < len; i++)
647 {
648 eh_landing_pad lp = input_eh_lp (ib, data_in, i);
649 (*fn->eh->lp_array)[i] = lp;
650 }
651 }
652
653 /* Read the runtime type data. */
654 len = streamer_read_hwi (ib);
655 gcc_assert (len == (int) len);
656 if (len > 0)
657 {
658 vec_safe_grow_cleared (fn->eh->ttype_data, len);
659 for (i = 0; i < len; i++)
660 {
661 tree ttype = stream_read_tree (ib, data_in);
662 (*fn->eh->ttype_data)[i] = ttype;
663 }
664 }
665
666 /* Read the table of action chains. */
667 len = streamer_read_hwi (ib);
668 gcc_assert (len == (int) len);
669 if (len > 0)
670 {
671 if (targetm.arm_eabi_unwinder)
672 {
673 vec_safe_grow_cleared (fn->eh->ehspec_data.arm_eabi, len);
674 for (i = 0; i < len; i++)
675 {
676 tree t = stream_read_tree (ib, data_in);
677 (*fn->eh->ehspec_data.arm_eabi)[i] = t;
678 }
679 }
680 else
681 {
682 vec_safe_grow_cleared (fn->eh->ehspec_data.other, len);
683 for (i = 0; i < len; i++)
684 {
685 uchar c = streamer_read_uchar (ib);
686 (*fn->eh->ehspec_data.other)[i] = c;
687 }
688 }
689 }
690
691 /* Reconstruct the EH region tree by fixing up the peer/children
692 pointers. */
693 fixup_eh_region_pointers (fn, root_region);
694
695 tag = streamer_read_record_start (ib);
696 lto_tag_check_range (tag, LTO_null, LTO_null);
697 }
698
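/* Illustrative sketch (comment only, not compiled) of the EH section
   layout consumed by input_eh_regions above, in the order it is read:

     LTO_eh_table
     root_region
     len, region[0] ... region[len-1]
     len, landing_pad[0] ... landing_pad[len-1]
     len, ttype_data[0] ... ttype_data[len-1]
     len, ehspec_data[0] ... ehspec_data[len-1]
     LTO_null  */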
699
700 /* Make a new basic block with index INDEX in function FN. */
701
702 static basic_block
703 make_new_block (struct function *fn, unsigned int index)
704 {
705 basic_block bb = alloc_block ();
706 bb->index = index;
707 SET_BASIC_BLOCK_FOR_FN (fn, index, bb);
708 n_basic_blocks_for_fn (fn)++;
709 return bb;
710 }
711
712
713 /* Read the CFG for function FN from input block IB. */
714
715 static void
716 input_cfg (struct lto_input_block *ib, struct data_in *data_in,
717 struct function *fn,
718 int count_materialization_scale)
719 {
720 unsigned int bb_count;
721 basic_block p_bb;
722 unsigned int i;
723 int index;
724
725 init_empty_tree_cfg_for_function (fn);
726 init_ssa_operands (fn);
727
728 profile_status_for_fn (fn) = streamer_read_enum (ib, profile_status_d,
729 PROFILE_LAST);
730
731 bb_count = streamer_read_uhwi (ib);
732
733 last_basic_block_for_fn (fn) = bb_count;
734 if (bb_count > basic_block_info_for_fn (fn)->length ())
735 vec_safe_grow_cleared (basic_block_info_for_fn (fn), bb_count);
736
737 if (bb_count > label_to_block_map_for_fn (fn)->length ())
738 vec_safe_grow_cleared (label_to_block_map_for_fn (fn), bb_count);
739
740 index = streamer_read_hwi (ib);
741 while (index != -1)
742 {
743 basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
744 unsigned int edge_count;
745
746 if (bb == NULL)
747 bb = make_new_block (fn, index);
748
749 edge_count = streamer_read_uhwi (ib);
750
751 /* Connect up the CFG. */
752 for (i = 0; i < edge_count; i++)
753 {
754 unsigned int dest_index;
755 unsigned int edge_flags;
756 basic_block dest;
757 int probability;
758 gcov_type count;
759 edge e;
760
761 dest_index = streamer_read_uhwi (ib);
762 probability = (int) streamer_read_hwi (ib);
763 count = apply_scale ((gcov_type) streamer_read_gcov_count (ib),
764 count_materialization_scale);
765 edge_flags = streamer_read_uhwi (ib);
766
767 dest = BASIC_BLOCK_FOR_FN (fn, dest_index);
768
769 if (dest == NULL)
770 dest = make_new_block (fn, dest_index);
771
772 e = make_edge (bb, dest, edge_flags);
773 e->probability = probability;
774 e->count = count;
775 }
776
777 index = streamer_read_hwi (ib);
778 }
779
780 p_bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
781 index = streamer_read_hwi (ib);
782 while (index != -1)
783 {
784 basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
785 bb->prev_bb = p_bb;
786 p_bb->next_bb = bb;
787 p_bb = bb;
788 index = streamer_read_hwi (ib);
789 }
790
791 /* ??? The cfgloop interface is tied to cfun. */
792 gcc_assert (cfun == fn);
793
794 /* Input the loop tree. */
795 unsigned n_loops = streamer_read_uhwi (ib);
796 if (n_loops == 0)
797 return;
798
799 struct loops *loops = ggc_cleared_alloc<struct loops> ();
800 init_loops_structure (fn, loops, n_loops);
801 set_loops_for_fn (fn, loops);
802
803 /* Input each loop and associate it with its loop header so
804 flow_loops_find can rebuild the loop tree. */
805 for (unsigned i = 1; i < n_loops; ++i)
806 {
807 int header_index = streamer_read_hwi (ib);
808 if (header_index == -1)
809 {
810 loops->larray->quick_push (NULL);
811 continue;
812 }
813
814 struct loop *loop = alloc_loop ();
815 loop->header = BASIC_BLOCK_FOR_FN (fn, header_index);
816 loop->header->loop_father = loop;
817
818 /* Read everything copy_loop_info copies. */
819 loop->estimate_state = streamer_read_enum (ib, loop_estimation, EST_LAST);
820 loop->any_upper_bound = streamer_read_hwi (ib);
821 if (loop->any_upper_bound)
822 loop->nb_iterations_upper_bound = streamer_read_widest_int (ib);
823 loop->any_likely_upper_bound = streamer_read_hwi (ib);
824 if (loop->any_likely_upper_bound)
825 loop->nb_iterations_likely_upper_bound = streamer_read_widest_int (ib);
826 loop->any_estimate = streamer_read_hwi (ib);
827 if (loop->any_estimate)
828 loop->nb_iterations_estimate = streamer_read_widest_int (ib);
829
830 /* Read OMP SIMD related info. */
831 loop->safelen = streamer_read_hwi (ib);
832 loop->dont_vectorize = streamer_read_hwi (ib);
833 loop->force_vectorize = streamer_read_hwi (ib);
834 loop->simduid = stream_read_tree (ib, data_in);
835
836 place_new_loop (fn, loop);
837
838 /* flow_loops_find doesn't like loops not in the tree, hook them
839 all as siblings of the tree root temporarily. */
840 flow_loop_tree_node_add (loops->tree_root, loop);
841 }
842
843 /* Rebuild the loop tree. */
844 flow_loops_find (loops);
845 }
846
847
848 /* Read the SSA names array for function FN from DATA_IN using input
849 block IB. */
850
851 static void
852 input_ssa_names (struct lto_input_block *ib, struct data_in *data_in,
853 struct function *fn)
854 {
855 unsigned int i, size;
856
857 size = streamer_read_uhwi (ib);
858 init_ssanames (fn, size);
859
860 i = streamer_read_uhwi (ib);
861 while (i)
862 {
863 tree ssa_name, name;
864 bool is_default_def;
865
866 /* Skip over the elements that had been freed. */
867 while (SSANAMES (fn)->length () < i)
868 SSANAMES (fn)->quick_push (NULL_TREE);
869
870 is_default_def = (streamer_read_uchar (ib) != 0);
871 name = stream_read_tree (ib, data_in);
872 ssa_name = make_ssa_name_fn (fn, name, NULL);
873
874 if (is_default_def)
875 {
876 set_ssa_default_def (cfun, SSA_NAME_VAR (ssa_name), ssa_name);
877 SSA_NAME_DEF_STMT (ssa_name) = gimple_build_nop ();
878 }
879
880 i = streamer_read_uhwi (ib);
881 }
882 }
883
884
885 /* Go through all NODE edges and fixup call_stmt pointers
886 so they point to STMTS. */
887
888 static void
889 fixup_call_stmt_edges_1 (struct cgraph_node *node, gimple **stmts,
890 struct function *fn)
891 {
892 struct cgraph_edge *cedge;
893 struct ipa_ref *ref = NULL;
894 unsigned int i;
895
896 for (cedge = node->callees; cedge; cedge = cedge->next_callee)
897 {
898 if (gimple_stmt_max_uid (fn) < cedge->lto_stmt_uid)
899 fatal_error (input_location,
900 "Cgraph edge statement index out of range");
901 cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
902 if (!cedge->call_stmt)
903 fatal_error (input_location,
904 "Cgraph edge statement index not found");
905 }
906 for (cedge = node->indirect_calls; cedge; cedge = cedge->next_callee)
907 {
908 if (gimple_stmt_max_uid (fn) < cedge->lto_stmt_uid)
909 fatal_error (input_location,
910 "Cgraph edge statement index out of range");
911 cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
912 if (!cedge->call_stmt)
913 fatal_error (input_location, "Cgraph edge statement index not found");
914 }
915 for (i = 0; node->iterate_reference (i, ref); i++)
916 if (ref->lto_stmt_uid)
917 {
918 if (gimple_stmt_max_uid (fn) < ref->lto_stmt_uid)
919 fatal_error (input_location,
920 "Reference statement index out of range");
921 ref->stmt = stmts[ref->lto_stmt_uid - 1];
922 if (!ref->stmt)
923 fatal_error (input_location, "Reference statement index not found");
924 }
925 }
926
927
928 /* Fixup call_stmt pointers in NODE and all clones. */
929
930 static void
931 fixup_call_stmt_edges (struct cgraph_node *orig, gimple **stmts)
932 {
933 struct cgraph_node *node;
934 struct function *fn;
935
936 while (orig->clone_of)
937 orig = orig->clone_of;
938 fn = DECL_STRUCT_FUNCTION (orig->decl);
939
940 fixup_call_stmt_edges_1 (orig, stmts, fn);
941 if (orig->clones)
942 for (node = orig->clones; node != orig;)
943 {
944 if (!node->thunk.thunk_p)
945 fixup_call_stmt_edges_1 (node, stmts, fn);
946 if (node->clones)
947 node = node->clones;
948 else if (node->next_sibling_clone)
949 node = node->next_sibling_clone;
950 else
951 {
952 while (node != orig && !node->next_sibling_clone)
953 node = node->clone_of;
954 if (node != orig)
955 node = node->next_sibling_clone;
956 }
957 }
958 }
959
960
961 /* Input the base body of struct function FN from DATA_IN
962 using input block IB. */
963
964 static void
965 input_struct_function_base (struct function *fn, struct data_in *data_in,
966 struct lto_input_block *ib)
967 {
968 struct bitpack_d bp;
969 int len;
970
971 /* Read the static chain and non-local goto save area. */
972 fn->static_chain_decl = stream_read_tree (ib, data_in);
973 fn->nonlocal_goto_save_area = stream_read_tree (ib, data_in);
974
975 /* Read all the local symbols. */
976 len = streamer_read_hwi (ib);
977 if (len > 0)
978 {
979 int i;
980 vec_safe_grow_cleared (fn->local_decls, len);
981 for (i = 0; i < len; i++)
982 {
983 tree t = stream_read_tree (ib, data_in);
984 (*fn->local_decls)[i] = t;
985 }
986 }
987
988 /* Input the current IL state of the function. */
989 fn->curr_properties = streamer_read_uhwi (ib);
990
991 /* Read all the attributes for FN. */
992 bp = streamer_read_bitpack (ib);
993 fn->is_thunk = bp_unpack_value (&bp, 1);
994 fn->has_local_explicit_reg_vars = bp_unpack_value (&bp, 1);
995 fn->returns_pcc_struct = bp_unpack_value (&bp, 1);
996 fn->returns_struct = bp_unpack_value (&bp, 1);
997 fn->can_throw_non_call_exceptions = bp_unpack_value (&bp, 1);
998 fn->can_delete_dead_exceptions = bp_unpack_value (&bp, 1);
999 fn->always_inline_functions_inlined = bp_unpack_value (&bp, 1);
1000 fn->after_inlining = bp_unpack_value (&bp, 1);
1001 fn->stdarg = bp_unpack_value (&bp, 1);
1002 fn->has_nonlocal_label = bp_unpack_value (&bp, 1);
1003 fn->has_forced_label_in_static = bp_unpack_value (&bp, 1);
1004 fn->calls_alloca = bp_unpack_value (&bp, 1);
1005 fn->calls_setjmp = bp_unpack_value (&bp, 1);
1006 fn->has_force_vectorize_loops = bp_unpack_value (&bp, 1);
1007 fn->has_simduid_loops = bp_unpack_value (&bp, 1);
1008 fn->va_list_fpr_size = bp_unpack_value (&bp, 8);
1009 fn->va_list_gpr_size = bp_unpack_value (&bp, 8);
1010 fn->last_clique = bp_unpack_value (&bp, sizeof (short) * 8);
1011
1012 /* Input the function start and end loci. */
1013 fn->function_start_locus = stream_input_location_now (&bp, data_in);
1014 fn->function_end_locus = stream_input_location_now (&bp, data_in);
1015 }
1016
1017
1018 /* Read the body of function FN_DECL from DATA_IN using input block IB. */
1019
1020 static void
1021 input_function (tree fn_decl, struct data_in *data_in,
1022 struct lto_input_block *ib, struct lto_input_block *ib_cfg)
1023 {
1024 struct function *fn;
1025 enum LTO_tags tag;
1026 gimple **stmts;
1027 basic_block bb;
1028 struct cgraph_node *node;
1029
1030 tag = streamer_read_record_start (ib);
1031 lto_tag_check (tag, LTO_function);
1032
1033 /* Read decls for parameters and args. */
1034 DECL_RESULT (fn_decl) = stream_read_tree (ib, data_in);
1035 DECL_ARGUMENTS (fn_decl) = streamer_read_chain (ib, data_in);
1036
1037 /* Read the tree of lexical scopes for the function. */
1038 DECL_INITIAL (fn_decl) = stream_read_tree (ib, data_in);
1039
1040 if (!streamer_read_uhwi (ib))
1041 return;
1042
1043 push_struct_function (fn_decl);
1044 fn = DECL_STRUCT_FUNCTION (fn_decl);
1045 init_tree_ssa (fn);
1046 /* We input IL in SSA form. */
1047 cfun->gimple_df->in_ssa_p = true;
1048
1049 gimple_register_cfg_hooks ();
1050
1051 node = cgraph_node::get (fn_decl);
1052 if (!node)
1053 node = cgraph_node::create (fn_decl);
1054 input_struct_function_base (fn, data_in, ib);
1055 input_cfg (ib_cfg, data_in, fn, node->count_materialization_scale);
1056
1057 /* Read all the SSA names. */
1058 input_ssa_names (ib, data_in, fn);
1059
1060 /* Read the exception handling regions in the function. */
1061 input_eh_regions (ib, data_in, fn);
1062
1063 gcc_assert (DECL_INITIAL (fn_decl));
1064 DECL_SAVED_TREE (fn_decl) = NULL_TREE;
1065
1066 /* Read all the basic blocks. */
1067 tag = streamer_read_record_start (ib);
1068 while (tag)
1069 {
1070 input_bb (ib, tag, data_in, fn,
1071 node->count_materialization_scale);
1072 tag = streamer_read_record_start (ib);
1073 }
1074
1075 /* Fix up the call statements that are mentioned in the callgraph
1076 edges. */
1077 set_gimple_stmt_max_uid (cfun, 0);
1078 FOR_ALL_BB_FN (bb, cfun)
1079 {
1080 gimple_stmt_iterator gsi;
1081 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1082 {
1083 gimple *stmt = gsi_stmt (gsi);
1084 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1085 }
1086 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1087 {
1088 gimple *stmt = gsi_stmt (gsi);
1089 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1090 }
1091 }
1092 stmts = (gimple **) xcalloc (gimple_stmt_max_uid (fn), sizeof (gimple *));
1093 FOR_ALL_BB_FN (bb, cfun)
1094 {
1095 gimple_stmt_iterator bsi = gsi_start_phis (bb);
1096 while (!gsi_end_p (bsi))
1097 {
1098 gimple *stmt = gsi_stmt (bsi);
1099 gsi_next (&bsi);
1100 stmts[gimple_uid (stmt)] = stmt;
1101 }
1102 bsi = gsi_start_bb (bb);
1103 while (!gsi_end_p (bsi))
1104 {
1105 gimple *stmt = gsi_stmt (bsi);
1106 /* If we're recompiling LTO objects with debug stmts but
1107 we're not supposed to have debug stmts, remove them now.
1108 We can't remove them earlier because this would cause uid
1109 mismatches in fixups, but we can do it at this point, as
1110 long as debug stmts don't require fixups. */
1111 if (!MAY_HAVE_DEBUG_STMTS && !flag_wpa && is_gimple_debug (stmt))
1112 {
1113 gimple_stmt_iterator gsi = bsi;
1114 gsi_next (&bsi);
1115 gsi_remove (&gsi, true);
1116 }
1117 else
1118 {
1119 gsi_next (&bsi);
1120 stmts[gimple_uid (stmt)] = stmt;
1121 }
1122 }
1123 }
1124
1125 /* Set the gimple body to the statement sequence in the entry
1126 basic block. FIXME lto, this is fairly hacky. The existence
1127 of a gimple body is used by the cgraph routines, but we should
1128 really use the presence of the CFG. */
1129 {
1130 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
1131 gimple_set_body (fn_decl, bb_seq (ei_edge (ei)->dest));
1132 }
1133
1134 fixup_call_stmt_edges (node, stmts);
1135 execute_all_ipa_stmt_fixups (node, stmts);
1136
1137 update_ssa (TODO_update_ssa_only_virtuals);
1138 free_dominance_info (CDI_DOMINATORS);
1139 free_dominance_info (CDI_POST_DOMINATORS);
1140 free (stmts);
1141 pop_cfun ();
1142 }
1143
1144 /* Read the initial value of variable VAR from DATA_IN using input block IB. */
1145
1146 static void
1147 input_constructor (tree var, struct data_in *data_in,
1148 struct lto_input_block *ib)
1149 {
1150 DECL_INITIAL (var) = stream_read_tree (ib, data_in);
1151 }
1152
1153
1154 /* Read the body from DATA for symbol NODE and fill it in.
1155 FILE_DATA holds the global decls and types. SECTION_TYPE is either
1156 LTO_section_function_body or LTO_section_static_initializer. If
1157 SECTION_TYPE is LTO_section_function_body, NODE must be a cgraph node
1158 whose decl is the function being read. */
1159
1160 static void
1161 lto_read_body_or_constructor (struct lto_file_decl_data *file_data, struct symtab_node *node,
1162 const char *data, enum lto_section_type section_type)
1163 {
1164 const struct lto_function_header *header;
1165 struct data_in *data_in;
1166 int cfg_offset;
1167 int main_offset;
1168 int string_offset;
1169 tree fn_decl = node->decl;
1170
1171 header = (const struct lto_function_header *) data;
1172 if (TREE_CODE (node->decl) == FUNCTION_DECL)
1173 {
1174 cfg_offset = sizeof (struct lto_function_header);
1175 main_offset = cfg_offset + header->cfg_size;
1176 string_offset = main_offset + header->main_size;
1177 }
1178 else
1179 {
1180 main_offset = sizeof (struct lto_function_header);
1181 string_offset = main_offset + header->main_size;
1182 }
1183
1184 data_in = lto_data_in_create (file_data, data + string_offset,
1185 header->string_size, vNULL);
1186
1187 if (section_type == LTO_section_function_body)
1188 {
1189 struct lto_in_decl_state *decl_state;
1190 unsigned from;
1191
1192 gcc_checking_assert (node);
1193
1194 /* Use the function's decl state. */
1195 decl_state = lto_get_function_in_decl_state (file_data, fn_decl);
1196 gcc_assert (decl_state);
1197 file_data->current_decl_state = decl_state;
1198
1199
1200 /* Set up the struct function. */
1201 from = data_in->reader_cache->nodes.length ();
1202 lto_input_block ib_main (data + main_offset, header->main_size,
1203 file_data->mode_table);
1204 if (TREE_CODE (node->decl) == FUNCTION_DECL)
1205 {
1206 lto_input_block ib_cfg (data + cfg_offset, header->cfg_size,
1207 file_data->mode_table);
1208 input_function (fn_decl, data_in, &ib_main, &ib_cfg);
1209 }
1210 else
1211 input_constructor (fn_decl, data_in, &ib_main);
1212 data_in->location_cache.apply_location_cache ();
1213 /* And fixup types we streamed locally. */
1214 {
1215 struct streamer_tree_cache_d *cache = data_in->reader_cache;
1216 unsigned len = cache->nodes.length ();
1217 unsigned i;
1218 for (i = len; i-- > from;)
1219 {
1220 tree t = streamer_tree_cache_get_tree (cache, i);
1221 if (t == NULL_TREE)
1222 continue;
1223
1224 if (TYPE_P (t))
1225 {
1226 gcc_assert (TYPE_CANONICAL (t) == NULL_TREE);
1227 if (type_with_alias_set_p (t)
1228 && canonical_type_used_p (t))
1229 TYPE_CANONICAL (t) = TYPE_MAIN_VARIANT (t);
1230 if (TYPE_MAIN_VARIANT (t) != t)
1231 {
1232 gcc_assert (TYPE_NEXT_VARIANT (t) == NULL_TREE);
1233 TYPE_NEXT_VARIANT (t)
1234 = TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t));
1235 TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t)) = t;
1236 }
1237 }
1238 }
1239 }
1240
1241 /* Restore decl state */
1242 file_data->current_decl_state = file_data->global_decl_state;
1243 }
1244
1245 lto_data_in_delete (data_in);
1246 }
1247
1248
1249 /* Read the body of NODE using DATA. FILE_DATA holds the global
1250 decls and types. */
1251
1252 void
1253 lto_input_function_body (struct lto_file_decl_data *file_data,
1254 struct cgraph_node *node, const char *data)
1255 {
1256 lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
1257 }
1258
1259 /* Read the constructor (initial value) of variable NODE using DATA.
1260 FILE_DATA holds the global decls and types. */
1261
1262 void
1263 lto_input_variable_constructor (struct lto_file_decl_data *file_data,
1264 struct varpool_node *node, const char *data)
1265 {
1266 lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
1267 }
1268
1269
1270 /* Read the physical representation of a tree node EXPR from
1271 input block IB using the per-file context in DATA_IN. */
1272
1273 static void
1274 lto_read_tree_1 (struct lto_input_block *ib, struct data_in *data_in, tree expr)
1275 {
1276 /* Read all the bitfield values in EXPR. Note that for LTO, we
1277 only write language-independent bitfields, so no more unpacking is
1278 needed. */
1279 streamer_read_tree_bitfields (ib, data_in, expr);
1280
1281 /* Read all the pointer fields in EXPR. */
1282 streamer_read_tree_body (ib, data_in, expr);
1283
1284 /* Read any LTO-specific data not read by the tree streamer. */
1285 if (DECL_P (expr)
1286 && TREE_CODE (expr) != FUNCTION_DECL
1287 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
1288 DECL_INITIAL (expr) = stream_read_tree (ib, data_in);
1289
1290 #ifdef LTO_STREAMER_DEBUG
1291 /* Remove the mapping to RESULT's original address set by
1292 streamer_alloc_tree. */
1293 lto_orig_address_remove (expr);
1294 #endif
1295 }
1296
1297 /* Read the physical representation of a tree node with tag TAG from
1298 input block IB using the per-file context in DATA_IN. */
1299
1300 static tree
1301 lto_read_tree (struct lto_input_block *ib, struct data_in *data_in,
1302 enum LTO_tags tag, hashval_t hash)
1303 {
1304 /* Instantiate a new tree node. */
1305 tree result = streamer_alloc_tree (ib, data_in, tag);
1306
1307 /* Enter RESULT in the reader cache. This will make RESULT
1308 available so that circular references in the rest of the tree
1309 structure can be resolved in subsequent calls to stream_read_tree. */
1310 streamer_tree_cache_append (data_in->reader_cache, result, hash);
1311
1312 lto_read_tree_1 (ib, data_in, result);
1313
1314 /* end_marker = */ streamer_read_uchar (ib);
1315
1316 return result;
1317 }
1318
1319
1320 /* Populate the reader cache with trees materialized from the SCC
1321 following in the IB, DATA_IN stream. */
1322
1323 hashval_t
1324 lto_input_scc (struct lto_input_block *ib, struct data_in *data_in,
1325 unsigned *len, unsigned *entry_len)
1326 {
1327 /* A blob of unnamed tree nodes, fill the cache from it and
1328 recurse. */
1329 unsigned size = streamer_read_uhwi (ib);
1330 hashval_t scc_hash = streamer_read_uhwi (ib);
1331 unsigned scc_entry_len = 1;
1332
1333 if (size == 1)
1334 {
1335 enum LTO_tags tag = streamer_read_record_start (ib);
1336 lto_input_tree_1 (ib, data_in, tag, scc_hash);
1337 }
1338 else
1339 {
1340 unsigned int first = data_in->reader_cache->nodes.length ();
1341 tree result;
1342
1343 scc_entry_len = streamer_read_uhwi (ib);
1344
1345 /* Materialize size trees by reading their headers. */
1346 for (unsigned i = 0; i < size; ++i)
1347 {
1348 enum LTO_tags tag = streamer_read_record_start (ib);
1349 if (tag == LTO_null
1350 || (tag >= LTO_field_decl_ref && tag <= LTO_global_decl_ref)
1351 || tag == LTO_tree_pickle_reference
1352 || tag == LTO_integer_cst
1353 || tag == LTO_tree_scc)
1354 gcc_unreachable ();
1355
1356 result = streamer_alloc_tree (ib, data_in, tag);
1357 streamer_tree_cache_append (data_in->reader_cache, result, 0);
1358 }
1359
1360 /* Read the tree bitpacks and references. */
1361 for (unsigned i = 0; i < size; ++i)
1362 {
1363 result = streamer_tree_cache_get_tree (data_in->reader_cache,
1364 first + i);
1365 lto_read_tree_1 (ib, data_in, result);
1366 /* end_marker = */ streamer_read_uchar (ib);
1367 }
1368 }
1369
1370 *len = size;
1371 *entry_len = scc_entry_len;
1372 return scc_hash;
1373 }
1374
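/* Illustrative sketch (comment only, not compiled) of the SCC record
   layout consumed by lto_input_scc above:

     LTO_tree_scc  size  scc_hash                     (size == 1)
       tag + header + body of the single tree

     LTO_tree_scc  size  scc_hash  scc_entry_len      (size > 1)
       tag_1 header_1 ... tag_n header_n   (allocate all nodes first)
       body_1 ... body_n                   (then fill in their fields)

   Materializing all headers before reading any body lets references
   inside the SCC resolve to other members of the same SCC.  */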
1375
1376 /* Read a tree from input block IB using the per-file context in
1377 DATA_IN. This context is used, for example, to resolve references
1378 to previously read nodes. */
1379
1380 tree
1381 lto_input_tree_1 (struct lto_input_block *ib, struct data_in *data_in,
1382 enum LTO_tags tag, hashval_t hash)
1383 {
1384 tree result;
1385
1386 gcc_assert ((unsigned) tag < (unsigned) LTO_NUM_TAGS);
1387
1388 if (tag == LTO_null)
1389 result = NULL_TREE;
1390 else if (tag >= LTO_field_decl_ref && tag <= LTO_namelist_decl_ref)
1391 {
1392 /* If TAG is a reference to an indexable tree, the next value
1393 in IB is the index into the table where we expect to find
1394 that tree. */
1395 result = lto_input_tree_ref (ib, data_in, cfun, tag);
1396 }
1397 else if (tag == LTO_tree_pickle_reference)
1398 {
1399 /* If TAG is a reference to a previously read tree, look it up in
1400 the reader cache. */
1401 result = streamer_get_pickled_tree (ib, data_in);
1402 }
1403 else if (tag == LTO_integer_cst)
1404 {
1405 /* For shared integer constants in singletons we can use the
1406 existing tree integer constant merging code. */
1407 tree type = stream_read_tree (ib, data_in);
1408 unsigned HOST_WIDE_INT len = streamer_read_uhwi (ib);
1409 unsigned HOST_WIDE_INT i;
1410 HOST_WIDE_INT a[WIDE_INT_MAX_ELTS];
1411
1412 for (i = 0; i < len; i++)
1413 a[i] = streamer_read_hwi (ib);
1414 gcc_assert (TYPE_PRECISION (type) <= MAX_BITSIZE_MODE_ANY_INT);
1415 result = wide_int_to_tree (type, wide_int::from_array
1416 (a, len, TYPE_PRECISION (type)));
1417 streamer_tree_cache_append (data_in->reader_cache, result, hash);
1418 }
1419 else if (tag == LTO_tree_scc)
1420 gcc_unreachable ();
1421 else
1422 {
1423 /* Otherwise, materialize a new node from IB. */
1424 result = lto_read_tree (ib, data_in, tag, hash);
1425 }
1426
1427 return result;
1428 }
1429
1430 tree
1431 lto_input_tree (struct lto_input_block *ib, struct data_in *data_in)
1432 {
1433 enum LTO_tags tag;
1434
1435 /* Input and skip SCCs. */
1436 while ((tag = streamer_read_record_start (ib)) == LTO_tree_scc)
1437 {
1438 unsigned len, entry_len;
1439 lto_input_scc (ib, data_in, &len, &entry_len);
1440 }
1441 return lto_input_tree_1 (ib, data_in, tag, 0);
1442 }
1443
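/* Illustrative note (comment only): lto_input_tree is installed as the
   read_tree streamer hook, so most readers reach it indirectly through
   the stream_read_tree wrapper used throughout this file, e.g.

     tree t = stream_read_tree (ib, data_in);  */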
1444
1445 /* Input toplevel asms. */
1446
1447 void
1448 lto_input_toplevel_asms (struct lto_file_decl_data *file_data, int order_base)
1449 {
1450 size_t len;
1451 const char *data = lto_get_section_data (file_data, LTO_section_asm,
1452 NULL, &len);
1453 const struct lto_simple_header_with_strings *header
1454 = (const struct lto_simple_header_with_strings *) data;
1455 int string_offset;
1456 struct data_in *data_in;
1457 tree str;
1458
1459 if (! data)
1460 return;
1461
1462 string_offset = sizeof (*header) + header->main_size;
1463
1464 lto_input_block ib (data + sizeof (*header), header->main_size,
1465 file_data->mode_table);
1466
1467 data_in = lto_data_in_create (file_data, data + string_offset,
1468 header->string_size, vNULL);
1469
1470 while ((str = streamer_read_string_cst (data_in, &ib)))
1471 {
1472 asm_node *node = symtab->finalize_toplevel_asm (str);
1473 node->order = streamer_read_hwi (&ib) + order_base;
1474 if (node->order >= symtab->order)
1475 symtab->order = node->order + 1;
1476 }
1477
1478 lto_data_in_delete (data_in);
1479
1480 lto_free_section_data (file_data, LTO_section_asm, NULL, data, len);
1481 }
1482
1483
1484 /* Input mode table. */
1485
1486 void
1487 lto_input_mode_table (struct lto_file_decl_data *file_data)
1488 {
1489 size_t len;
1490 const char *data = lto_get_section_data (file_data, LTO_section_mode_table,
1491 NULL, &len);
1492 if (! data)
1493 {
1494 internal_error ("cannot read LTO mode table from %s",
1495 file_data->file_name);
1496 return;
1497 }
1498
1499 unsigned char *table = ggc_cleared_vec_alloc<unsigned char> (1 << 8);
1500 file_data->mode_table = table;
1501 const struct lto_simple_header_with_strings *header
1502 = (const struct lto_simple_header_with_strings *) data;
1503 int string_offset;
1504 struct data_in *data_in;
1505 string_offset = sizeof (*header) + header->main_size;
1506
1507 lto_input_block ib (data + sizeof (*header), header->main_size, NULL);
1508 data_in = lto_data_in_create (file_data, data + string_offset,
1509 header->string_size, vNULL);
1510 bitpack_d bp = streamer_read_bitpack (&ib);
1511
1512 table[VOIDmode] = VOIDmode;
1513 table[BLKmode] = BLKmode;
1514 unsigned int m;
1515 while ((m = bp_unpack_value (&bp, 8)) != VOIDmode)
1516 {
1517 enum mode_class mclass
1518 = bp_unpack_enum (&bp, mode_class, MAX_MODE_CLASS);
1519 unsigned int size = bp_unpack_value (&bp, 8);
1520 unsigned int prec = bp_unpack_value (&bp, 16);
1521 machine_mode inner = (machine_mode) bp_unpack_value (&bp, 8);
1522 unsigned int nunits = bp_unpack_value (&bp, 8);
1523 unsigned int ibit = 0, fbit = 0;
1524 unsigned int real_fmt_len = 0;
1525 const char *real_fmt_name = NULL;
1526 switch (mclass)
1527 {
1528 case MODE_FRACT:
1529 case MODE_UFRACT:
1530 case MODE_ACCUM:
1531 case MODE_UACCUM:
1532 ibit = bp_unpack_value (&bp, 8);
1533 fbit = bp_unpack_value (&bp, 8);
1534 break;
1535 case MODE_FLOAT:
1536 case MODE_DECIMAL_FLOAT:
1537 real_fmt_name = bp_unpack_indexed_string (data_in, &bp,
1538 &real_fmt_len);
1539 break;
1540 default:
1541 break;
1542 }
1543 /* First search only from GET_CLASS_NARROWEST_MODE towards wider modes;
1544 if the mode is not found there, fall back to scanning all modes. */
1545 int pass;
1546 for (pass = 0; pass < 2; pass++)
1547 for (machine_mode mr = pass ? VOIDmode
1548 : GET_CLASS_NARROWEST_MODE (mclass);
1549 pass ? mr < MAX_MACHINE_MODE : mr != VOIDmode;
1550 pass ? mr = (machine_mode) (mr + 1)
1551 : mr = GET_MODE_WIDER_MODE (mr))
1552 if (GET_MODE_CLASS (mr) != mclass
1553 || GET_MODE_SIZE (mr) != size
1554 || GET_MODE_PRECISION (mr) != prec
1555 || (inner == m
1556 ? GET_MODE_INNER (mr) != mr
1557 : GET_MODE_INNER (mr) != table[(int) inner])
1558 || GET_MODE_IBIT (mr) != ibit
1559 || GET_MODE_FBIT (mr) != fbit
1560 || GET_MODE_NUNITS (mr) != nunits)
1561 continue;
1562 else if ((mclass == MODE_FLOAT || mclass == MODE_DECIMAL_FLOAT)
1563 && strcmp (REAL_MODE_FORMAT (mr)->name, real_fmt_name) != 0)
1564 continue;
1565 else
1566 {
1567 table[m] = mr;
1568 pass = 2;
1569 break;
1570 }
1571 unsigned int mname_len;
1572 const char *mname = bp_unpack_indexed_string (data_in, &bp, &mname_len);
1573 if (pass == 2)
1574 {
1575 switch (mclass)
1576 {
1577 case MODE_VECTOR_INT:
1578 case MODE_VECTOR_FLOAT:
1579 case MODE_VECTOR_FRACT:
1580 case MODE_VECTOR_UFRACT:
1581 case MODE_VECTOR_ACCUM:
1582 case MODE_VECTOR_UACCUM:
1583 /* For unsupported vector modes just use BLKmode,
1584 if the scalar mode is supported. */
1585 if (table[(int) inner] != VOIDmode)
1586 {
1587 table[m] = BLKmode;
1588 break;
1589 }
1590 /* FALLTHRU */
1591 default:
1592 fatal_error (UNKNOWN_LOCATION, "unsupported mode %s\n", mname);
1593 break;
1594 }
1595 }
1596 }
1597 lto_data_in_delete (data_in);
1598
1599 lto_free_section_data (file_data, LTO_section_mode_table, NULL, data, len);
1600 }
1601
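/* Illustrative note (comment only, details approximate): the table built
   above remaps machine mode numbers streamed by the producer onto the
   corresponding modes of this host, roughly

     machine_mode host_mode
       = (machine_mode) file_data->mode_table[streamed_mode_index];

   which is what the mode-unpacking helpers consult when a non-NULL mode
   table is passed to lto_input_block.  */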
1602
1603 /* Initialization for the LTO reader. */
1604
1605 void
1606 lto_reader_init (void)
1607 {
1608 lto_streamer_init ();
1609 file_name_hash_table
1610 = new hash_table<freeing_string_slot_hasher> (37);
1611 }
1612
1613
1614 /* Create a new data_in object for FILE_DATA. STRINGS is the string
1615 table to use, LEN bytes long. RESOLUTIONS is the vector of linker
1616 resolutions (vNULL if not using a linker plugin). */
1617
1618 struct data_in *
1619 lto_data_in_create (struct lto_file_decl_data *file_data, const char *strings,
1620 unsigned len,
1621 vec<ld_plugin_symbol_resolution_t> resolutions)
1622 {
1623 struct data_in *data_in = new (struct data_in);
1624 data_in->file_data = file_data;
1625 data_in->strings = strings;
1626 data_in->strings_len = len;
1627 data_in->globals_resolution = resolutions;
1628 data_in->reader_cache = streamer_tree_cache_create (false, false, true);
1629 return data_in;
1630 }
1631
1632
1633 /* Remove DATA_IN. */
1634
1635 void
1636 lto_data_in_delete (struct data_in *data_in)
1637 {
1638 data_in->globals_resolution.release ();
1639 streamer_tree_cache_delete (data_in->reader_cache);
1640 delete data_in;
1641 }
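
/* Illustrative sketch (comment only, not compiled) of the usual lifetime
   of a data_in object when reading one section, mirroring
   lto_read_body_or_constructor and lto_input_toplevel_asms above:

     struct data_in *data_in
       = lto_data_in_create (file_data, data + string_offset,
                             header->string_size, vNULL);
     ... read trees, bitpacks and strings from the section ...
     lto_data_in_delete (data_in);  */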