1 /* Search for references that a function loads or stores.
2 Copyright (C) 2020 Free Software Foundation, Inc.
3 Contributed by David Cepelik and Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* Mod/ref pass records summary about loads and stores performed by the
22 function. This is later used by alias analysis to disambiguate memory
23 accesses across function calls. The summary has a form of decision tree
24 described in ipa-modref-tree.h.
25
26 This file contains a tree pass and an IPA pass. Both perform the same
27 analysis, but the tree pass is executed during early and late optimization
28 passes to propagate info downwards in the compilation order, while the IPA
29 pass propagates across the callgraph, is able to handle recursion, and
30 works on the whole program during link-time analysis.
31
32 LTO mode differs from the local mode by not recording alias sets but types
33 that are translated to alias sets later. This is necessary in order to
34 stream the information, because the alias sets are rebuilt at stream-in time
35 and may not correspond to the ones seen during analysis. For this reason
36 part of the analysis is duplicated. */
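/* As an illustrative sketch (not part of the pass itself): for a function
   such as

     void set_field (struct s *ptr, int val)
     {
       ptr->field = val;
     }

   modref records, roughly, a store with base type "struct s", ref type
   "int" and an access to parameter 0, and no loads of global memory, so
   loads from unrelated memory in callers can be disambiguated against
   the call.  */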
37
38 #include "config.h"
39 #include "system.h"
40 #include "coretypes.h"
41 #include "backend.h"
42 #include "tree.h"
43 #include "gimple.h"
44 #include "alloc-pool.h"
45 #include "tree-pass.h"
46 #include "gimple-iterator.h"
47 #include "tree-dfa.h"
48 #include "cgraph.h"
49 #include "ipa-utils.h"
50 #include "symbol-summary.h"
51 #include "gimple-pretty-print.h"
52 #include "gimple-walk.h"
53 #include "print-tree.h"
54 #include "tree-streamer.h"
55 #include "alias.h"
56 #include "calls.h"
57 #include "ipa-modref-tree.h"
58 #include "ipa-modref.h"
59 #include "value-range.h"
60 #include "ipa-prop.h"
61 #include "ipa-fnsummary.h"
62 #include "attr-fnspec.h"
63 #include "symtab-clones.h"
64
65 /* We record fnspec specifiers for call edges since they depend on the
66 actual gimple statements. */
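/* For example (an illustrative note; see attr-fnspec.h for the exact
   encoding): a callee whose fnspec marks an argument as only read lets
   the caller model the call as a bounded load from that argument
   instead of a read of arbitrary global memory.  */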
67
68 class fnspec_summary
69 {
70 public:
71 char *fnspec;
72
73 fnspec_summary ()
74 : fnspec (NULL)
75 {
76 }
77
78 ~fnspec_summary ()
79 {
80 free (fnspec);
81 }
82 };
83
84 /* Summary holding fnspec string for a given call. */
85
86 class fnspec_summaries_t : public call_summary <fnspec_summary *>
87 {
88 public:
89 fnspec_summaries_t (symbol_table *symtab)
90 : call_summary <fnspec_summary *> (symtab) {}
91 /* Hook that is called by summary when an edge is duplicated. */
92 virtual void duplicate (cgraph_edge *,
93 cgraph_edge *,
94 fnspec_summary *src,
95 fnspec_summary *dst)
96 {
97 dst->fnspec = xstrdup (src->fnspec);
98 }
99 };
100
101 static fnspec_summaries_t *fnspec_summaries = NULL;
102
103 /* Class (of which there is one global instance) that holds modref summaries
104 for all analyzed functions. */
105
106 class GTY((user)) modref_summaries
107 : public fast_function_summary <modref_summary *, va_gc>
108 {
109 public:
110 modref_summaries (symbol_table *symtab)
111 : fast_function_summary <modref_summary *, va_gc> (symtab) {}
112 virtual void insert (cgraph_node *, modref_summary *state);
113 virtual void duplicate (cgraph_node *src_node,
114 cgraph_node *dst_node,
115 modref_summary *src_data,
116 modref_summary *dst_data);
117 static modref_summaries *create_ggc (symbol_table *symtab)
118 {
119 return new (ggc_alloc_no_dtor<modref_summaries> ())
120 modref_summaries (symtab);
121 }
122 };
123
124 class modref_summary_lto;
125
126 /* Class (of which there is one global instance) that holds modref LTO
127 summaries for all analyzed functions. */
128
129 class GTY((user)) modref_summaries_lto
130 : public fast_function_summary <modref_summary_lto *, va_gc>
131 {
132 public:
133 modref_summaries_lto (symbol_table *symtab)
134 : fast_function_summary <modref_summary_lto *, va_gc> (symtab),
135 propagated (false) {}
136 virtual void insert (cgraph_node *, modref_summary_lto *state);
137 virtual void duplicate (cgraph_node *src_node,
138 cgraph_node *dst_node,
139 modref_summary_lto *src_data,
140 modref_summary_lto *dst_data);
141 static modref_summaries_lto *create_ggc (symbol_table *symtab)
142 {
143 return new (ggc_alloc_no_dtor<modref_summaries_lto> ())
144 modref_summaries_lto (symtab);
145 }
146 bool propagated;
147 };
148
149 /* Global variable holding all modref summaries
150 (from analysis to IPA propagation time). */
151
152 static GTY(()) fast_function_summary <modref_summary *, va_gc>
153 *summaries;
154
155 /* Global variable holding all modref optimization summaries
156 (from IPA propagation time or used by local optimization pass). */
157
158 static GTY(()) fast_function_summary <modref_summary *, va_gc>
159 *optimization_summaries;
160
161 /* LTO summaries hold info from analysis to LTO streaming or from LTO
162 stream-in through propagation to LTO stream-out. */
163
164 static GTY(()) fast_function_summary <modref_summary_lto *, va_gc>
165 *summaries_lto;
166
167 /* Summary for a single function which this pass produces. */
168
169 modref_summary::modref_summary ()
170 : loads (NULL), stores (NULL), writes_errno (false)
171 {
172 }
173
174 modref_summary::~modref_summary ()
175 {
176 if (loads)
177 ggc_delete (loads);
178 if (stores)
179 ggc_delete (stores);
180 }
181
182 /* Return true if summary is potentially useful for optimization. */
183
184 bool
185 modref_summary::useful_p (int ecf_flags)
186 {
187 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
188 return false;
189 if (loads && !loads->every_base)
190 return true;
191 if (ecf_flags & ECF_PURE)
192 return false;
193 return stores && !stores->every_base;
194 }
195
196 /* Single function summary used for LTO. */
197
198 typedef modref_tree <tree> modref_records_lto;
199 struct GTY(()) modref_summary_lto
200 {
201 /* Loads and stores in the function, using types rather than alias sets.
202
203 This is necessary to make the information streamable for LTO but is also
204 more verbose and thus more likely to hit the limits. */
205 modref_records_lto *loads;
206 modref_records_lto *stores;
207 bool writes_errno;
208
209 modref_summary_lto ();
210 ~modref_summary_lto ();
211 void dump (FILE *);
212 bool useful_p (int ecf_flags);
213 };
214
215 /* Summary for a single function which this pass produces. */
216
217 modref_summary_lto::modref_summary_lto ()
218 : loads (NULL), stores (NULL), writes_errno (false)
219 {
220 }
221
222 modref_summary_lto::~modref_summary_lto ()
223 {
224 if (loads)
225 ggc_delete (loads);
226 if (stores)
227 ggc_delete (stores);
228 }
229
230
231 /* Return true if lto summary is potentially useful for optimization. */
232
233 bool
234 modref_summary_lto::useful_p (int ecf_flags)
235 {
236 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
237 return false;
238 if (loads && !loads->every_base)
239 return true;
240 if (ecf_flags & ECF_PURE)
241 return false;
242 return stores && !stores->every_base;
243 }
244
245 /* Dump A to OUT. */
246
247 static void
248 dump_access (modref_access_node *a, FILE *out)
249 {
250 fprintf (out, " access:");
251 if (a->parm_index != -1)
252 {
253 fprintf (out, " Parm %i", a->parm_index);
254 if (a->parm_offset_known)
255 {
256 fprintf (out, " param offset:");
257 print_dec ((poly_int64_pod)a->parm_offset, out, SIGNED);
258 }
259 }
260 if (a->range_info_useful_p ())
261 {
262 fprintf (out, " offset:");
263 print_dec ((poly_int64_pod)a->offset, out, SIGNED);
264 fprintf (out, " size:");
265 print_dec ((poly_int64_pod)a->size, out, SIGNED);
266 fprintf (out, " max_size:");
267 print_dec ((poly_int64_pod)a->max_size, out, SIGNED);
268 }
269 fprintf (out, "\n");
270 }
271
272 /* Dump records TT to OUT. */
273
274 static void
275 dump_records (modref_records *tt, FILE *out)
276 {
277 fprintf (out, " Limits: %i bases, %i refs\n",
278 (int)tt->max_bases, (int)tt->max_refs);
279 if (tt->every_base)
280 {
281 fprintf (out, " Every base\n");
282 return;
283 }
284 size_t i;
285 modref_base_node <alias_set_type> *n;
286 FOR_EACH_VEC_SAFE_ELT (tt->bases, i, n)
287 {
288 fprintf (out, " Base %i: alias set %i\n", (int)i, n->base);
289 if (n->every_ref)
290 {
291 fprintf (out, " Every ref\n");
292 continue;
293 }
294 size_t j;
295 modref_ref_node <alias_set_type> *r;
296 FOR_EACH_VEC_SAFE_ELT (n->refs, j, r)
297 {
298 fprintf (out, " Ref %i: alias set %i\n", (int)j, r->ref);
299 if (r->every_access)
300 {
301 fprintf (out, " Every access\n");
302 continue;
303 }
304 size_t k;
305 modref_access_node *a;
306 FOR_EACH_VEC_SAFE_ELT (r->accesses, k, a)
307 dump_access (a, out);
308 }
309 }
310 }
311
312 /* Dump records TT to OUT. */
313
314 static void
315 dump_lto_records (modref_records_lto *tt, FILE *out)
316 {
317 fprintf (out, " Limits: %i bases, %i refs\n",
318 (int)tt->max_bases, (int)tt->max_refs);
319 if (tt->every_base)
320 {
321 fprintf (out, " Every base\n");
322 return;
323 }
324 size_t i;
325 modref_base_node <tree> *n;
326 FOR_EACH_VEC_SAFE_ELT (tt->bases, i, n)
327 {
328 fprintf (out, " Base %i:", (int)i);
329 print_generic_expr (dump_file, n->base);
330 fprintf (out, " (alias set %i)\n",
331 n->base ? get_alias_set (n->base) : 0);
332 if (n->every_ref)
333 {
334 fprintf (out, " Every ref\n");
335 continue;
336 }
337 size_t j;
338 modref_ref_node <tree> *r;
339 FOR_EACH_VEC_SAFE_ELT (n->refs, j, r)
340 {
341 fprintf (out, " Ref %i:", (int)j);
342 print_generic_expr (dump_file, r->ref);
343 fprintf (out, " (alias set %i)\n",
344 r->ref ? get_alias_set (r->ref) : 0);
345 if (r->every_access)
346 {
347 fprintf (out, " Every access\n");
348 continue;
349 }
350 size_t k;
351 modref_access_node *a;
352 FOR_EACH_VEC_SAFE_ELT (r->accesses, k, a)
353 dump_access (a, out);
354 }
355 }
356 }
357
358 /* Dump summary. */
359
360 void
361 modref_summary::dump (FILE *out)
362 {
363 if (loads)
364 {
365 fprintf (out, " loads:\n");
366 dump_records (loads, out);
367 }
368 if (stores)
369 {
370 fprintf (out, " stores:\n");
371 dump_records (stores, out);
372 }
373 if (writes_errno)
374 fprintf (out, " Writes errno\n");
375 }
376
377 /* Dump summary. */
378
379 void
380 modref_summary_lto::dump (FILE *out)
381 {
382 fprintf (out, " loads:\n");
383 dump_lto_records (loads, out);
384 fprintf (out, " stores:\n");
385 dump_lto_records (stores, out);
386 if (writes_errno)
387 fprintf (out, " Writes errno\n");
388 }
389
390 /* Get function summary for FUNC if it exists, return NULL otherwise. */
391
392 modref_summary *
393 get_modref_function_summary (cgraph_node *func)
394 {
395 /* Avoid creation of the summary too early (e.g. when front-end calls us). */
396 if (!optimization_summaries)
397 return NULL;
398
399 /* A single function body may be represented by multiple symbols with
400 different visibility. For example, if FUNC is an interposable alias,
401 we don't want to return anything, even if we have a summary for the
402 target function. */
403 enum availability avail;
404 func = func->function_or_virtual_thunk_symbol
405 (&avail, cgraph_node::get (current_function_decl));
406 if (avail <= AVAIL_INTERPOSABLE)
407 return NULL;
408
409 modref_summary *r = optimization_summaries->get (func);
410 return r;
411 }
412
413 /* Construct modref_access_node from REF. */
414 static modref_access_node
415 get_access (ao_ref *ref)
416 {
417 tree base;
418
419 base = ao_ref_base (ref);
420 modref_access_node a = {ref->offset, ref->size, ref->max_size,
421 0, -1, false};
422 if (TREE_CODE (base) == MEM_REF || TREE_CODE (base) == TARGET_MEM_REF)
423 {
424 tree memref = base;
425 base = TREE_OPERAND (base, 0);
426 if (TREE_CODE (base) == SSA_NAME
427 && SSA_NAME_IS_DEFAULT_DEF (base)
428 && TREE_CODE (SSA_NAME_VAR (base)) == PARM_DECL)
429 {
430 a.parm_index = 0;
431 for (tree t = DECL_ARGUMENTS (current_function_decl);
432 t != SSA_NAME_VAR (base); t = DECL_CHAIN (t))
433 {
434 if (!t)
435 {
436 a.parm_index = -1;
437 break;
438 }
439 a.parm_index++;
440 }
441 if (TREE_CODE (memref) == MEM_REF)
442 {
443 a.parm_offset_known
444 = wi::to_poly_wide (TREE_OPERAND
445 (memref, 1)).to_shwi (&a.parm_offset);
446 }
447 else
448 a.parm_offset_known = false;
449 }
450 else
451 a.parm_index = -1;
452 }
453 else
454 a.parm_index = -1;
455 return a;
456 }
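/* An illustrative example: if the current function is f (char *p) and
   REF is *(p + 4), the returned node has parm_index 0 with
   parm_offset_known true and parm_offset 4; when the base is not a
   dereference of a parameter, parm_index is -1 (unknown).  */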
457
458 /* Record access into the modref_records data structure. */
459
460 static void
461 record_access (modref_records *tt, ao_ref *ref)
462 {
463 alias_set_type base_set = !flag_strict_aliasing ? 0
464 : ao_ref_base_alias_set (ref);
465 alias_set_type ref_set = !flag_strict_aliasing ? 0
466 : (ao_ref_alias_set (ref));
467 modref_access_node a = get_access (ref);
468 if (dump_file)
469 {
470 fprintf (dump_file, " - Recording base_set=%i ref_set=%i parm=%i\n",
471 base_set, ref_set, a.parm_index);
472 }
473 tt->insert (base_set, ref_set, a);
474 }
475
476 /* IPA (LTO) version of record_access. */
477
478 static void
479 record_access_lto (modref_records_lto *tt, ao_ref *ref)
480 {
481 /* get_alias_set sometimes uses a different type to compute the alias set
482 than TREE_TYPE (base). Do the same adjustments here. */
483 tree base_type = NULL_TREE, ref_type = NULL_TREE;
484 if (flag_strict_aliasing)
485 {
486 tree base;
487
488 base = ref->ref;
489 while (handled_component_p (base))
490 base = TREE_OPERAND (base, 0);
491
492 base_type = reference_alias_ptr_type_1 (&base);
493
494 if (!base_type)
495 base_type = TREE_TYPE (base);
496 else
497 base_type = TYPE_REF_CAN_ALIAS_ALL (base_type)
498 ? NULL_TREE : TREE_TYPE (base_type);
499
500 tree ref_expr = ref->ref;
501 ref_type = reference_alias_ptr_type_1 (&ref_expr);
502
503 if (!ref_type)
504 ref_type = TREE_TYPE (ref_expr);
505 else
506 ref_type = TYPE_REF_CAN_ALIAS_ALL (ref_type)
507 ? NULL_TREE : TREE_TYPE (ref_type);
508
509 /* Sanity check that we are in sync with what get_alias_set does. */
510 gcc_checking_assert ((!base_type && !ao_ref_base_alias_set (ref))
511 || get_alias_set (base_type)
512 == ao_ref_base_alias_set (ref));
513 gcc_checking_assert ((!ref_type && !ao_ref_alias_set (ref))
514 || get_alias_set (ref_type)
515 == ao_ref_alias_set (ref));
516
517 /* Do not bother to record types that have no meaningful alias set.
518 Also skip variably modified types since these go to local streams. */
519 if (base_type && (!get_alias_set (base_type)
520 || variably_modified_type_p (base_type, NULL_TREE)))
521 base_type = NULL_TREE;
522 if (ref_type && (!get_alias_set (ref_type)
523 || variably_modified_type_p (ref_type, NULL_TREE)))
524 ref_type = NULL_TREE;
525 }
526 modref_access_node a = get_access (ref);
527 if (dump_file)
528 {
529 fprintf (dump_file, " - Recording base type:");
530 print_generic_expr (dump_file, base_type);
531 fprintf (dump_file, " (alias set %i) ref type:",
532 base_type ? get_alias_set (base_type) : 0);
533 print_generic_expr (dump_file, ref_type);
534 fprintf (dump_file, " (alias set %i) parm:%i\n",
535 ref_type ? get_alias_set (ref_type) : 0,
536 a.parm_index);
537 }
538
539 tt->insert (base_type, ref_type, a);
540 }
541
542 /* Returns true if and only if we should store the access to EXPR.
543 Some accesses, e.g. loads from automatic variables, are not interesting. */
544
545 static bool
546 record_access_p (tree expr)
547 {
548 if (refs_local_or_readonly_memory_p (expr))
549 {
550 if (dump_file)
551 fprintf (dump_file, " - Read-only or local, ignoring.\n");
552 return false;
553 }
554 return true;
555 }
556
557 /* Return true if the ECF flags say that stores can be ignored. */
558
559 static bool
560 ignore_stores_p (tree caller, int flags)
561 {
562 if (flags & ECF_PURE)
563 return true;
564 if ((flags & (ECF_NORETURN | ECF_NOTHROW)) == (ECF_NORETURN | ECF_NOTHROW)
565 || (!opt_for_fn (caller, flag_exceptions) && (flags & ECF_NORETURN)))
566 return true;
567 return false;
568 }
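/* Illustrative cases: a pure callee does not modify caller-visible
   memory at all, and stores performed by a noreturn+nothrow callee
   (e.g. abort) can never be observed by the caller, so in both
   situations the callee's stores need not be merged.  */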
569
570 /* Determine parm_map for argument I of STMT. */
571
572 modref_parm_map
573 parm_map_for_arg (gimple *stmt, int i)
574 {
575 tree op = gimple_call_arg (stmt, i);
576 bool offset_known;
577 poly_int64 offset;
578 struct modref_parm_map parm_map;
579
580 parm_map.parm_offset_known = false;
581 parm_map.parm_offset = 0;
582
583 offset_known = unadjusted_ptr_and_unit_offset (op, &op, &offset);
584 if (TREE_CODE (op) == SSA_NAME
585 && SSA_NAME_IS_DEFAULT_DEF (op)
586 && TREE_CODE (SSA_NAME_VAR (op)) == PARM_DECL)
587 {
588 int index = 0;
589 for (tree t = DECL_ARGUMENTS (current_function_decl);
590 t != SSA_NAME_VAR (op); t = DECL_CHAIN (t))
591 {
592 if (!t)
593 {
594 index = -1;
595 break;
596 }
597 index++;
598 }
599 parm_map.parm_index = index;
600 parm_map.parm_offset_known = offset_known;
601 parm_map.parm_offset = offset;
602 }
603 else if (points_to_local_or_readonly_memory_p (op))
604 parm_map.parm_index = -2;
605 else
606 parm_map.parm_index = -1;
607 return parm_map;
608 }
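/* An illustrative example: in void g (int *a, int *b), for the call
   foo (b, &local) this returns parm_index 1 (offset 0) for the first
   argument and parm_index -2 for the second, since &local points to
   local memory whose accesses need not be recorded.  */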
609
610 /* Merge side effects of call STMT to a function with CALLEE_SUMMARY
611 into CUR_SUMMARY. Return true if something changed.
612 If IGNORE_STORES is true, do not merge stores. */
613
614 bool
615 merge_call_side_effects (modref_summary *cur_summary,
616 gimple *stmt, modref_summary *callee_summary,
617 bool ignore_stores, cgraph_node *callee_node)
618 {
619 auto_vec <modref_parm_map, 32> parm_map;
620 bool changed = false;
621
622 if (dump_file)
623 fprintf (dump_file, " - Merging side effects of %s with parm map:",
624 callee_node->dump_name ());
625
626 /* We cannot safely optimize based on the summary of the callee if it
627 does not always bind to the current def: it is possible that a memory
628 load was optimized out earlier, which may not happen in the interposed
629 variant. */
630 if (!callee_node->binds_to_current_def_p ())
631 {
632 if (dump_file)
633 fprintf (dump_file, " - May be interposed: collapsing loads.\n");
634 cur_summary->loads->collapse ();
635 }
636
637 parm_map.safe_grow_cleared (gimple_call_num_args (stmt));
638 for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
639 {
640 parm_map[i] = parm_map_for_arg (stmt, i);
641 if (dump_file)
642 {
643 fprintf (dump_file, " %i", parm_map[i].parm_index);
644 if (parm_map[i].parm_offset_known)
645 {
646 fprintf (dump_file, " offset:");
647 print_dec ((poly_int64_pod)parm_map[i].parm_offset,
648 dump_file, SIGNED);
649 }
650 }
651 }
652 if (dump_file)
653 fprintf (dump_file, "\n");
654
655 /* Merge with callee's summary. */
656 changed |= cur_summary->loads->merge (callee_summary->loads, &parm_map);
657 if (!ignore_stores)
658 {
659 changed |= cur_summary->stores->merge (callee_summary->stores,
660 &parm_map);
661 if (!cur_summary->writes_errno
662 && callee_summary->writes_errno)
663 {
664 cur_summary->writes_errno = true;
665 changed = true;
666 }
667 }
668 return changed;
669 }
670
671 /* Return the access node for argument I of call CALL with FNSPEC. */
672
673 static modref_access_node
674 get_access_for_fnspec (gcall *call, attr_fnspec &fnspec,
675 unsigned int i, modref_parm_map &map)
676 {
677 tree size = NULL_TREE;
678 unsigned int size_arg;
679
680 if (!fnspec.arg_specified_p (i))
681 ;
682 else if (fnspec.arg_max_access_size_given_by_arg_p (i, &size_arg))
683 size = gimple_call_arg (call, size_arg);
684 else if (fnspec.arg_access_size_given_by_type_p (i))
685 {
686 tree callee = gimple_call_fndecl (call);
687 tree t = TYPE_ARG_TYPES (TREE_TYPE (callee));
688
689 for (unsigned int p = 0; p < i; p++)
690 t = TREE_CHAIN (t);
691 size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t)));
692 }
693 modref_access_node a = {0, -1, -1,
694 map.parm_offset, map.parm_index,
695 map.parm_offset_known};
696 poly_int64 size_hwi;
697 if (size
698 && poly_int_tree_p (size, &size_hwi)
699 && coeffs_in_range_p (size_hwi, 0,
700 HOST_WIDE_INT_MAX / BITS_PER_UNIT))
701 {
702 a.size = -1;
703 a.max_size = size_hwi << LOG2_BITS_PER_UNIT;
704 }
705 return a;
706 }
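/* An illustrative example: for a memset-like callee whose fnspec says
   that the maximum access size for the pointer argument is given by
   another argument, and that argument is the constant 32, the returned
   node describes an access of at most 32 bytes starting at the pointer
   instead of an unbounded access.  */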
707
708 /* Collapse loads and return true if something changed. */
709
710 static bool
711 collapse_loads (modref_summary *cur_summary,
712 modref_summary_lto *cur_summary_lto)
713 {
714 bool changed = false;
715
716 if (cur_summary && !cur_summary->loads->every_base)
717 {
718 cur_summary->loads->collapse ();
719 changed = true;
720 }
721 if (cur_summary_lto
722 && !cur_summary_lto->loads->every_base)
723 {
724 cur_summary_lto->loads->collapse ();
725 changed = true;
726 }
727 return changed;
728 }
729
730 /* Collapse stores and return true if something changed. */
731
732 static bool
733 collapse_stores (modref_summary *cur_summary,
734 modref_summary_lto *cur_summary_lto)
735 {
736 bool changed = false;
737
738 if (cur_summary && !cur_summary->stores->every_base)
739 {
740 cur_summary->stores->collapse ();
741 changed = true;
742 }
743 if (cur_summary_lto
744 && !cur_summary_lto->stores->every_base)
745 {
746 cur_summary_lto->stores->collapse ();
747 changed = true;
748 }
749 return changed;
750 }
751
752
753 /* Apply side effects of call CALL to CUR_SUMMARY and CUR_SUMMARY_LTO
754 using its fnspec. If IGNORE_STORES is true, ignore the stores.
755 Return false if no useful summary can be produced. */
756
757 static bool
758 process_fnspec (modref_summary *cur_summary,
759 modref_summary_lto *cur_summary_lto,
760 gcall *call, bool ignore_stores)
761 {
762 attr_fnspec fnspec = gimple_call_fnspec (call);
763 if (!fnspec.known_p ())
764 {
765 if (dump_file && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
766 fprintf (dump_file, " Builtin with no fnspec: %s\n",
767 IDENTIFIER_POINTER (DECL_NAME (gimple_call_fndecl (call))));
768 if (ignore_stores)
769 {
770 collapse_loads (cur_summary, cur_summary_lto);
771 return true;
772 }
773 return false;
774 }
775 if (fnspec.global_memory_read_p ())
776 collapse_loads (cur_summary, cur_summary_lto);
777 else
778 {
779 for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
780 if (!POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i))))
781 ;
782 else if (!fnspec.arg_specified_p (i)
783 || fnspec.arg_maybe_read_p (i))
784 {
785 modref_parm_map map = parm_map_for_arg (call, i);
786
787 if (map.parm_index == -2)
788 continue;
789 if (map.parm_index == -1)
790 {
791 collapse_loads (cur_summary, cur_summary_lto);
792 break;
793 }
794 if (cur_summary)
795 cur_summary->loads->insert (0, 0,
796 get_access_for_fnspec (call,
797 fnspec, i,
798 map));
799 if (cur_summary_lto)
800 cur_summary_lto->loads->insert (0, 0,
801 get_access_for_fnspec (call,
802 fnspec, i,
803 map));
804 }
805 }
806 if (ignore_stores)
807 return true;
808 if (fnspec.global_memory_written_p ())
809 collapse_stores (cur_summary, cur_summary_lto);
810 else
811 {
812 for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
813 if (!POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i))))
814 ;
815 else if (!fnspec.arg_specified_p (i)
816 || fnspec.arg_maybe_written_p (i))
817 {
818 modref_parm_map map = parm_map_for_arg (call, i);
819
820 if (map.parm_index == -2)
821 continue;
822 if (map.parm_index == -1)
823 {
824 collapse_stores (cur_summary, cur_summary_lto);
825 break;
826 }
827 if (cur_summary)
828 cur_summary->stores->insert (0, 0,
829 get_access_for_fnspec (call,
830 fnspec, i,
831 map));
832 if (cur_summary_lto)
833 cur_summary_lto->stores->insert (0, 0,
834 get_access_for_fnspec (call,
835 fnspec, i,
836 map));
837 }
838 if (fnspec.errno_maybe_written_p () && flag_errno_math)
839 {
840 if (cur_summary)
841 cur_summary->writes_errno = true;
842 if (cur_summary_lto)
843 cur_summary_lto->writes_errno = true;
844 }
845 }
846 return true;
847 }
848
849 /* Analyze function call STMT and merge its effects into CUR_SUMMARY
850 and CUR_SUMMARY_LTO. Remember recursive calls in RECURSIVE_CALLS. */
851
852 static bool
853 analyze_call (modref_summary *cur_summary, modref_summary_lto *cur_summary_lto,
854 gcall *stmt, vec <gimple *> *recursive_calls)
855 {
856 /* Check flags on the function call. In certain cases, analysis can be
857 simplified. */
858 int flags = gimple_call_flags (stmt);
859 if (flags & (ECF_CONST | ECF_NOVOPS))
860 {
861 if (dump_file)
862 fprintf (dump_file,
863 " - ECF_CONST | ECF_NOVOPS, ignoring all stores and all loads "
864 "except for args.\n");
865 return true;
866 }
867
868 /* Pure functions do not affect global memory. Stores by functions which are
869 noreturn and do not throw can safely be ignored. */
870 bool ignore_stores = ignore_stores_p (current_function_decl, flags);
871
872 /* Next, we try to get the callee's function declaration. The goal is to
873 merge its summary with ours. */
874 tree callee = gimple_call_fndecl (stmt);
875
876 /* Check if this is an indirect call. */
877 if (!callee)
878 {
879 if (dump_file)
880 fprintf (dump_file, gimple_call_internal_p (stmt)
881 ? " - Internal call.\n" : " - Indirect call.\n");
882 return process_fnspec (cur_summary, cur_summary_lto, stmt, ignore_stores);
883 }
884 /* We only need to handle internal calls in IPA mode. */
885 gcc_checking_assert (!cur_summary_lto);
886
887 struct cgraph_node *callee_node = cgraph_node::get_create (callee);
888
889 /* If this is a recursive call, the target summary is the same as ours, so
890 just remember the call; analyze_function iterates over such calls later. */
891 if (recursive_call_p (current_function_decl, callee))
892 {
893 recursive_calls->safe_push (stmt);
894 if (dump_file)
895 fprintf (dump_file, " - Skipping recursive call.\n");
896 return true;
897 }
898
899 gcc_assert (callee_node != NULL);
900
901 /* Get the function symbol and its availability. */
902 enum availability avail;
903 callee_node = callee_node->function_symbol (&avail);
904 if (avail <= AVAIL_INTERPOSABLE)
905 {
906 if (dump_file)
907 fprintf (dump_file, " - Function availability <= AVAIL_INTERPOSABLE.\n");
908 return process_fnspec (cur_summary, cur_summary_lto, stmt, ignore_stores);
909 }
910
911 /* Get callee's modref summary. As above, if there's no summary, we either
912 have to give up or, if stores are ignored, we can just purge loads. */
913 modref_summary *callee_summary = optimization_summaries->get (callee_node);
914 if (!callee_summary)
915 {
916 if (dump_file)
917 fprintf (dump_file, " - No modref summary available for callee.\n");
918 return process_fnspec (cur_summary, cur_summary_lto, stmt, ignore_stores);
919 }
920
921 merge_call_side_effects (cur_summary, stmt, callee_summary, ignore_stores,
922 callee_node);
923
924 return true;
925 }
926
927 /* Support analysis in non-LTO and LTO mode in parallel. */
928
929 struct summary_ptrs
930 {
931 struct modref_summary *nolto;
932 struct modref_summary_lto *lto;
933 };
934
935 /* Helper for analyze_stmt. */
936
937 static bool
938 analyze_load (gimple *, tree, tree op, void *data)
939 {
940 modref_summary *summary = ((summary_ptrs *)data)->nolto;
941 modref_summary_lto *summary_lto = ((summary_ptrs *)data)->lto;
942
943 if (dump_file)
944 {
945 fprintf (dump_file, " - Analyzing load: ");
946 print_generic_expr (dump_file, op);
947 fprintf (dump_file, "\n");
948 }
949
950 if (!record_access_p (op))
951 return false;
952
953 ao_ref r;
954 ao_ref_init (&r, op);
955
956 if (summary)
957 record_access (summary->loads, &r);
958 if (summary_lto)
959 record_access_lto (summary_lto->loads, &r);
960 return false;
961 }
962
963 /* Helper for analyze_stmt. */
964
965 static bool
966 analyze_store (gimple *, tree, tree op, void *data)
967 {
968 modref_summary *summary = ((summary_ptrs *)data)->nolto;
969 modref_summary_lto *summary_lto = ((summary_ptrs *)data)->lto;
970
971 if (dump_file)
972 {
973 fprintf (dump_file, " - Analyzing store: ");
974 print_generic_expr (dump_file, op);
975 fprintf (dump_file, "\n");
976 }
977
978 if (!record_access_p (op))
979 return false;
980
981 ao_ref r;
982 ao_ref_init (&r, op);
983
984 if (summary)
985 record_access (summary->stores, &r);
986 if (summary_lto)
987 record_access_lto (summary_lto->stores, &r);
988 return false;
989 }
990
991 /* Analyze statement STMT.
992 If IPA is true, do not merge in side effects of calls. */
993
994 static bool
995 analyze_stmt (modref_summary *summary, modref_summary_lto *summary_lto,
996 gimple *stmt, bool ipa, vec <gimple *> *recursive_calls)
997 {
998 /* In general we cannot ignore clobbers because they are barriers for code
999 motion; however, after inlining it is safe to do so because local
1000 optimization passes do not consider clobbers from other functions.
1001 Similar logic is in ipa-pure-const.c. */
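/* For instance (an illustrative note), "x ={v} {CLOBBER};" only marks
   the end of the lifetime of x and stores no meaningful value, so
   skipping it after inlining loses no mod/ref information.  */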
1002 if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
1003 return true;
1004
1005 struct summary_ptrs sums = {summary, summary_lto};
1006
1007 /* Analyze all loads and stores in STMT. */
1008 walk_stmt_load_store_ops (stmt, &sums,
1009 analyze_load, analyze_store);
1010
1011 switch (gimple_code (stmt))
1012 {
1013 case GIMPLE_ASM:
1014 /* If the ASM statement does not read nor write memory, there's nothing
1015 to do. Otherwise just give up. */
1016 if (!gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
1017 return true;
1018 if (dump_file)
1019 fprintf (dump_file, " - Function contains GIMPLE_ASM statement "
1020 "which clobbers memory.\n");
1021 return false;
1022 case GIMPLE_CALL:
1023 if (!ipa || gimple_call_internal_p (stmt))
1024 return analyze_call (summary, summary_lto,
1025 as_a <gcall *> (stmt), recursive_calls);
1026 else
1027 {
1028 attr_fnspec fnspec = gimple_call_fnspec (as_a <gcall *>(stmt));
1029
1030 if (fnspec.known_p ()
1031 && (!fnspec.global_memory_read_p ()
1032 || !fnspec.global_memory_written_p ()))
1033 {
1034 fnspec_summaries->get_create
1035 (cgraph_node::get (current_function_decl)->get_edge (stmt))
1036 ->fnspec = xstrdup (fnspec.get_str ());
1037 if (dump_file)
1038 fprintf (dump_file, " Recorded fnspec %s\n", fnspec.get_str ());
1039 }
1040 }
1041 return true;
1042 default:
1043 /* Nothing to do for other types of statements. */
1044 return true;
1045 }
1046 }
1047
1048 /* Remove summary of current function because during the function body
1049 scan we determined it is not useful. LTO, NOLTO and IPA determine the
1050 mode of the scan. */
1051
1052 static void
1053 remove_summary (bool lto, bool nolto, bool ipa)
1054 {
1055 cgraph_node *fnode = cgraph_node::get (current_function_decl);
1056 if (!ipa)
1057 optimization_summaries->remove (fnode);
1058 else
1059 {
1060 if (nolto)
1061 summaries->remove (fnode);
1062 if (lto)
1063 summaries_lto->remove (fnode);
1064 }
1065 if (dump_file)
1066 fprintf (dump_file,
1067 " - modref done with result: not tracked.\n");
1068 }
1069
1070 /* Analyze function F. IPA indicates whether we're running in local mode
1071 (false) or the IPA mode (true). */
1072
1073 static void
1074 analyze_function (function *f, bool ipa)
1075 {
1076 if (dump_file)
1077 fprintf (dump_file, "modref analyzing '%s' (ipa=%i)%s%s\n",
1078 function_name (f), ipa,
1079 TREE_READONLY (current_function_decl) ? " (const)" : "",
1080 DECL_PURE_P (current_function_decl) ? " (pure)" : "");
1081
1082 /* Don't analyze this function if modref is disabled (-fno-ipa-modref). */
1083 if (!flag_ipa_modref)
1084 return;
1085
1086 /* Compute no-LTO summaries when local optimization is going to happen. */
1087 bool nolto = (!ipa || ((!flag_lto || flag_fat_lto_objects) && !in_lto_p)
1088 || (in_lto_p && !flag_wpa
1089 && flag_incremental_link != INCREMENTAL_LINK_LTO));
1090 /* Compute LTO when LTO streaming is going to happen. */
1091 bool lto = ipa && ((flag_lto && !in_lto_p)
1092 || flag_wpa
1093 || flag_incremental_link == INCREMENTAL_LINK_LTO);
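/* Illustratively, given the conditions above: a plain non-LTO compilation
   computes only the no-LTO summary; the -flto compile stage and WPA
   compute only the LTO summary; an LTRANS unit computes the no-LTO
   summary again; -ffat-lto-objects computes both.  */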
1094 cgraph_node *fnode = cgraph_node::get (current_function_decl);
1095
1096 modref_summary *summary = NULL;
1097 modref_summary_lto *summary_lto = NULL;
1098
1099 /* Initialize the summary.
1100 If we run in local mode there is possibly a pre-existing summary from
1101 the IPA pass. Dump it so it is easy to see whether the mod/ref info
1102 has improved. */
1103 if (!ipa)
1104 {
1105 if (!optimization_summaries)
1106 optimization_summaries = modref_summaries::create_ggc (symtab);
1107 else /* Remove existing summary if we are re-running the pass. */
1108 {
1109 if (dump_file
1110 && (summary
1111 = optimization_summaries->get (cgraph_node::get (f->decl)))
1112 != NULL
1113 && summary->loads)
1114 {
1115 fprintf (dump_file, "Past summary:\n");
1116 optimization_summaries->get
1117 (cgraph_node::get (f->decl))->dump (dump_file);
1118 }
1119 optimization_summaries->remove (cgraph_node::get (f->decl));
1120 }
1121 summary = optimization_summaries->get_create (cgraph_node::get (f->decl));
1122 gcc_checking_assert (nolto && !lto);
1123 }
1124 /* In IPA mode we analyze every function precisely once; remove any stale summary from a previous run. */
1125 else
1126 {
1127 if (nolto)
1128 {
1129 if (!summaries)
1130 summaries = modref_summaries::create_ggc (symtab);
1131 else
1132 summaries->remove (cgraph_node::get (f->decl));
1133 summary = summaries->get_create (cgraph_node::get (f->decl));
1134 }
1135 if (lto)
1136 {
1137 if (!summaries_lto)
1138 summaries_lto = modref_summaries_lto::create_ggc (symtab);
1139 else
1140 summaries_lto->remove (cgraph_node::get (f->decl));
1141 summary_lto = summaries_lto->get_create (cgraph_node::get (f->decl));
1142 }
1143 if (!fnspec_summaries)
1144 fnspec_summaries = new fnspec_summaries_t (symtab);
1145 }
1146
1147
1148 /* Create and initialize summary for F.
1149 Note that summaries may be already allocated from a previous
1150 run of the pass. */
1151 if (nolto)
1152 {
1153 gcc_assert (!summary->loads);
1154 summary->loads = modref_records::create_ggc (param_modref_max_bases,
1155 param_modref_max_refs,
1156 param_modref_max_accesses);
1157 gcc_assert (!summary->stores);
1158 summary->stores = modref_records::create_ggc (param_modref_max_bases,
1159 param_modref_max_refs,
1160 param_modref_max_accesses);
1161 summary->writes_errno = false;
1162 }
1163 if (lto)
1164 {
1165 gcc_assert (!summary_lto->loads);
1166 summary_lto->loads = modref_records_lto::create_ggc
1167 (param_modref_max_bases,
1168 param_modref_max_refs,
1169 param_modref_max_accesses);
1170 gcc_assert (!summary_lto->stores);
1171 summary_lto->stores = modref_records_lto::create_ggc
1172 (param_modref_max_bases,
1173 param_modref_max_refs,
1174 param_modref_max_accesses);
1175 summary_lto->writes_errno = false;
1176 }
1177 int ecf_flags = flags_from_decl_or_type (current_function_decl);
1178 auto_vec <gimple *, 32> recursive_calls;
1179
1180 /* Analyze each statement in each basic block of the function. If the
1181 statement cannot be analyzed (for any reason), the entire function cannot
1182 be analyzed by modref. */
1183 basic_block bb;
1184 FOR_EACH_BB_FN (bb, f)
1185 {
1186 gimple_stmt_iterator si;
1187 for (si = gsi_after_labels (bb); !gsi_end_p (si); gsi_next (&si))
1188 {
1189 if (!analyze_stmt (summary, summary_lto,
1190 gsi_stmt (si), ipa, &recursive_calls)
1191 || ((!summary || !summary->useful_p (ecf_flags))
1192 && (!summary_lto || !summary_lto->useful_p (ecf_flags))))
1193 {
1194 remove_summary (lto, nolto, ipa);
1195 return;
1196 }
1197 }
1198 }
1199
1200 /* In non-IPA mode we need to perform iterative dataflow on recursive calls.
1201 This needs to be done after all other side effects are computed. */
1202 if (!ipa)
1203 {
1204 bool changed = true;
1205 while (changed)
1206 {
1207 changed = false;
1208 for (unsigned i = 0; i < recursive_calls.length (); i++)
1209 {
1210 changed |= merge_call_side_effects
1211 (summary, recursive_calls[i], summary,
1212 ignore_stores_p (current_function_decl,
1213 gimple_call_flags
1214 (recursive_calls[i])),
1215 fnode);
1216 if (!summary->useful_p (ecf_flags))
1217 {
1218 remove_summary (lto, nolto, ipa);
1219 return;
1220 }
1221 }
1222 }
1223 }
1224 if (summary && !summary->useful_p (ecf_flags))
1225 {
1226 if (!ipa)
1227 optimization_summaries->remove (fnode);
1228 else
1229 summaries->remove (fnode);
1230 summary = NULL;
1231 }
1232 if (summary_lto && !summary_lto->useful_p (ecf_flags))
1233 {
1234 summaries_lto->remove (fnode);
1235 summary_lto = NULL;
1236 }
1237
1238 if (dump_file)
1239 {
1240 fprintf (dump_file, " - modref done with result: tracked.\n");
1241 if (summary)
1242 summary->dump (dump_file);
1243 if (summary_lto)
1244 summary_lto->dump (dump_file);
1245 }
1246 }
1247
1248 /* Callback for generate_summary. */
1249
1250 static void
1251 modref_generate (void)
1252 {
1253 struct cgraph_node *node;
1254 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
1255 {
1256 function *f = DECL_STRUCT_FUNCTION (node->decl);
1257 if (!f)
1258 continue;
1259 push_cfun (f);
1260 analyze_function (f, true);
1261 pop_cfun ();
1262 }
1263 }
1264
1265 /* Called when a new function is inserted into the callgraph late. */
1266
1267 void
1268 modref_summaries::insert (struct cgraph_node *node, modref_summary *)
1269 {
1270 /* Local passes ought to be executed by the pass manager. */
1271 if (this == optimization_summaries)
1272 {
1273 optimization_summaries->remove (node);
1274 return;
1275 }
1276 if (!DECL_STRUCT_FUNCTION (node->decl))
1277 {
1278 summaries->remove (node);
1279 return;
1280 }
1281 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
1282 analyze_function (DECL_STRUCT_FUNCTION (node->decl), true);
1283 pop_cfun ();
1284 }
1285
1286 /* Called when a new function is inserted into the callgraph late. */
1287
1288 void
1289 modref_summaries_lto::insert (struct cgraph_node *node, modref_summary_lto *)
1290 {
1291 /* We do not support adding new functions when IPA information is already
1292 propagated. This is done only by SIMD cloning, which is not very
1293 critical. */
1294 if (!DECL_STRUCT_FUNCTION (node->decl)
1295 || propagated)
1296 {
1297 summaries_lto->remove (node);
1298 return;
1299 }
1300 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
1301 analyze_function (DECL_STRUCT_FUNCTION (node->decl), true);
1302 pop_cfun ();
1303 }
1304
1305 /* Called when a new clone is inserted into the callgraph late. */
1306
1307 void
1308 modref_summaries::duplicate (cgraph_node *, cgraph_node *dst,
1309 modref_summary *src_data,
1310 modref_summary *dst_data)
1311 {
1312 /* Do not duplicate optimization summaries; we do not handle parameter
1313 transforms on them. */
1314 if (this == optimization_summaries)
1315 {
1316 optimization_summaries->remove (dst);
1317 return;
1318 }
1319 dst_data->stores = modref_records::create_ggc
1320 (src_data->stores->max_bases,
1321 src_data->stores->max_refs,
1322 src_data->stores->max_accesses);
1323 dst_data->stores->copy_from (src_data->stores);
1324 dst_data->loads = modref_records::create_ggc
1325 (src_data->loads->max_bases,
1326 src_data->loads->max_refs,
1327 src_data->loads->max_accesses);
1328 dst_data->loads->copy_from (src_data->loads);
1329 dst_data->writes_errno = src_data->writes_errno;
1330 }
1331
1332 /* Called when a new clone is inserted into the callgraph late. */
1333
1334 void
1335 modref_summaries_lto::duplicate (cgraph_node *, cgraph_node *,
1336 modref_summary_lto *src_data,
1337 modref_summary_lto *dst_data)
1338 {
1339 /* Be sure that no further cloning happens after ipa-modref. If it does,
1340 we will need to update signatures for possible param changes. */
1341 gcc_checking_assert (!((modref_summaries_lto *)summaries_lto)->propagated);
1342 dst_data->stores = modref_records_lto::create_ggc
1343 (src_data->stores->max_bases,
1344 src_data->stores->max_refs,
1345 src_data->stores->max_accesses);
1346 dst_data->stores->copy_from (src_data->stores);
1347 dst_data->loads = modref_records_lto::create_ggc
1348 (src_data->loads->max_bases,
1349 src_data->loads->max_refs,
1350 src_data->loads->max_accesses);
1351 dst_data->loads->copy_from (src_data->loads);
1352 dst_data->writes_errno = src_data->writes_errno;
1353 }
1354
1355 namespace
1356 {
1357 /* Definition of the modref pass on GIMPLE. */
1358 const pass_data pass_data_modref = {
1359 GIMPLE_PASS,
1360 "modref",
1361 OPTGROUP_IPA,
1362 TV_TREE_MODREF,
1363 (PROP_cfg | PROP_ssa),
1364 0,
1365 0,
1366 0,
1367 0,
1368 };
1369
1370 class pass_modref : public gimple_opt_pass
1371 {
1372 public:
1373 pass_modref (gcc::context *ctxt)
1374 : gimple_opt_pass (pass_data_modref, ctxt) {}
1375
1376 /* opt_pass methods: */
1377 opt_pass *clone ()
1378 {
1379 return new pass_modref (m_ctxt);
1380 }
1381 virtual bool gate (function *)
1382 {
1383 return flag_ipa_modref;
1384 }
1385 virtual unsigned int execute (function *);
1386 };
1387
1388 /* Encode TT to the output block OB using the summary streaming API. */
1389
1390 static void
1391 write_modref_records (modref_records_lto *tt, struct output_block *ob)
1392 {
1393 streamer_write_uhwi (ob, tt->max_bases);
1394 streamer_write_uhwi (ob, tt->max_refs);
1395 streamer_write_uhwi (ob, tt->max_accesses);
1396
1397 streamer_write_uhwi (ob, tt->every_base);
1398 streamer_write_uhwi (ob, vec_safe_length (tt->bases));
1399 size_t i;
1400 modref_base_node <tree> *base_node;
1401 FOR_EACH_VEC_SAFE_ELT (tt->bases, i, base_node)
1402 {
1403 stream_write_tree (ob, base_node->base, true);
1404
1405 streamer_write_uhwi (ob, base_node->every_ref);
1406 streamer_write_uhwi (ob, vec_safe_length (base_node->refs));
1407
1408 size_t j;
1409 modref_ref_node <tree> *ref_node;
1410 FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
1411 {
1412 stream_write_tree (ob, ref_node->ref, true);
1413 streamer_write_uhwi (ob, ref_node->every_access);
1414 streamer_write_uhwi (ob, vec_safe_length (ref_node->accesses));
1415
1416 size_t k;
1417 modref_access_node *access_node;
1418 FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
1419 {
1420 streamer_write_hwi (ob, access_node->parm_index);
1421 if (access_node->parm_index != -1)
1422 {
1423 streamer_write_uhwi (ob, access_node->parm_offset_known);
1424 if (access_node->parm_offset_known)
1425 {
1426 streamer_write_poly_int64 (ob, access_node->parm_offset);
1427 streamer_write_poly_int64 (ob, access_node->offset);
1428 streamer_write_poly_int64 (ob, access_node->size);
1429 streamer_write_poly_int64 (ob, access_node->max_size);
1430 }
1431 }
1432 }
1433 }
1434 }
1435 }
1436
1437 /* Read a modref_tree from the input block IB using the data from DATA_IN.
1438 This assumes that the tree was encoded using write_modref_records.
1439 Either NOLTO_RET or LTO_RET is initialized by the tree depending on
1440 whether LTO streaming is expected or not. */
1441
1442 void
1443 read_modref_records (lto_input_block *ib, struct data_in *data_in,
1444 modref_records **nolto_ret,
1445 modref_records_lto **lto_ret)
1446 {
1447 size_t max_bases = streamer_read_uhwi (ib);
1448 size_t max_refs = streamer_read_uhwi (ib);
1449 size_t max_accesses = streamer_read_uhwi (ib);
1450
1451 if (lto_ret)
1452 *lto_ret = modref_records_lto::create_ggc (max_bases, max_refs,
1453 max_accesses);
1454 if (nolto_ret)
1455 *nolto_ret = modref_records::create_ggc (max_bases, max_refs,
1456 max_accesses);
1457 gcc_checking_assert (lto_ret || nolto_ret);
1458
1459 size_t every_base = streamer_read_uhwi (ib);
1460 size_t nbase = streamer_read_uhwi (ib);
1461
1462 gcc_assert (!every_base || nbase == 0);
1463 if (every_base)
1464 {
1465 if (nolto_ret)
1466 (*nolto_ret)->collapse ();
1467 if (lto_ret)
1468 (*lto_ret)->collapse ();
1469 }
1470 for (size_t i = 0; i < nbase; i++)
1471 {
1472 tree base_tree = stream_read_tree (ib, data_in);
1473 modref_base_node <alias_set_type> *nolto_base_node = NULL;
1474 modref_base_node <tree> *lto_base_node = NULL;
1475
1476 /* At stream-in time we have LTO alias info. Check if we streamed in
1477 something obviously unnecessary. Do not glob types by alias sets;
1478 it is not 100% clear that ltrans types will get merged the same way.
1479 Types may get refined based on ODR type conflicts. */
1480 if (base_tree && !get_alias_set (base_tree))
1481 {
1482 if (dump_file)
1483 {
1484 fprintf (dump_file, "Streamed in alias set 0 type ");
1485 print_generic_expr (dump_file, base_tree);
1486 fprintf (dump_file, "\n");
1487 }
1488 base_tree = NULL;
1489 }
1490
1491 if (nolto_ret)
1492 nolto_base_node = (*nolto_ret)->insert_base (base_tree
1493 ? get_alias_set (base_tree)
1494 : 0);
1495 if (lto_ret)
1496 lto_base_node = (*lto_ret)->insert_base (base_tree);
1497 size_t every_ref = streamer_read_uhwi (ib);
1498 size_t nref = streamer_read_uhwi (ib);
1499
1500 gcc_assert (!every_ref || nref == 0);
1501 if (every_ref)
1502 {
1503 if (nolto_base_node)
1504 nolto_base_node->collapse ();
1505 if (lto_base_node)
1506 lto_base_node->collapse ();
1507 }
1508 for (size_t j = 0; j < nref; j++)
1509 {
1510 tree ref_tree = stream_read_tree (ib, data_in);
1511
1512 if (ref_tree && !get_alias_set (ref_tree))
1513 {
1514 if (dump_file)
1515 {
1516 fprintf (dump_file, "Streamed in alias set 0 type ");
1517 print_generic_expr (dump_file, ref_tree);
1518 fprintf (dump_file, "\n");
1519 }
1520 ref_tree = NULL;
1521 }
1522
1523 modref_ref_node <alias_set_type> *nolto_ref_node = NULL;
1524 modref_ref_node <tree> *lto_ref_node = NULL;
1525
1526 if (nolto_base_node)
1527 nolto_ref_node
1528 = nolto_base_node->insert_ref (ref_tree
1529 ? get_alias_set (ref_tree) : 0,
1530 max_refs);
1531 if (lto_base_node)
1532 lto_ref_node = lto_base_node->insert_ref (ref_tree, max_refs);
1533
1534 size_t every_access = streamer_read_uhwi (ib);
1535 size_t naccesses = streamer_read_uhwi (ib);
1536
1537 if (nolto_ref_node)
1538 nolto_ref_node->every_access = every_access;
1539 if (lto_ref_node)
1540 lto_ref_node->every_access = every_access;
1541
1542 for (size_t k = 0; k < naccesses; k++)
1543 {
1544 int parm_index = streamer_read_hwi (ib);
1545 bool parm_offset_known = false;
1546 poly_int64 parm_offset = 0;
1547 poly_int64 offset = 0;
1548 poly_int64 size = -1;
1549 poly_int64 max_size = -1;
1550
1551 if (parm_index != -1)
1552 {
1553 parm_offset_known = streamer_read_uhwi (ib);
1554 if (parm_offset_known)
1555 {
1556 parm_offset = streamer_read_poly_int64 (ib);
1557 offset = streamer_read_poly_int64 (ib);
1558 size = streamer_read_poly_int64 (ib);
1559 max_size = streamer_read_poly_int64 (ib);
1560 }
1561 }
1562 modref_access_node a = {offset, size, max_size, parm_offset,
1563 parm_index, parm_offset_known};
1564 if (nolto_ref_node)
1565 nolto_ref_node->insert_access (a, max_accesses);
1566 if (lto_ref_node)
1567 lto_ref_node->insert_access (a, max_accesses);
1568 }
1569 }
1570 }
1571 if (lto_ret)
1572 (*lto_ret)->cleanup ();
1573 if (nolto_ret)
1574 (*nolto_ret)->cleanup ();
1575 }
1576
1577 /* Callback for write_summary. */
1578
1579 static void
1580 modref_write ()
1581 {
1582 struct output_block *ob = create_output_block (LTO_section_ipa_modref);
1583 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
1584 unsigned int count = 0;
1585 int i;
1586
1587 if (!summaries_lto)
1588 {
1589 streamer_write_uhwi (ob, 0);
1590 streamer_write_char_stream (ob->main_stream, 0);
1591 produce_asm (ob, NULL);
1592 destroy_output_block (ob);
1593 return;
1594 }
1595
1596 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
1597 {
1598 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
1599 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
1600 modref_summary_lto *r;
1601
1602 if (cnode && cnode->definition && !cnode->alias
1603 && (r = summaries_lto->get (cnode))
1604 && r->useful_p (flags_from_decl_or_type (cnode->decl)))
1605 count++;
1606 }
1607 streamer_write_uhwi (ob, count);
1608
1609 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
1610 {
1611 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
1612 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
1613
1614 if (cnode && cnode->definition && !cnode->alias)
1615 {
1616 modref_summary_lto *r = summaries_lto->get (cnode);
1617
1618 if (!r || !r->useful_p (flags_from_decl_or_type (cnode->decl)))
1619 continue;
1620
1621 streamer_write_uhwi (ob, lto_symtab_encoder_encode (encoder, cnode));
1622
1623 write_modref_records (r->loads, ob);
1624 write_modref_records (r->stores, ob);
1625
1626 struct bitpack_d bp = bitpack_create (ob->main_stream);
1627 bp_pack_value (&bp, r->writes_errno, 1);
1628 if (!flag_wpa)
1629 {
1630 for (cgraph_edge *e = cnode->indirect_calls;
1631 e; e = e->next_callee)
1632 {
1633 class fnspec_summary *sum = fnspec_summaries->get (e);
1634 bp_pack_value (&bp, sum != NULL, 1);
1635 if (sum)
1636 bp_pack_string (ob, &bp, sum->fnspec, true);
1637 }
1638 for (cgraph_edge *e = cnode->callees; e; e = e->next_callee)
1639 {
1640 class fnspec_summary *sum = fnspec_summaries->get (e);
1641 bp_pack_value (&bp, sum != NULL, 1);
1642 if (sum)
1643 bp_pack_string (ob, &bp, sum->fnspec, true);
1644 }
1645 }
1646 streamer_write_bitpack (&bp);
1647 }
1648 }
1649 streamer_write_char_stream (ob->main_stream, 0);
1650 produce_asm (ob, NULL);
1651 destroy_output_block (ob);
1652 }
1653
1654 /* Read the modref summary section DATA of length LEN for FILE_DATA. */
static void
1655 read_section (struct lto_file_decl_data *file_data, const char *data,
1656 size_t len)
1657 {
1658 const struct lto_function_header *header
1659 = (const struct lto_function_header *) data;
1660 const int cfg_offset = sizeof (struct lto_function_header);
1661 const int main_offset = cfg_offset + header->cfg_size;
1662 const int string_offset = main_offset + header->main_size;
1663 struct data_in *data_in;
1664 unsigned int i;
1665 unsigned int f_count;
1666
1667 lto_input_block ib ((const char *) data + main_offset, header->main_size,
1668 file_data->mode_table);
1669
1670 data_in
1671 = lto_data_in_create (file_data, (const char *) data + string_offset,
1672 header->string_size, vNULL);
1673 f_count = streamer_read_uhwi (&ib);
1674 for (i = 0; i < f_count; i++)
1675 {
1676 struct cgraph_node *node;
1677 lto_symtab_encoder_t encoder;
1678
1679 unsigned int index = streamer_read_uhwi (&ib);
1680 encoder = file_data->symtab_node_encoder;
1681 node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder,
1682 index));
1683
1684 modref_summary *modref_sum = summaries
1685 ? summaries->get_create (node) : NULL;
1686 modref_summary_lto *modref_sum_lto = summaries_lto
1687 ? summaries_lto->get_create (node)
1688 : NULL;
1689 if (optimization_summaries)
1690 modref_sum = optimization_summaries->get_create (node);
1691
1692 if (modref_sum)
1693 modref_sum->writes_errno = false;
1694 if (modref_sum_lto)
1695 modref_sum_lto->writes_errno = false;
1696
1697 gcc_assert (!modref_sum || (!modref_sum->loads
1698 && !modref_sum->stores));
1699 gcc_assert (!modref_sum_lto || (!modref_sum_lto->loads
1700 && !modref_sum_lto->stores));
1701 read_modref_records (&ib, data_in,
1702 modref_sum ? &modref_sum->loads : NULL,
1703 modref_sum_lto ? &modref_sum_lto->loads : NULL);
1704 read_modref_records (&ib, data_in,
1705 modref_sum ? &modref_sum->stores : NULL,
1706 modref_sum_lto ? &modref_sum_lto->stores : NULL);
1707 struct bitpack_d bp = streamer_read_bitpack (&ib);
1708 if (bp_unpack_value (&bp, 1))
1709 {
1710 if (modref_sum)
1711 modref_sum->writes_errno = true;
1712 if (modref_sum_lto)
1713 modref_sum_lto->writes_errno = true;
1714 }
1715 if (!flag_ltrans)
1716 {
1717 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
1718 {
1719 if (bp_unpack_value (&bp, 1))
1720 {
1721 class fnspec_summary *sum = fnspec_summaries->get_create (e);
1722 sum->fnspec = xstrdup (bp_unpack_string (data_in, &bp));
1723 }
1724 }
1725 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
1726 {
1727 if (bp_unpack_value (&bp, 1))
1728 {
1729 class fnspec_summary *sum = fnspec_summaries->get_create (e);
1730 sum->fnspec = xstrdup (bp_unpack_string (data_in, &bp));
1731 }
1732 }
1733 }
1734 if (dump_file)
1735 {
1736 fprintf (dump_file, "Read modref for %s\n",
1737 node->dump_name ());
1738 if (modref_sum)
1739 modref_sum->dump (dump_file);
1740 if (modref_sum_lto)
1741 modref_sum_lto->dump (dump_file);
1742 }
1743 }
1744
1745 lto_free_section_data (file_data, LTO_section_ipa_modref, NULL, data,
1746 len);
1747 lto_data_in_delete (data_in);
1748 }
1749
1750 /* Callback for read_summary. */
1751
1752 static void
1753 modref_read (void)
1754 {
1755 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1756 struct lto_file_decl_data *file_data;
1757 unsigned int j = 0;
1758
1759 gcc_checking_assert (!optimization_summaries && !summaries && !summaries_lto);
1760 if (flag_ltrans)
1761 optimization_summaries = modref_summaries::create_ggc (symtab);
1762 else
1763 {
1764 if (flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
1765 summaries_lto = modref_summaries_lto::create_ggc (symtab);
1766 if (!flag_wpa
1767 || (flag_incremental_link == INCREMENTAL_LINK_LTO
1768 && flag_fat_lto_objects))
1769 summaries = modref_summaries::create_ggc (symtab);
1770 if (!fnspec_summaries)
1771 fnspec_summaries = new fnspec_summaries_t (symtab);
1772 }
1773
1774 while ((file_data = file_data_vec[j++]))
1775 {
1776 size_t len;
1777 const char *data = lto_get_summary_section_data (file_data,
1778 LTO_section_ipa_modref,
1779 &len);
1780 if (data)
1781 read_section (file_data, data, len);
1782 else
1783 /* Fatal error here. We do not want to support compiling ltrans units
1784 with a different version of the compiler or with different flags than
1785 the WPA unit, so this should never happen. */
1786 fatal_error (input_location,
1787 "IPA modref summary is missing in input file");
1788 }
1789 }
1790
1791 /* If the function's signature changed, update the summary accordingly. */
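/* An illustrative example: if a clone of foo (int *a, int *b) drops the
   first parameter, accesses recorded for parameter 1 (b) are remapped to
   parameter 0, while accesses to the removed parameter lose their
   parameter association.  */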
1792
1793 static void
1794 update_signature (struct cgraph_node *node)
1795 {
1796 clone_info *info = clone_info::get (node);
1797 if (!info || !info->param_adjustments)
1798 return;
1799
1800 modref_summary *r = optimization_summaries
1801 ? optimization_summaries->get (node) : NULL;
1802 modref_summary_lto *r_lto = summaries_lto
1803 ? summaries_lto->get (node) : NULL;
1804 if (!r && !r_lto)
1805 return;
1806 if (dump_file)
1807 {
1808 fprintf (dump_file, "Updating summary for %s from:\n",
1809 node->dump_name ());
1810 if (r) r->dump (dump_file);
1811 }
1812
1813 size_t i, max = 0;
1814 ipa_adjusted_param *p;
1815
1816 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
1817 {
1818 int idx = info->param_adjustments->get_original_index (i);
1819 if (idx > (int)max)
1820 max = idx;
1821 }
1822
1823 auto_vec <int, 32> map;
1824
1825 map.reserve (max + 1);
1826 for (i = 0; i <= max; i++)
1827 map.quick_push (-1);
1828 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
1829 {
1830 int idx = info->param_adjustments->get_original_index (i);
1831 if (idx >= 0)
1832 map[idx] = i;
1833 }
1834 if (r)
1835 {
1836 r->loads->remap_params (&map);
1837 r->stores->remap_params (&map);
1838 }
1839 if (r_lto)
1840 {
1841 r_lto->loads->remap_params (&map);
1842 r_lto->stores->remap_params (&map);
1843 }
1844 if (dump_file)
1845 {
1846 fprintf (dump_file, "to:\n");
1847 if (r)
1848 r->dump (dump_file);
1849 if (r_lto)
1850 r_lto->dump (dump_file);
1851 }
1852 return;
1853 }
1854
1855 /* Definition of the modref IPA pass. */
1856 const pass_data pass_data_ipa_modref =
1857 {
1858 IPA_PASS, /* type */
1859 "modref", /* name */
1860 OPTGROUP_IPA, /* optinfo_flags */
1861 TV_IPA_MODREF, /* tv_id */
1862 0, /* properties_required */
1863 0, /* properties_provided */
1864 0, /* properties_destroyed */
1865 0, /* todo_flags_start */
1866 ( TODO_dump_symtab ), /* todo_flags_finish */
1867 };
1868
1869 class pass_ipa_modref : public ipa_opt_pass_d
1870 {
1871 public:
1872 pass_ipa_modref (gcc::context *ctxt)
1873 : ipa_opt_pass_d (pass_data_ipa_modref, ctxt,
1874 modref_generate, /* generate_summary */
1875 modref_write, /* write_summary */
1876 modref_read, /* read_summary */
1877 modref_write, /* write_optimization_summary */
1878 modref_read, /* read_optimization_summary */
1879 NULL, /* stmt_fixup */
1880 0, /* function_transform_todo_flags_start */
1881 NULL, /* function_transform */
1882 NULL) /* variable_transform */
1883 {}
1884
1885 /* opt_pass methods: */
1886 opt_pass *clone () { return new pass_ipa_modref (m_ctxt); }
1887 virtual bool gate (function *)
1888 {
1889 return true;
1890 }
1891 virtual unsigned int execute (function *);
1892
1893 };
1894
1895 }
1896
1897 unsigned int pass_modref::execute (function *f)
1898 {
1899 analyze_function (f, false);
1900 return 0;
1901 }
1902
1903 gimple_opt_pass *
1904 make_pass_modref (gcc::context *ctxt)
1905 {
1906 return new pass_modref (ctxt);
1907 }
1908
1909 ipa_opt_pass_d *
1910 make_pass_ipa_modref (gcc::context *ctxt)
1911 {
1912 return new pass_ipa_modref (ctxt);
1913 }
1914
1915 /* Skip edges from and to nodes without modref summaries.
1916 Ignore unavailable symbols. */
1917
1918 static bool
1919 ignore_edge (struct cgraph_edge *e)
1920 {
1921 /* We merge summaries of inline clones into summaries of functions they
1922 are inlined to. For that reason the complete function bodies must
1923 act as a unit. */
1924 if (!e->inline_failed)
1925 return false;
1926 enum availability avail;
1927 cgraph_node *callee = e->callee->function_or_virtual_thunk_symbol
1928 (&avail, e->caller);
1929
1930 return (avail <= AVAIL_INTERPOSABLE
1931 || ((!optimization_summaries || !optimization_summaries->get (callee))
1932 && (!summaries_lto || !summaries_lto->get (callee)))
1933 || flags_from_decl_or_type (e->callee->decl)
1934 & (ECF_CONST | ECF_NOVOPS));
1935 }
1936
/* Compute parm_map for CALLEE_EDGE.  Each entry translates an argument of
   the call into the caller's parameter space; parm_index -1 means the
   argument points to unknown memory, while -2 means it points to local or
   readonly memory and can be ignored.  */
1938
1939 static bool
1940 compute_parm_map (cgraph_edge *callee_edge, vec<modref_parm_map> *parm_map)
1941 {
1942 class ipa_edge_args *args;
1943 if (ipa_node_params_sum
1944 && !callee_edge->call_stmt_cannot_inline_p
1945 && (args = IPA_EDGE_REF (callee_edge)) != NULL)
1946 {
1947 int i, count = ipa_get_cs_argument_count (args);
1948 class ipa_node_params *caller_parms_info, *callee_pi;
1949 class ipa_call_summary *es
1950 = ipa_call_summaries->get (callee_edge);
1951 cgraph_node *callee
1952 = callee_edge->callee->function_or_virtual_thunk_symbol
1953 (NULL, callee_edge->caller);
1954
1955 caller_parms_info = IPA_NODE_REF (callee_edge->caller->inlined_to
1956 ? callee_edge->caller->inlined_to
1957 : callee_edge->caller);
1958 callee_pi = IPA_NODE_REF (callee);
1959
1960 (*parm_map).safe_grow_cleared (count);
1961
1962 for (i = 0; i < count; i++)
1963 {
1964 if (es && es->param[i].points_to_local_or_readonly_memory)
1965 {
1966 (*parm_map)[i].parm_index = -2;
1967 continue;
1968 }
1969
1970 struct ipa_jump_func *jf
1971 = ipa_get_ith_jump_func (args, i);
1972 if (jf && callee_pi)
1973 {
1974 tree cst = ipa_value_from_jfunc (caller_parms_info,
1975 jf,
1976 ipa_get_type
1977 (callee_pi, i));
1978 if (cst && points_to_local_or_readonly_memory_p (cst))
1979 {
1980 (*parm_map)[i].parm_index = -2;
1981 continue;
1982 }
1983 }
1984 if (jf && jf->type == IPA_JF_PASS_THROUGH)
1985 {
1986 (*parm_map)[i].parm_index
1987 = ipa_get_jf_pass_through_formal_id (jf);
1988 if (ipa_get_jf_pass_through_operation (jf) == NOP_EXPR)
1989 {
1990 (*parm_map)[i].parm_offset_known = true;
1991 (*parm_map)[i].parm_offset = 0;
1992 }
1993 else if (ipa_get_jf_pass_through_operation (jf)
1994 == POINTER_PLUS_EXPR
1995 && ptrdiff_tree_p (ipa_get_jf_pass_through_operand (jf),
1996 &(*parm_map)[i].parm_offset))
1997 (*parm_map)[i].parm_offset_known = true;
1998 else
1999 (*parm_map)[i].parm_offset_known = false;
2000 continue;
2001 }
2002 if (jf && jf->type == IPA_JF_ANCESTOR)
2003 {
2004 (*parm_map)[i].parm_index = ipa_get_jf_ancestor_formal_id (jf);
2005 (*parm_map)[i].parm_offset_known = true;
2006 gcc_checking_assert
2007 (!(ipa_get_jf_ancestor_offset (jf) & (BITS_PER_UNIT - 1)));
2008 (*parm_map)[i].parm_offset
2009 = ipa_get_jf_ancestor_offset (jf) >> LOG2_BITS_PER_UNIT;
2010 }
2011 else
2012 (*parm_map)[i].parm_index = -1;
2013 }
2014 if (dump_file)
2015 {
2016 fprintf (dump_file, " Parm map: ");
2017 for (i = 0; i < count; i++)
2018 fprintf (dump_file, " %i", (*parm_map)[i].parm_index);
2019 fprintf (dump_file, "\n");
2020 }
2021 return true;
2022 }
2023 return false;
2024 }
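/* A worked example (hypothetical call): for

     foo (a, b + 4, &local);

   where A and B are the caller's first two parameters, the resulting map
   would be roughly

     parm_map[0]: parm_index 0, parm_offset 0    plain pass-through
     parm_map[1]: parm_index 1, parm_offset 4    POINTER_PLUS_EXPR pass-through
     parm_map[2]: parm_index -2                  local memory; ignorable

   assuming the IPA jump functions managed to analyze all three
   arguments.  */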
2025
/* Call EDGE was inlined; merge the callee's summary into the caller's.  */
2027
2028 void
2029 ipa_merge_modref_summary_after_inlining (cgraph_edge *edge)
2030 {
2031 if (!summaries && !summaries_lto)
2032 return;
2033
2034 struct cgraph_node *to = (edge->caller->inlined_to
2035 ? edge->caller->inlined_to : edge->caller);
2036 class modref_summary *to_info = summaries ? summaries->get (to) : NULL;
2037 class modref_summary_lto *to_info_lto = summaries_lto
2038 ? summaries_lto->get (to) : NULL;
2039
2040 if (!to_info && !to_info_lto)
2041 {
2042 if (summaries)
2043 summaries->remove (edge->callee);
2044 if (summaries_lto)
2045 summaries_lto->remove (edge->callee);
2046 return;
2047 }
2048
2049 class modref_summary *callee_info = summaries ? summaries->get (edge->callee)
2050 : NULL;
2051 class modref_summary_lto *callee_info_lto
2052 = summaries_lto ? summaries_lto->get (edge->callee) : NULL;
2053 int flags = flags_from_decl_or_type (edge->callee->decl);
2054
2055 if (!callee_info && to_info)
2056 {
2057 if (ignore_stores_p (edge->caller->decl, flags))
2058 to_info->loads->collapse ();
2059 else
2060 {
2061 summaries->remove (to);
2062 to_info = NULL;
2063 }
2064 }
2065 if (!callee_info_lto && to_info_lto)
2066 {
2067 if (ignore_stores_p (edge->caller->decl, flags))
2068 to_info_lto->loads->collapse ();
2069 else
2070 {
2071 summaries_lto->remove (to);
2072 to_info_lto = NULL;
2073 }
2074 }
2075 if (callee_info || callee_info_lto)
2076 {
2077 auto_vec <modref_parm_map, 32> parm_map;
2078
2079 compute_parm_map (edge, &parm_map);
2080
2081 if (!ignore_stores_p (edge->caller->decl, flags))
2082 {
2083 if (to_info && callee_info)
2084 to_info->stores->merge (callee_info->stores, &parm_map);
2085 if (to_info_lto && callee_info_lto)
2086 to_info_lto->stores->merge (callee_info_lto->stores, &parm_map);
2087 }
2088 if (to_info && callee_info)
2089 to_info->loads->merge (callee_info->loads, &parm_map);
2090 if (to_info_lto && callee_info_lto)
2091 to_info_lto->loads->merge (callee_info_lto->loads, &parm_map);
2092 }
2093 if (summaries)
2094 {
2095 if (to_info && !to_info->useful_p (flags))
2096 {
2097 if (dump_file)
2098 fprintf (dump_file, "Removed mod-ref summary for %s\n",
2099 to->dump_name ());
2100 summaries->remove (to);
2101 }
      else if (to_info && dump_file)
	{
	  fprintf (dump_file, "Updated mod-ref summary for %s\n",
		   to->dump_name ());
	  to_info->dump (dump_file);
	}
2109 if (callee_info)
2110 summaries->remove (edge->callee);
2111 }
2112 if (summaries_lto)
2113 {
2114 if (to_info_lto && !to_info_lto->useful_p (flags))
2115 {
2116 if (dump_file)
2117 fprintf (dump_file, "Removed mod-ref summary for %s\n",
2118 to->dump_name ());
2119 summaries_lto->remove (to);
2120 }
      else if (to_info_lto && dump_file)
	{
	  fprintf (dump_file, "Updated mod-ref summary for %s\n",
		   to->dump_name ());
	  to_info_lto->dump (dump_file);
	}
2128 if (callee_info_lto)
2129 summaries_lto->remove (edge->callee);
2130 }
2131 return;
2132 }
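/* A sketch of the merging above (hypothetical functions): if bar reads
   memory through its first parameter and foo calls bar (x + 8) with the
   call inlined, the parm map for the edge translates bar's parameter 0
   to foo's parameter x with offset 8, so foo's merged summary records
   the load relative to its own parameter.  */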
2133
/* Get the type of parameter I of DECL.  This is only safe for special
   cases like builtins for which we create fnspecs, because the type
   match is checked at fnspec creation time.  */
2137
2138 static tree
2139 get_parm_type (tree decl, unsigned int i)
2140 {
2141 tree t = TYPE_ARG_TYPES (TREE_TYPE (decl));
2142
2143 for (unsigned int p = 0; p < i; p++)
2144 t = TREE_CHAIN (t);
2145 return TREE_VALUE (t);
2146 }
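/* For example, given a declaration of type

     void f (int, char *);

   get_parm_type (f_decl, 1) follows the TYPE_ARG_TYPES chain once and
   returns the type node for 'char *'.  */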
2147
/* Return an access node describing the access to argument I of call E
   with FNSPEC, using MAP to translate the argument into the caller's
   parameter space.  */
2149
2150 static modref_access_node
2151 get_access_for_fnspec (cgraph_edge *e, attr_fnspec &fnspec,
2152 unsigned int i, modref_parm_map &map)
2153 {
2154 tree size = NULL_TREE;
2155 unsigned int size_arg;
2156
2157 if (!fnspec.arg_specified_p (i))
2158 ;
2159 else if (fnspec.arg_max_access_size_given_by_arg_p (i, &size_arg))
2160 {
2161 cgraph_node *node = e->caller->inlined_to
2162 ? e->caller->inlined_to : e->caller;
2163 class ipa_node_params *caller_parms_info = IPA_NODE_REF (node);
2164 class ipa_edge_args *args = IPA_EDGE_REF (e);
2165 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, size_arg);
2166
2167 if (jf)
2168 size = ipa_value_from_jfunc (caller_parms_info, jf,
2169 get_parm_type (e->callee->decl, size_arg));
2170 }
2171 else if (fnspec.arg_access_size_given_by_type_p (i))
2172 size = TYPE_SIZE_UNIT (get_parm_type (e->callee->decl, i));
2173 modref_access_node a = {0, -1, -1,
2174 map.parm_offset, map.parm_index,
2175 map.parm_offset_known};
2176 poly_int64 size_hwi;
2177 if (size
2178 && poly_int_tree_p (size, &size_hwi)
2179 && coeffs_in_range_p (size_hwi, 0,
2180 HOST_WIDE_INT_MAX / BITS_PER_UNIT))
2181 {
2182 a.size = -1;
2183 a.max_size = size_hwi << LOG2_BITS_PER_UNIT;
2184 }
2185 return a;
2186 }
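/* For illustration: when the fnspec says the access size of argument I is
   given by its type and that size works out to 4 bytes, the node above
   keeps an unknown exact size (-1) but gets a max_size of
   4 << LOG2_BITS_PER_UNIT, i.e. 32 bits, at the parameter offset taken
   from MAP.  */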
2187
/* Call E in NODE with ECF_FLAGS has no usable summary; update
   *CUR_SUMMARY_PTR and *CUR_SUMMARY_LTO_PTR accordingly.  Return true if
   something changed.  */
2190
2191 static bool
2192 propagate_unknown_call (cgraph_node *node,
2193 cgraph_edge *e, int ecf_flags,
2194 modref_summary **cur_summary_ptr,
2195 modref_summary_lto **cur_summary_lto_ptr)
2196 {
2197 bool changed = false;
2198 modref_summary *cur_summary = cur_summary_ptr ? *cur_summary_ptr : NULL;
2199 modref_summary_lto *cur_summary_lto = cur_summary_lto_ptr
2200 ? *cur_summary_lto_ptr : NULL;
2201 class fnspec_summary *fnspec_sum = fnspec_summaries->get (e);
2202 auto_vec <modref_parm_map, 32> parm_map;
2203 if (fnspec_sum
2204 && compute_parm_map (e, &parm_map))
2205 {
2206 attr_fnspec fnspec (fnspec_sum->fnspec);
2207
2208 gcc_checking_assert (fnspec.known_p ());
2209 if (fnspec.global_memory_read_p ())
2210 collapse_loads (cur_summary, cur_summary_lto);
2211 else
2212 {
2213 tree t = TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl));
2214 for (unsigned i = 0; i < parm_map.length () && t;
2215 i++, t = TREE_CHAIN (t))
2216 if (!POINTER_TYPE_P (TREE_VALUE (t)))
2217 ;
2218 else if (!fnspec.arg_specified_p (i)
2219 || fnspec.arg_maybe_read_p (i))
2220 {
2221 modref_parm_map map = parm_map[i];
2222 if (map.parm_index == -2)
2223 continue;
2224 if (map.parm_index == -1)
2225 {
2226 collapse_loads (cur_summary, cur_summary_lto);
2227 break;
2228 }
2229 if (cur_summary)
2230 changed |= cur_summary->loads->insert
2231 (0, 0, get_access_for_fnspec (e, fnspec, i, map));
2232 if (cur_summary_lto)
2233 changed |= cur_summary_lto->loads->insert
2234 (0, 0, get_access_for_fnspec (e, fnspec, i, map));
2235 }
2236 }
2237 if (ignore_stores_p (node->decl, ecf_flags))
2238 ;
2239 else if (fnspec.global_memory_written_p ())
2240 collapse_stores (cur_summary, cur_summary_lto);
2241 else
2242 {
2243 tree t = TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl));
2244 for (unsigned i = 0; i < parm_map.length () && t;
2245 i++, t = TREE_CHAIN (t))
2246 if (!POINTER_TYPE_P (TREE_VALUE (t)))
2247 ;
2248 else if (!fnspec.arg_specified_p (i)
2249 || fnspec.arg_maybe_written_p (i))
2250 {
2251 modref_parm_map map = parm_map[i];
2252 if (map.parm_index == -2)
2253 continue;
2254 if (map.parm_index == -1)
2255 {
2256 collapse_stores (cur_summary, cur_summary_lto);
2257 break;
2258 }
2259 if (cur_summary)
2260 changed |= cur_summary->stores->insert
2261 (0, 0, get_access_for_fnspec (e, fnspec, i, map));
2262 if (cur_summary_lto)
2263 changed |= cur_summary_lto->stores->insert
2264 (0, 0, get_access_for_fnspec (e, fnspec, i, map));
2265 }
2266 }
2267 if (fnspec.errno_maybe_written_p () && flag_errno_math)
2268 {
2269 if (cur_summary && !cur_summary->writes_errno)
2270 {
2271 cur_summary->writes_errno = true;
2272 changed = true;
2273 }
2274 if (cur_summary_lto && !cur_summary_lto->writes_errno)
2275 {
2276 cur_summary_lto->writes_errno = true;
2277 changed = true;
2278 }
2279 }
2280 return changed;
2281 }
2282 if (ignore_stores_p (node->decl, ecf_flags))
2283 {
2284 if (dump_file)
2285 fprintf (dump_file, " collapsing loads\n");
2286 return collapse_loads (cur_summary, cur_summary_lto);
2287 }
2288 if (optimization_summaries)
2289 optimization_summaries->remove (node);
2290 if (summaries_lto)
2291 summaries_lto->remove (node);
2292 if (cur_summary_ptr)
2293 *cur_summary_ptr = NULL;
2294 if (cur_summary_lto_ptr)
2295 *cur_summary_lto_ptr = NULL;
2296 if (dump_file)
2297 fprintf (dump_file, " Giving up\n");
2298 return true;
2299 }
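/* A summary of the fallbacks above (illustrative): with a fnspec that
   marks a pointer argument as only read, a single load access is recorded
   for that parameter; with no fnspec but ignorable stores (e.g. the
   callee is pure) only the loads collapse to "anything"; otherwise the
   whole summary is dropped and we give up on the node.  */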
2300
/* Perform iterative dataflow on the SCC component starting at
   COMPONENT_NODE until a fixed point is reached.  */
2302
2303 static void
2304 modref_propagate_in_scc (cgraph_node *component_node)
2305 {
2306 bool changed = true;
2307 int iteration = 0;
2308
2309 while (changed)
2310 {
2311 changed = false;
2312 for (struct cgraph_node *cur = component_node; cur;
2313 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
2314 {
2315 cgraph_node *node = cur->inlined_to ? cur->inlined_to : cur;
2316 modref_summary *cur_summary = optimization_summaries
2317 ? optimization_summaries->get (node)
2318 : NULL;
2319 modref_summary_lto *cur_summary_lto = summaries_lto
2320 ? summaries_lto->get (node)
2321 : NULL;
2322
2323 if (!cur_summary && !cur_summary_lto)
2324 continue;
2325
2326 if (dump_file)
2327 fprintf (dump_file, " Processing %s%s%s\n",
2328 cur->dump_name (),
2329 TREE_READONLY (cur->decl) ? " (const)" : "",
2330 DECL_PURE_P (cur->decl) ? " (pure)" : "");
2331
2332 for (cgraph_edge *e = cur->indirect_calls; e; e = e->next_callee)
2333 {
2334 if (e->indirect_info->ecf_flags & (ECF_CONST | ECF_NOVOPS))
2335 continue;
2336 if (dump_file)
	    fprintf (dump_file, "    Indirect call;"
		     " collapsing loads\n");
2339 changed |= propagate_unknown_call
2340 (node, e, e->indirect_info->ecf_flags,
2341 &cur_summary, &cur_summary_lto);
2342 if (!cur_summary && !cur_summary_lto)
2343 break;
2344 }
2345
2346 if (!cur_summary && !cur_summary_lto)
2347 continue;
2348
2349 for (cgraph_edge *callee_edge = cur->callees; callee_edge;
2350 callee_edge = callee_edge->next_callee)
2351 {
2352 int flags = flags_from_decl_or_type (callee_edge->callee->decl);
2353 modref_summary *callee_summary = NULL;
2354 modref_summary_lto *callee_summary_lto = NULL;
2355 struct cgraph_node *callee;
2356
2357 if (flags & (ECF_CONST | ECF_NOVOPS)
2358 || !callee_edge->inline_failed)
2359 continue;
2360
2361 /* Get the callee and its summary. */
2362 enum availability avail;
2363 callee = callee_edge->callee->function_or_virtual_thunk_symbol
2364 (&avail, cur);
2365
2366 /* It is not necessary to re-process calls outside of the
2367 SCC component. */
2368 if (iteration > 0
2369 && (!callee->aux
2370 || ((struct ipa_dfs_info *)cur->aux)->scc_no
2371 != ((struct ipa_dfs_info *)callee->aux)->scc_no))
2372 continue;
2373
2374 if (dump_file)
2375 fprintf (dump_file, " Call to %s\n",
2376 callee_edge->callee->dump_name ());
2377
2378 bool ignore_stores = ignore_stores_p (cur->decl, flags);
2379
2380 if (avail <= AVAIL_INTERPOSABLE)
2381 {
2382 if (dump_file)
2383 fprintf (dump_file, " Call target interposable"
2384 " or not available\n");
2385 changed |= propagate_unknown_call
2386 (node, callee_edge, flags,
2387 &cur_summary, &cur_summary_lto);
2388 if (!cur_summary && !cur_summary_lto)
2389 break;
2390 continue;
2391 }
2392
2393 /* We don't know anything about CALLEE, hence we cannot tell
2394 anything about the entire component. */
2395
2396 if (cur_summary
2397 && !(callee_summary = optimization_summaries->get (callee)))
2398 {
2399 if (dump_file)
2400 fprintf (dump_file, " No call target summary\n");
2401 changed |= propagate_unknown_call
2402 (node, callee_edge, flags,
2403 &cur_summary, NULL);
2404 if (!cur_summary && !cur_summary_lto)
2405 break;
2406 }
2407 if (cur_summary_lto
2408 && !(callee_summary_lto = summaries_lto->get (callee)))
2409 {
2410 if (dump_file)
2411 fprintf (dump_file, " No call target summary\n");
2412 changed |= propagate_unknown_call
2413 (node, callee_edge, flags,
2414 NULL, &cur_summary_lto);
2415 if (!cur_summary && !cur_summary_lto)
2416 break;
2417 }
2418
	  /* We cannot safely optimize based on the summary of the callee if
	     it does not always bind to the current def: a memory load could
	     have been optimized out earlier, and that may not happen in the
	     interposed variant.  */
2423 if (!callee_edge->binds_to_current_def_p ())
2424 {
2425 changed |= collapse_loads (cur_summary, cur_summary_lto);
2426 if (dump_file)
2427 fprintf (dump_file, " May not bind local;"
2428 " collapsing loads\n");
2429 }
2430
2431
2432 auto_vec <modref_parm_map, 32> parm_map;
2433
2434 compute_parm_map (callee_edge, &parm_map);
2435
2436 /* Merge in callee's information. */
2437 if (callee_summary)
2438 {
2439 changed |= cur_summary->loads->merge
2440 (callee_summary->loads, &parm_map);
2441 if (!ignore_stores)
2442 {
2443 changed |= cur_summary->stores->merge
2444 (callee_summary->stores, &parm_map);
2445 if (!cur_summary->writes_errno
2446 && callee_summary->writes_errno)
2447 {
2448 cur_summary->writes_errno = true;
2449 changed = true;
2450 }
2451 }
2452 }
2453 if (callee_summary_lto)
2454 {
2455 changed |= cur_summary_lto->loads->merge
2456 (callee_summary_lto->loads, &parm_map);
2457 if (!ignore_stores)
2458 {
2459 changed |= cur_summary_lto->stores->merge
2460 (callee_summary_lto->stores, &parm_map);
2461 if (!cur_summary_lto->writes_errno
2462 && callee_summary_lto->writes_errno)
2463 {
2464 cur_summary_lto->writes_errno = true;
2465 changed = true;
2466 }
2467 }
2468 }
2469 if (dump_file && changed)
2470 {
2471 if (cur_summary)
2472 cur_summary->dump (dump_file);
2473 if (cur_summary_lto)
2474 cur_summary_lto->dump (dump_file);
2475 }
2476 }
2477 }
2478 iteration++;
2479 }
2480 if (dump_file)
2481 {
2482 fprintf (dump_file,
2483 "Propagation finished in %i iterations\n", iteration);
2484 for (struct cgraph_node *cur = component_node; cur;
2485 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
2486 if (!cur->inlined_to)
2487 {
2488 modref_summary *cur_summary = optimization_summaries
2489 ? optimization_summaries->get (cur)
2490 : NULL;
2491 modref_summary_lto *cur_summary_lto = summaries_lto
2492 ? summaries_lto->get (cur)
2493 : NULL;
2494
2495 fprintf (dump_file, "Propagated modref for %s%s%s\n",
2496 cur->dump_name (),
2497 TREE_READONLY (cur->decl) ? " (const)" : "",
2498 DECL_PURE_P (cur->decl) ? " (pure)" : "");
2499 if (optimization_summaries)
2500 {
2501 if (cur_summary)
2502 cur_summary->dump (dump_file);
2503 else
2504 fprintf (dump_file, " Not tracked\n");
2505 }
2506 if (summaries_lto)
2507 {
2508 if (cur_summary_lto)
2509 cur_summary_lto->dump (dump_file);
2510 else
2511 fprintf (dump_file, " Not tracked (lto)\n");
2512 }
2513 }
2514 }
2515 }
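/* An illustrative run (hypothetical SCC): for the mutually recursive

     void a () { b (); *p = 1; }
     void b () { a (); t = *q; }

   the first iteration merges b's load of *q into a's summary and a's
   store to *p into b's; the second iteration finds nothing new, so the
   dataflow above stops after reaching the fixed point.  */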
2516
/* Run the IPA pass.  This will take a function's summaries and calls and
   construct new summaries representing the transitive closure, so that
   the summary of an analyzed function describes the loads and stores
   done both by the function itself and by every function it calls.  */
2521
2522 unsigned int
2523 pass_ipa_modref::execute (function *)
2524 {
2525 if (!summaries && !summaries_lto)
2526 return 0;
2527
2528 if (optimization_summaries)
2529 ggc_delete (optimization_summaries);
2530 optimization_summaries = summaries;
2531 summaries = NULL;
2532
2533 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *,
2534 symtab->cgraph_count);
2535 int order_pos;
2536 order_pos = ipa_reduced_postorder (order, true, ignore_edge);
2537 int i;
2538
  /* Iterate over all strongly connected components in post-order, so the
     summaries of callees outside the current SCC are already final by the
     time a component is processed.  */
2540 for (i = 0; i < order_pos; i++)
2541 {
2542 /* Get the component's representative. That's just any node in the
2543 component from which we can traverse the entire component. */
2544 struct cgraph_node *component_node = order[i];
2545
2546 if (dump_file)
2547 fprintf (dump_file, "\n\nStart of SCC component\n");
2548
2549 modref_propagate_in_scc (component_node);
2550 }
2551 cgraph_node *node;
2552 FOR_EACH_FUNCTION (node)
2553 update_signature (node);
2554 if (summaries_lto)
2555 ((modref_summaries_lto *)summaries_lto)->propagated = true;
2556 ipa_free_postorder_info ();
2557 free (order);
2558 delete fnspec_summaries;
2559 fnspec_summaries = NULL;
2560 return 0;
2561 }
2562
/* Summaries must stay alive until the end of compilation; this is where
   they are finally released.  */
2564
2565 void
2566 ipa_modref_c_finalize ()
2567 {
2568 if (optimization_summaries)
2569 ggc_delete (optimization_summaries);
2570 optimization_summaries = NULL;
2571 gcc_checking_assert (!summaries);
2572 if (summaries_lto)
2573 {
2574 ggc_delete (summaries_lto);
2575 summaries_lto = NULL;
2576 }
2577 if (fnspec_summaries)
2578 delete fnspec_summaries;
2579 fnspec_summaries = NULL;
2580 }
2581
2582 #include "gt-ipa-modref.h"