rtlanal.c (subreg_get_info): Exit early for simple and common cases.
[gcc.git] / gcc / ipa-cp.c
1 /* Interprocedural constant propagation
2 Copyright (C) 2005-2015 Free Software Foundation, Inc.
3
4 Contributed by Razya Ladelsky <RAZYA@il.ibm.com> and Martin Jambor
5 <mjambor@suse.cz>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* Interprocedural constant propagation (IPA-CP).
24
25 The goal of this transformation is to
26
27 1) discover functions which are always invoked with some arguments with the
28 same known constant values and modify the functions so that the
29 subsequent optimizations can take advantage of the knowledge, and
30
31 2) partial specialization - create specialized versions of functions
32 transformed in this way if some parameters are known constants only in
33 certain contexts but the estimated tradeoff between speedup and cost size
34 is deemed good.
35
36 The algorithm also propagates types and attempts to perform type based
37 devirtualization. Types are propagated much like constants.
38
39 The algorithm basically consists of three stages. In the first, functions
40 are analyzed one at a time and jump functions are constructed for all known
41 call-sites. In the second phase, the pass propagates information from the
42 jump functions across the call to reveal what values are available at what
43 call sites, performs estimations of effects of known values on functions and
44 their callees, and finally decides what specialized extra versions should be
45 created. In the third, the special versions materialize and appropriate
46 calls are redirected.
47
48 The algorithm used is to a certain extent based on "Interprocedural Constant
49 Propagation", by David Callahan, Keith D Cooper, Ken Kennedy, Linda Torczon,
50 Comp86, pg 152-161 and "A Methodology for Procedure Cloning" by Keith D
51 Cooper, Mary W. Hall, and Ken Kennedy.
52
53
54 First stage - intraprocedural analysis
55 =======================================
56
57 This phase computes jump_function and modification flags.
58
59 A jump function for a call-site represents the values passed as an actual
60 arguments of a given call-site. In principle, there are three types of
61 values:
62
63 Pass through - the caller's formal parameter is passed as an actual
64 argument, plus an operation on it can be performed.
65 Constant - a constant is passed as an actual argument.
66 Unknown - neither of the above.
67
68 All jump function types are described in detail in ipa-prop.h, together with
69 the data structures that represent them and methods of accessing them.
70
71 ipcp_generate_summary() is the main function of the first stage.
72
73 Second stage - interprocedural analysis
74 ========================================
75
76 This stage is itself divided into two phases. In the first, we propagate
77 known values over the call graph, in the second, we make cloning decisions.
78 It uses a different algorithm than the original Callahan's paper.
79
80 First, we traverse the functions topologically from callers to callees and,
81 for each strongly connected component (SCC), we propagate constants
82 according to previously computed jump functions. We also record what known
83 values depend on other known values and estimate local effects. Finally, we
84 propagate cumulative information about these effects from dependent values
85 to those on which they depend.
86
87 Second, we again traverse the call graph in the same topological order and
88 make clones for functions which we know are called with the same values in
89 all contexts and decide about extra specialized clones of functions just for
90 some contexts - these decisions are based on both local estimates and
91 cumulative estimates propagated from callees.
92
93 ipcp_propagate_stage() and ipcp_decision_stage() together constitute the
94 third stage.
95
96 Third phase - materialization of clones, call statement updates.
97 ============================================
98
99 This stage is currently performed by call graph code (mainly in cgraphunit.c
100 and tree-inline.c) according to instructions inserted to the call graph by
101 the second stage. */
102
103 #include "config.h"
104 #include "system.h"
105 #include "coretypes.h"
106 #include "hash-set.h"
107 #include "machmode.h"
108 #include "vec.h"
109 #include "hash-map.h"
110 #include "double-int.h"
111 #include "input.h"
112 #include "alias.h"
113 #include "symtab.h"
114 #include "options.h"
115 #include "wide-int.h"
116 #include "inchash.h"
117 #include "tree.h"
118 #include "fold-const.h"
119 #include "gimple-fold.h"
120 #include "gimple-expr.h"
121 #include "target.h"
122 #include "predict.h"
123 #include "basic-block.h"
124 #include "is-a.h"
125 #include "plugin-api.h"
126 #include "tm.h"
127 #include "hard-reg-set.h"
128 #include "input.h"
129 #include "function.h"
130 #include "ipa-ref.h"
131 #include "cgraph.h"
132 #include "alloc-pool.h"
133 #include "symbol-summary.h"
134 #include "ipa-prop.h"
135 #include "bitmap.h"
136 #include "tree-pass.h"
137 #include "flags.h"
138 #include "diagnostic.h"
139 #include "tree-pretty-print.h"
140 #include "tree-inline.h"
141 #include "params.h"
142 #include "ipa-inline.h"
143 #include "ipa-utils.h"
144
template <typename valtype> class ipcp_value;

/* Describes a particular source for an IPA-CP value.  A value may come from
   several call graph edges; each edge contributes one of these records.  */

template <typename valtype>
class ipcp_value_source
{
public:
  /* Aggregate offset of the source, negative if the source is scalar value of
     the argument itself.  */
  HOST_WIDE_INT offset;
  /* The incoming edge that brought the value.  */
  cgraph_edge *cs;
  /* If the jump function that resulted in this value was a pass-through or an
     ancestor, this is the ipcp_value of the caller from which the described
     value has been derived.  Otherwise it is NULL.  */
  ipcp_value<valtype> *val;
  /* Next pointer in a linked list of sources of a value.  */
  ipcp_value_source *next;
  /* If the jump function that resulted in this value was a pass-through or an
     ancestor, this is the index of the parameter of the caller the jump
     function references.  */
  int index;
};
169
/* Common ancestor for all ipcp_value instantiations.  Holds the cost/benefit
   estimates used by the cloning decision heuristics.  */

class ipcp_value_base
{
public:
  /* Time benefit and size cost that specializing the function for this value
     would bring about in this function alone.  */
  int local_time_benefit, local_size_cost;
  /* Time benefit and size cost that specializing the function for this value
     can bring about in its callees (transitively).  */
  int prop_time_benefit, prop_size_cost;
};
182
/* Describes one particular value stored in struct ipcp_lattice.  */

template <typename valtype>
class ipcp_value : public ipcp_value_base
{
public:
  /* The actual value for the given parameter.  */
  valtype value;
  /* The list of sources from which this value originates.  */
  ipcp_value_source <valtype> *sources;
  /* Next pointers in a linked list of all values in a lattice.  */
  ipcp_value *next;
  /* Next pointers in a linked list of values in a strongly connected component
     of values.  */
  ipcp_value *scc_next;
  /* Next pointers in a linked list of SCCs of values sorted topologically
     according their sources.  */
  ipcp_value *topo_next;
  /* A specialized node created for this value, NULL if none has been (so far)
     created.  */
  cgraph_node *spec_node;
  /* Depth first search number and low link for topological sorting of
     values.  */
  int dfs, low_link;
  /* True if this value is currently on the topo-sort stack.  */
  bool on_stack;

  void add_source (cgraph_edge *cs, ipcp_value *src_val, int src_idx,
		   HOST_WIDE_INT offset);
};
213
/* Lattice describing potential values of a formal parameter of a function, or
   a part of an aggregate.  TOP is represented by a lattice with zero values
   and with contains_variable and bottom flags cleared.  BOTTOM is represented
   by a lattice with the bottom flag set.  In that case, values and
   contains_variable flag should be disregarded.  */

template <typename valtype>
class ipcp_lattice
{
public:
  /* The list of known values and types in this lattice.  Note that values are
     not deallocated if a lattice is set to bottom because there may be value
     sources referencing them.  */
  ipcp_value<valtype> *values;
  /* Number of known values and types in this lattice.  */
  int values_count;
  /* The lattice contains a variable component (in addition to values).  */
  bool contains_variable;
  /* The value of the lattice is bottom (i.e. variable and unusable for any
     propagation).  */
  bool bottom;

  inline bool is_single_const ();
  inline bool set_to_bottom ();
  inline bool set_contains_variable ();
  bool add_value (valtype newval, cgraph_edge *cs,
		  ipcp_value<valtype> *src_val = NULL,
		  int src_idx = 0, HOST_WIDE_INT offset = -1);
  void print (FILE * f, bool dump_sources, bool dump_benefits);
};
244
/* Lattice of tree values with an offset to describe a part of an
   aggregate.  These form a linked list ordered by offset within
   ipcp_param_lattices.  */

class ipcp_agg_lattice : public ipcp_lattice<tree>
{
public:
  /* Offset that is being described by this lattice.  */
  HOST_WIDE_INT offset;
  /* Size so that we don't have to re-compute it every time we traverse the
     list.  Must correspond to TYPE_SIZE of all lat values.  */
  HOST_WIDE_INT size;
  /* Next element of the linked list.  */
  struct ipcp_agg_lattice *next;
};
259
/* Structure containing lattices for a parameter itself and for pieces of
   aggregates that are passed in the parameter or by a reference in a parameter
   plus some other useful flags.  */

class ipcp_param_lattices
{
public:
  /* Lattice describing the value of the parameter itself.  */
  ipcp_lattice<tree> itself;
  /* Lattice describing the polymorphic contexts of a parameter.  */
  ipcp_lattice<ipa_polymorphic_call_context> ctxlat;
  /* Lattices describing aggregate parts.  */
  ipcp_agg_lattice *aggs;
  /* Alignment information.  Very basic one value lattice where !known means
     TOP and zero alignment bottom.  */
  ipa_alignment alignment;
  /* Number of aggregate lattices */
  int aggs_count;
  /* True if aggregate data were passed by reference (as opposed to by
     value).  */
  bool aggs_by_ref;
  /* All aggregate lattices contain a variable component (in addition to
     values).  */
  bool aggs_contain_variable;
  /* The value of all aggregate lattices is bottom (i.e. variable and unusable
     for any propagation).  */
  bool aggs_bottom;

  /* There is a virtual call based on this parameter.  */
  bool virt_call;
};
291
/* Allocation pools for values and their sources in ipa-cp.  */

alloc_pool ipcp_cst_values_pool;
alloc_pool ipcp_poly_ctx_values_pool;
alloc_pool ipcp_sources_pool;
alloc_pool ipcp_agg_lattice_pool;

/* Maximal count found in program.  */

static gcov_type max_count;

/* Original overall size of the program.  */

static long overall_size, max_new_size;
306
/* Return the param lattices structure corresponding to the Ith formal
   parameter of the function described by INFO.  Asserts that I is in range
   and that INFO describes an original (non-clone) node with lattices
   allocated.  */
static inline struct ipcp_param_lattices *
ipa_get_parm_lattices (struct ipa_node_params *info, int i)
{
  gcc_assert (i >= 0 && i < ipa_get_param_count (info));
  gcc_checking_assert (!info->ipcp_orig_node);
  gcc_checking_assert (info->lattices);
  return &(info->lattices[i]);
}
317
318 /* Return the lattice corresponding to the scalar value of the Ith formal
319 parameter of the function described by INFO. */
320 static inline ipcp_lattice<tree> *
321 ipa_get_scalar_lat (struct ipa_node_params *info, int i)
322 {
323 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
324 return &plats->itself;
325 }
326
/* Return the lattice corresponding to the polymorphic call contexts of the
   Ith formal parameter of the function described by INFO.  */
static inline ipcp_lattice<ipa_polymorphic_call_context> *
ipa_get_poly_ctx_lat (struct ipa_node_params *info, int i)
{
  struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
  return &plats->ctxlat;
}
335
336 /* Return whether LAT is a lattice with a single constant and without an
337 undefined value. */
338
339 template <typename valtype>
340 inline bool
341 ipcp_lattice<valtype>::is_single_const ()
342 {
343 if (bottom || contains_variable || values_count != 1)
344 return false;
345 else
346 return true;
347 }
348
/* Print V which is extracted from a value in a lattice to F.  Addresses of
   CONST_DECLs are printed as "& <initializer>" so the dump shows the constant
   being pointed to rather than an anonymous declaration.  */

static void
print_ipcp_constant_value (FILE * f, tree v)
{
  if (TREE_CODE (v) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (v, 0)) == CONST_DECL)
    {
      fprintf (f, "& ");
      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (v, 0)), 0);
    }
  else
    print_generic_expr (f, v, 0);
}
363
/* Print V which is extracted from a value in a lattice to F.  Overload for
   polymorphic call contexts; delegates to the context's own dump method.  */

static void
print_ipcp_constant_value (FILE * f, ipa_polymorphic_call_context v)
{
  v.dump(f, false);
}
371
/* Print a lattice LAT to F.  If DUMP_SOURCES is true, print after each value
   the edges it came from; if DUMP_BENEFITS is true, print cost/benefit
   estimates after each value and separate entries with newlines rather than
   commas.  */

template <typename valtype>
void
ipcp_lattice<valtype>::print (FILE * f, bool dump_sources, bool dump_benefits)
{
  ipcp_value<valtype> *val;
  bool prev = false;

  if (bottom)
    {
      fprintf (f, "BOTTOM\n");
      return;
    }

  if (!values_count && !contains_variable)
    {
      fprintf (f, "TOP\n");
      return;
    }

  if (contains_variable)
    {
      fprintf (f, "VARIABLE");
      prev = true;
      if (dump_benefits)
	fprintf (f, "\n");
    }

  for (val = values; val; val = val->next)
    {
      /* Separator choice: one value per line when dumping benefits,
	 comma-separated otherwise; the first entry gets no separator.  */
      if (dump_benefits && prev)
	fprintf (f, " ");
      else if (!dump_benefits && prev)
	fprintf (f, ", ");
      else
	prev = true;

      print_ipcp_constant_value (f, val->value);

      if (dump_sources)
	{
	  ipcp_value_source<valtype> *s;

	  fprintf (f, " [from:");
	  for (s = val->sources; s; s = s->next)
	    fprintf (f, " %i(%i)", s->cs->caller->order,
		     s->cs->frequency);
	  fprintf (f, "]");
	}

      if (dump_benefits)
	fprintf (f, " [loc_time: %i, loc_size: %i, "
		 "prop_time: %i, prop_size: %i]\n",
		 val->local_time_benefit, val->local_size_cost,
		 val->prop_time_benefit, val->prop_size_cost);
    }
  if (!dump_benefits)
    fprintf (f, "\n");
}
432
/* Print all ipcp_lattices of all functions to F.  For each parameter of each
   function with a gimple body this dumps the scalar lattice, the context
   lattice, alignment information and all aggregate lattices.  */

static void
print_all_lattices (FILE * f, bool dump_sources, bool dump_benefits)
{
  struct cgraph_node *node;
  int i, count;

  fprintf (f, "\nLattices:\n");
  FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
    {
      struct ipa_node_params *info;

      info = IPA_NODE_REF (node);
      fprintf (f, "  Node: %s/%i:\n", node->name (),
	       node->order);
      count = ipa_get_param_count (info);
      for (i = 0; i < count; i++)
	{
	  struct ipcp_agg_lattice *aglat;
	  struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
	  fprintf (f, "    param [%d]: ", i);
	  plats->itself.print (f, dump_sources, dump_benefits);
	  fprintf (f, "         ctxs: ");
	  plats->ctxlat.print (f, dump_sources, dump_benefits);
	  /* Alignment lattice: !known is TOP, known with zero align is
	     BOTTOM (unusable), otherwise a concrete alignment.  */
	  if (plats->alignment.known && plats->alignment.align > 0)
	    fprintf (f, "         Alignment %u, misalignment %u\n",
		     plats->alignment.align, plats->alignment.misalign);
	  else if (plats->alignment.known)
	    fprintf (f, "         Alignment unusable\n");
	  else
	    fprintf (f, "         Alignment unknown\n");
	  if (plats->virt_call)
	    fprintf (f, "        virt_call flag set\n");

	  if (plats->aggs_bottom)
	    {
	      fprintf (f, "        AGGS BOTTOM\n");
	      continue;
	    }
	  if (plats->aggs_contain_variable)
	    fprintf (f, "        AGGS VARIABLE\n");
	  for (aglat = plats->aggs; aglat; aglat = aglat->next)
	    {
	      fprintf (f, "        %soffset " HOST_WIDE_INT_PRINT_DEC ": ",
		       plats->aggs_by_ref ? "ref " : "", aglat->offset);
	      aglat->print (f, dump_sources, dump_benefits);
	    }
	}
    }
}
484
/* Determine whether it is at all technically possible to create clones of NODE
   and store this information in the ipa_node_params structure associated
   with NODE.  The result is recorded in node->local.versionable; REASON (if
   set) names the first disqualifier found and is only used for dumping.  */

static void
determine_versionability (struct cgraph_node *node)
{
  const char *reason = NULL;

  /* There are a number of generic reasons functions cannot be versioned.  We
     also cannot remove parameters if there are type attributes such as fnspec
     present.  */
  if (node->alias || node->thunk.thunk_p)
    reason = "alias or thunk";
  else if (!node->local.versionable)
    reason = "not a tree_versionable_function";
  else if (node->get_availability () <= AVAIL_INTERPOSABLE)
    reason = "insufficient body availability";
  else if (!opt_for_fn (node->decl, optimize)
	   || !opt_for_fn (node->decl, flag_ipa_cp))
    reason = "non-optimized function";
  else if (lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (node->decl)))
    {
      /* Ideally we should clone the SIMD clones themselves and create
	 vector copies of them, so IPA-cp and SIMD clones can happily
	 coexist, but that may not be worth the effort.  */
      reason = "function has SIMD clones";
    }
  /* Don't clone decls local to a comdat group; it breaks and for C++
     decloned constructors, inlining is always better anyway.  */
  else if (node->comdat_local_p ())
    reason = "comdat-local function";

  /* Aliases and thunks are expected to be non-versionable; don't clutter
     the dump with them.  */
  if (reason && dump_file && !node->alias && !node->thunk.thunk_p)
    fprintf (dump_file, "Function %s/%i is not versionable, reason: %s.\n",
	     node->name (), node->order, reason);

  node->local.versionable = (reason == NULL);
}
524
/* Return true if it is at all technically possible to create clones of a
   NODE.  Merely reads the flag computed by determine_versionability.  */

static bool
ipcp_versionable_function_p (struct cgraph_node *node)
{
  return node->local.versionable;
}
533
/* Structure holding accumulated information about callers of a node.
   Filled in by gather_caller_stats.  */

struct caller_statistics
{
  /* Sum of profile counts of all non-thunk incoming edges.  */
  gcov_type count_sum;
  /* Number of calls, number of hot calls, and sum of edge frequencies.  */
  int n_calls, n_hot_calls, freq_sum;
};
541
542 /* Initialize fields of STAT to zeroes. */
543
544 static inline void
545 init_caller_stats (struct caller_statistics *stats)
546 {
547 stats->count_sum = 0;
548 stats->n_calls = 0;
549 stats->n_hot_calls = 0;
550 stats->freq_sum = 0;
551 }
552
/* Worker callback of cgraph_for_node_and_aliases accumulating statistics of
   non-thunk incoming edges to NODE.  Always returns false so the traversal
   continues over all aliases.  */

static bool
gather_caller_stats (struct cgraph_node *node, void *data)
{
  struct caller_statistics *stats = (struct caller_statistics *) data;
  struct cgraph_edge *cs;

  for (cs = node->callers; cs; cs = cs->next_caller)
    if (cs->caller->thunk.thunk_p)
      /* Look through thunks: recurse into the thunk's own callers instead
	 of counting the edge from the thunk.  */
      cs->caller->call_for_symbol_thunks_and_aliases (gather_caller_stats,
						      stats, false);
    else
      {
	stats->count_sum += cs->count;
	stats->freq_sum += cs->frequency;
	stats->n_calls++;
	if (cs->maybe_hot_p ())
	  stats->n_hot_calls ++;
      }
  return false;

}
577
/* Return true if this NODE is viable candidate for cloning.  A node
   qualifies when cloning is enabled, it is optimized for speed, and either
   cloning is estimated to shrink code, the node is usually called directly
   (with profile data), or it has at least one hot call.  */

static bool
ipcp_cloning_candidate_p (struct cgraph_node *node)
{
  struct caller_statistics stats;

  gcc_checking_assert (node->has_gimple_body_p ());

  if (!opt_for_fn (node->decl, flag_ipa_cp_clone))
    {
      if (dump_file)
	fprintf (dump_file, "Not considering %s for cloning; "
		 "-fipa-cp-clone disabled.\n",
		 node->name ());
      return false;
    }

  if (!optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->decl)))
    {
      if (dump_file)
	fprintf (dump_file, "Not considering %s for cloning; "
		 "optimizing it for size.\n",
		 node->name ());
      return false;
    }

  init_caller_stats (&stats);
  node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats, false);

  /* If the function body is smaller than the per-call overhead summed over
     all call sites, cloning might even reduce overall size.  */
  if (inline_summaries->get (node)->self_size < stats.n_calls)
    {
      if (dump_file)
	fprintf (dump_file, "Considering %s for cloning; code might shrink.\n",
		 node->name ());
      return true;
    }

  /* When profile is available and function is hot, propagate into it even if
     calls seems cold; constant propagation can improve function's speed
     significantly.  */
  if (max_count)
    {
      /* "Usually called directly" means direct calls account for more than
	 90% of the node's execution count.  */
      if (stats.count_sum > node->count * 90 / 100)
	{
	  if (dump_file)
	    fprintf (dump_file, "Considering %s for cloning; "
		     "usually called directly.\n",
		     node->name ());
	  return true;
	}
    }
  if (!stats.n_hot_calls)
    {
      if (dump_file)
	fprintf (dump_file, "Not considering %s for cloning; no hot calls.\n",
		 node->name ());
      return false;
    }
  if (dump_file)
    fprintf (dump_file, "Considering %s for cloning.\n",
	     node->name ());
  return true;
}
642
/* Bookkeeping for Tarjan-style topological sorting of ipcp_values and for
   propagating estimated effects along the resulting order.  */

template <typename valtype>
class value_topo_info
{
public:
  /* Head of the linked list of topologically sorted values.  */
  ipcp_value<valtype> *values_topo;
  /* Stack for creating SCCs, represented by a linked list too.  */
  ipcp_value<valtype> *stack;
  /* Counter driving the algorithm in add_val_to_toposort.  */
  int dfs_counter;

  value_topo_info () : values_topo (NULL), stack (NULL), dfs_counter (0)
  {}
  void add_val (ipcp_value<valtype> *cur_val);
  void propagate_effects ();
};
659
/* Arrays representing a topological ordering of call graph nodes and a stack
   of nodes used during constant propagation and also data required to perform
   topological sort of values and propagation of benefits in the determined
   order.  */

class ipa_topo_info
{
public:
  /* Array with obtained topological order of cgraph nodes.  */
  struct cgraph_node **order;
  /* Stack of cgraph nodes used during propagation within SCC until all values
     in the SCC stabilize.  */
  struct cgraph_node **stack;
  /* Number of nodes in ORDER and current top-of-stack index.  */
  int nnodes, stack_top;

  /* Per-value-type topo-sort state; CONTEXTS is default-constructed.  */
  value_topo_info<tree> constants;
  value_topo_info<ipa_polymorphic_call_context> contexts;

  ipa_topo_info () : order(NULL), stack(NULL), nnodes(0), stack_top(0),
    constants ()
  {}
};
682
/* Allocate the arrays in TOPO and topologically sort the nodes into order.
   The arrays are sized for the whole call graph; the actual node count is
   recorded in topo->nnodes.  */

static void
build_toporder_info (struct ipa_topo_info *topo)
{
  topo->order = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  topo->stack = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);

  gcc_checking_assert (topo->stack_top == 0);
  topo->nnodes = ipa_reduced_postorder (topo->order, true, true, NULL);
}
694
695 /* Free information about strongly connected components and the arrays in
696 TOPO. */
697
698 static void
699 free_toporder_info (struct ipa_topo_info *topo)
700 {
701 ipa_free_postorder_info ();
702 free (topo->order);
703 free (topo->stack);
704 }
705
706 /* Add NODE to the stack in TOPO, unless it is already there. */
707
708 static inline void
709 push_node_to_stack (struct ipa_topo_info *topo, struct cgraph_node *node)
710 {
711 struct ipa_node_params *info = IPA_NODE_REF (node);
712 if (info->node_enqueued)
713 return;
714 info->node_enqueued = 1;
715 topo->stack[topo->stack_top++] = node;
716 }
717
718 /* Pop a node from the stack in TOPO and return it or return NULL if the stack
719 is empty. */
720
721 static struct cgraph_node *
722 pop_node_from_stack (struct ipa_topo_info *topo)
723 {
724 if (topo->stack_top)
725 {
726 struct cgraph_node *node;
727 topo->stack_top--;
728 node = topo->stack[topo->stack_top];
729 IPA_NODE_REF (node)->node_enqueued = 0;
730 return node;
731 }
732 else
733 return NULL;
734 }
735
736 /* Set lattice LAT to bottom and return true if it previously was not set as
737 such. */
738
739 template <typename valtype>
740 inline bool
741 ipcp_lattice<valtype>::set_to_bottom ()
742 {
743 bool ret = !bottom;
744 bottom = true;
745 return ret;
746 }
747
748 /* Mark lattice as containing an unknown value and return true if it previously
749 was not marked as such. */
750
751 template <typename valtype>
752 inline bool
753 ipcp_lattice<valtype>::set_contains_variable ()
754 {
755 bool ret = !contains_variable;
756 contains_variable = true;
757 return ret;
758 }
759
760 /* Set all aggegate lattices in PLATS to bottom and return true if they were
761 not previously set as such. */
762
763 static inline bool
764 set_agg_lats_to_bottom (struct ipcp_param_lattices *plats)
765 {
766 bool ret = !plats->aggs_bottom;
767 plats->aggs_bottom = true;
768 return ret;
769 }
770
771 /* Mark all aggegate lattices in PLATS as containing an unknown value and
772 return true if they were not previously marked as such. */
773
774 static inline bool
775 set_agg_lats_contain_variable (struct ipcp_param_lattices *plats)
776 {
777 bool ret = !plats->aggs_contain_variable;
778 plats->aggs_contain_variable = true;
779 return ret;
780 }
781
782 /* Return true if alignment information in PLATS is known to be unusable. */
783
784 static inline bool
785 alignment_bottom_p (ipcp_param_lattices *plats)
786 {
787 return plats->alignment.known && (plats->alignment.align == 0);
788 }
789
790 /* Set alignment information in PLATS to unusable. Return true if it
791 previously was usable or unknown. */
792
793 static inline bool
794 set_alignment_to_bottom (ipcp_param_lattices *plats)
795 {
796 if (alignment_bottom_p (plats))
797 return false;
798 plats->alignment.known = true;
799 plats->alignment.align = 0;
800 return true;
801 }
802
/* Mark both aggregate and scalar lattices as containing an unknown variable,
   return true if any of them has not been marked as such so far.  */

static inline bool
set_all_contains_variable (struct ipcp_param_lattices *plats)
{
  bool ret;
  /* Use |= rather than || so every setter runs even after one of them has
     already returned true.  */
  ret = plats->itself.set_contains_variable ();
  ret |= plats->ctxlat.set_contains_variable ();
  ret |= set_agg_lats_contain_variable (plats);
  ret |= set_alignment_to_bottom (plats);
  return ret;
}
816
/* Initialize ipcp_lattices of NODE.  Non-local nodes get all lattices set to
   BOTTOM (if they cannot be cloned) or VARIABLE (if they can); additionally
   the virt_call flag is set on parameters used in polymorphic indirect
   calls.  */

static void
initialize_node_lattices (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  struct cgraph_edge *ie;
  bool disable = false, variable = false;
  int i;

  gcc_checking_assert (node->has_gimple_body_p ());
  if (!cgraph_local_p (node))
    {
      /* When cloning is allowed, we can assume that externally visible
	 functions are not called.  We will compensate this by cloning
	 later.  */
      if (ipcp_versionable_function_p (node)
	  && ipcp_cloning_candidate_p (node))
	variable = true;
      else
	disable = true;
    }

  if (disable || variable)
    {
      for (i = 0; i < ipa_get_param_count (info) ; i++)
	{
	  struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
	  if (disable)
	    {
	      plats->itself.set_to_bottom ();
	      plats->ctxlat.set_to_bottom ();
	      set_agg_lats_to_bottom (plats);
	      set_alignment_to_bottom (plats);
	    }
	  else
	    set_all_contains_variable (plats);
	}
      if (dump_file && (dump_flags & TDF_DETAILS)
	  && !node->alias && !node->thunk.thunk_p)
	fprintf (dump_file, "Marking all lattices of %s/%i as %s\n",
		 node->name (), node->order,
		 disable ? "BOTTOM" : "VARIABLE");
    }

  for (ie = node->indirect_calls; ie; ie = ie->next_callee)
    if (ie->indirect_info->polymorphic
	&& ie->indirect_info->param_index >= 0)
      {
	/* NOTE(review): this assert restates the condition just above, so
	   one of the two checks appears redundant.  */
	gcc_checking_assert (ie->indirect_info->param_index >= 0);
	ipa_get_parm_lattices (info,
			       ie->indirect_info->param_index)->virt_call = 1;
      }
}
871
/* Return the result of a (possibly arithmetic) pass through jump function
   JFUNC on the constant value INPUT.  Return NULL_TREE if that cannot be
   determined or be considered an interprocedural invariant.  */

static tree
ipa_get_jf_pass_through_result (struct ipa_jump_func *jfunc, tree input)
{
  tree restype, res;

  gcc_checking_assert (is_gimple_ip_invariant (input));
  /* A plain pass-through simply forwards the caller's value.  */
  if (ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
    return input;

  /* Comparisons yield a boolean; all other operations keep INPUT's type.  */
  if (TREE_CODE_CLASS (ipa_get_jf_pass_through_operation (jfunc))
      == tcc_comparison)
    restype = boolean_type_node;
  else
    restype = TREE_TYPE (input);
  res = fold_binary (ipa_get_jf_pass_through_operation (jfunc), restype,
		     input, ipa_get_jf_pass_through_operand (jfunc));

  /* fold_binary may fail (NULL) or produce something that is not an IP
     invariant; both count as "cannot be determined".  */
  if (res && !is_gimple_ip_invariant (res))
    return NULL_TREE;

  return res;
}
898
/* Return the result of an ancestor jump function JFUNC on the constant value
   INPUT.  Return NULL_TREE if that cannot be determined.  Only ADDR_EXPR
   inputs can be offset; anything else is given up on.  */

static tree
ipa_get_jf_ancestor_result (struct ipa_jump_func *jfunc, tree input)
{
  gcc_checking_assert (TREE_CODE (input) != TREE_BINFO);
  if (TREE_CODE (input) == ADDR_EXPR)
    {
      /* Rebuild &obj as &(obj + ancestor_offset).  */
      tree t = TREE_OPERAND (input, 0);
      t = build_ref_for_offset (EXPR_LOCATION (t), t,
				ipa_get_jf_ancestor_offset (jfunc),
				ptr_type_node, NULL, false);
      return build_fold_addr_expr (t);
    }
  else
    return NULL_TREE;
}
917
/* Determine whether JFUNC evaluates to a single known constant value and if
   so, return it.  Otherwise return NULL.  INFO describes the caller node or
   the one it is inlined to, so that pass-through jump functions can be
   evaluated.  */

tree
ipa_value_from_jfunc (struct ipa_node_params *info, struct ipa_jump_func *jfunc)
{
  if (jfunc->type == IPA_JF_CONST)
    return ipa_get_jf_constant (jfunc);
  else if (jfunc->type == IPA_JF_PASS_THROUGH
	   || jfunc->type == IPA_JF_ANCESTOR)
    {
      tree input;
      int idx;

      /* IDX is the caller's formal parameter the jump function refers to.  */
      if (jfunc->type == IPA_JF_PASS_THROUGH)
	idx = ipa_get_jf_pass_through_formal_id (jfunc);
      else
	idx = ipa_get_jf_ancestor_formal_id (jfunc);

      /* For an already-specialized clone the known constants are recorded
	 directly; otherwise consult the lattices, which must hold exactly
	 one constant for the parameter to be usable.  */
      if (info->ipcp_orig_node)
	input = info->known_csts[idx];
      else
	{
	  ipcp_lattice<tree> *lat;

	  if (!info->lattices)
	    return NULL_TREE;
	  lat = ipa_get_scalar_lat (info, idx);
	  if (!lat->is_single_const ())
	    return NULL_TREE;
	  input = lat->values->value;
	}

      if (!input)
	return NULL_TREE;

      if (jfunc->type == IPA_JF_PASS_THROUGH)
	return ipa_get_jf_pass_through_result (jfunc, input);
      else
	return ipa_get_jf_ancestor_result (jfunc, input);
    }
  else
    return NULL_TREE;
}
964
/* Determine whether JFUNC evaluates to single known polymorphic context, given
   that INFO describes the caller node or the one it is inlined to, CS is the
   call graph edge corresponding to JFUNC and CSIDX index of the described
   parameter.  */

ipa_polymorphic_call_context
ipa_context_from_jfunc (ipa_node_params *info, cgraph_edge *cs, int csidx,
			ipa_jump_func *jfunc)
{
  /* NOTE(review): IPA_EDGE_REF (cs) is evaluated before the "cs ?" test
     below; if CS can really be NULL here, this line looks unsafe — confirm
     against callers.  */
  ipa_edge_args *args = IPA_EDGE_REF (cs);
  ipa_polymorphic_call_context ctx;
  ipa_polymorphic_call_context *edge_ctx
    = cs ? ipa_get_ith_polymorhic_call_context (args, csidx) : NULL;

  /* Start from the context recorded on the edge, if it carries any useful
     information.  */
  if (edge_ctx && !edge_ctx->useless_p ())
    ctx = *edge_ctx;

  if (jfunc->type == IPA_JF_PASS_THROUGH
      || jfunc->type == IPA_JF_ANCESTOR)
    {
      ipa_polymorphic_call_context srcctx;
      int srcidx;
      bool type_preserved = true;
      if (jfunc->type == IPA_JF_PASS_THROUGH)
	{
	  /* Arithmetic pass-throughs destroy the context.  */
	  if (ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
	    return ctx;
	  type_preserved = ipa_get_jf_pass_through_type_preserved (jfunc);
	  srcidx = ipa_get_jf_pass_through_formal_id (jfunc);
	}
      else
	{
	  type_preserved = ipa_get_jf_ancestor_type_preserved (jfunc);
	  srcidx = ipa_get_jf_ancestor_formal_id (jfunc);
	}
      /* Fetch the caller-side context: from recorded known contexts for a
	 specialized clone, otherwise from a single-constant lattice.  */
      if (info->ipcp_orig_node)
	{
	  if (info->known_contexts.exists ())
	    srcctx = info->known_contexts[srcidx];
	}
      else
	{
	  if (!info->lattices)
	    return ctx;
	  ipcp_lattice<ipa_polymorphic_call_context> *lat;
	  lat = ipa_get_poly_ctx_lat (info, srcidx);
	  if (!lat->is_single_const ())
	    return ctx;
	  srcctx = lat->values->value;
	}
      if (srcctx.useless_p ())
	return ctx;
      /* Adjust the caller context for the jump function and merge it with
	 whatever the edge itself knew.  */
      if (jfunc->type == IPA_JF_ANCESTOR)
	srcctx.offset_by (ipa_get_jf_ancestor_offset (jfunc));
      if (!type_preserved)
	srcctx.possible_dynamic_type_change (cs->in_polymorphic_cdtor);
      srcctx.combine_with (ctx);
      return srcctx;
    }

  return ctx;
}
1027
1028 /* If checking is enabled, verify that no lattice is in the TOP state, i.e. not
1029 bottom, not containing a variable component and without any known value at
1030 the same time. */
1031
1032 DEBUG_FUNCTION void
1033 ipcp_verify_propagated_values (void)
1034 {
1035 struct cgraph_node *node;
1036
1037 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
1038 {
1039 struct ipa_node_params *info = IPA_NODE_REF (node);
1040 int i, count = ipa_get_param_count (info);
1041
1042 for (i = 0; i < count; i++)
1043 {
1044 ipcp_lattice<tree> *lat = ipa_get_scalar_lat (info, i);
1045
1046 if (!lat->bottom
1047 && !lat->contains_variable
1048 && lat->values_count == 0)
1049 {
1050 if (dump_file)
1051 {
1052 symtab_node::dump_table (dump_file);
1053 fprintf (dump_file, "\nIPA lattices after constant "
1054 "propagation, before gcc_unreachable:\n");
1055 print_all_lattices (dump_file, true, false);
1056 }
1057
1058 gcc_unreachable ();
1059 }
1060 }
1061 }
1062 }
1063
1064 /* Return true iff X and Y should be considered equal values by IPA-CP. */
1065
1066 static bool
1067 values_equal_for_ipcp_p (tree x, tree y)
1068 {
1069 gcc_checking_assert (x != NULL_TREE && y != NULL_TREE);
1070
1071 if (x == y)
1072 return true;
1073
1074 if (TREE_CODE (x) == ADDR_EXPR
1075 && TREE_CODE (y) == ADDR_EXPR
1076 && TREE_CODE (TREE_OPERAND (x, 0)) == CONST_DECL
1077 && TREE_CODE (TREE_OPERAND (y, 0)) == CONST_DECL)
1078 return operand_equal_p (DECL_INITIAL (TREE_OPERAND (x, 0)),
1079 DECL_INITIAL (TREE_OPERAND (y, 0)), 0);
1080 else
1081 return operand_equal_p (x, y, 0);
1082 }
1083
1084 /* Return true iff X and Y should be considered equal contexts by IPA-CP. */
1085
1086 static bool
1087 values_equal_for_ipcp_p (ipa_polymorphic_call_context x,
1088 ipa_polymorphic_call_context y)
1089 {
1090 return x.equal_to (y);
1091 }
1092
1093
1094 /* Add a new value source to the value represented by THIS, marking that a
1095 value comes from edge CS and (if the underlying jump function is a
1096 pass-through or an ancestor one) from a caller value SRC_VAL of a caller
1097 parameter described by SRC_INDEX. OFFSET is negative if the source was the
1098 scalar value of the parameter itself or the offset within an aggregate. */
1099
1100 template <typename valtype>
1101 void
1102 ipcp_value<valtype>::add_source (cgraph_edge *cs, ipcp_value *src_val,
1103 int src_idx, HOST_WIDE_INT offset)
1104 {
1105 ipcp_value_source<valtype> *src;
1106
1107 src = new (pool_alloc (ipcp_sources_pool)) ipcp_value_source<valtype>;
1108 src->offset = offset;
1109 src->cs = cs;
1110 src->val = src_val;
1111 src->index = src_idx;
1112
1113 src->next = sources;
1114 sources = src;
1115 }
1116
1117 /* Allocate a new ipcp_value holding a tree constant, initialize its value to
1118 SOURCE and clear all other fields. */
1119
1120 static ipcp_value<tree> *
1121 allocate_and_init_ipcp_value (tree source)
1122 {
1123 ipcp_value<tree> *val;
1124
1125 val = new (pool_alloc (ipcp_cst_values_pool)) ipcp_value<tree>;
1126 memset (val, 0, sizeof (*val));
1127 val->value = source;
1128 return val;
1129 }
1130
1131 /* Allocate a new ipcp_value holding a polymorphic context, initialize its
1132 value to SOURCE and clear all other fields. */
1133
1134 static ipcp_value<ipa_polymorphic_call_context> *
1135 allocate_and_init_ipcp_value (ipa_polymorphic_call_context source)
1136 {
1137 ipcp_value<ipa_polymorphic_call_context> *val;
1138
1139 val = new (pool_alloc (ipcp_poly_ctx_values_pool))
1140 ipcp_value<ipa_polymorphic_call_context>;
1141 memset (val, 0, sizeof (*val));
1142 val->value = source;
1143 return val;
1144 }
1145
/* Try to add NEWVAL to LAT, potentially creating a new ipcp_value for it.  CS,
   SRC_VAL, SRC_INDEX and OFFSET are meant for add_source and have the same
   meaning.  OFFSET -1 means the source is scalar and not a part of an
   aggregate.  */
1150
template <typename valtype>
bool
ipcp_lattice<valtype>::add_value (valtype newval, cgraph_edge *cs,
				  ipcp_value<valtype> *src_val,
				  int src_idx, HOST_WIDE_INT offset)
{
  ipcp_value<valtype> *val;

  /* A bottom lattice accepts nothing.  */
  if (bottom)
    return false;

  /* If NEWVAL is already present, just record CS as one more source.  */
  for (val = values; val; val = val->next)
    if (values_equal_for_ipcp_p (val->value, newval))
      {
	if (ipa_edge_within_scc (cs))
	  {
	    /* Within an SCC, do not record the same edge as a source twice,
	       so that repeated propagation cannot grow the source list
	       without bound.  */
	    ipcp_value_source<valtype> *s;
	    for (s = val->sources; s ; s = s->next)
	      if (s->cs == cs)
		break;
	    if (s)
	      return false;
	  }

	val->add_source (cs, src_val, src_idx, offset);
	return false;
      }

  /* Too many distinct values make lattice evaluation too expensive;
     degrade to bottom instead.  */
  if (values_count == PARAM_VALUE (PARAM_IPA_CP_VALUE_LIST_SIZE))
    {
      /* We can only free sources, not the values themselves, because sources
	 of other values in this SCC might point to them.  */
      for (val = values; val; val = val->next)
	{
	  while (val->sources)
	    {
	      ipcp_value_source<valtype> *src = val->sources;
	      val->sources = src->next;
	      pool_free (ipcp_sources_pool, src);
	    }
	}

      values = NULL;
      return set_to_bottom ();
    }

  /* Genuinely new value: allocate it and push it onto the value list.  */
  values_count++;
  val = allocate_and_init_ipcp_value (newval);
  val->add_source (cs, src_val, src_idx, offset);
  val->next = values;
  values = val;
  return true;
}
1204
1205 /* Propagate values through a pass-through jump function JFUNC associated with
1206 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1207 is the index of the source parameter. */
1208
1209 static bool
1210 propagate_vals_accross_pass_through (cgraph_edge *cs,
1211 ipa_jump_func *jfunc,
1212 ipcp_lattice<tree> *src_lat,
1213 ipcp_lattice<tree> *dest_lat,
1214 int src_idx)
1215 {
1216 ipcp_value<tree> *src_val;
1217 bool ret = false;
1218
1219 /* Do not create new values when propagating within an SCC because if there
1220 are arithmetic functions with circular dependencies, there is infinite
1221 number of them and we would just make lattices bottom. */
1222 if ((ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
1223 && ipa_edge_within_scc (cs))
1224 ret = dest_lat->set_contains_variable ();
1225 else
1226 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1227 {
1228 tree cstval = ipa_get_jf_pass_through_result (jfunc, src_val->value);
1229
1230 if (cstval)
1231 ret |= dest_lat->add_value (cstval, cs, src_val, src_idx);
1232 else
1233 ret |= dest_lat->set_contains_variable ();
1234 }
1235
1236 return ret;
1237 }
1238
1239 /* Propagate values through an ancestor jump function JFUNC associated with
1240 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1241 is the index of the source parameter. */
1242
1243 static bool
1244 propagate_vals_accross_ancestor (struct cgraph_edge *cs,
1245 struct ipa_jump_func *jfunc,
1246 ipcp_lattice<tree> *src_lat,
1247 ipcp_lattice<tree> *dest_lat,
1248 int src_idx)
1249 {
1250 ipcp_value<tree> *src_val;
1251 bool ret = false;
1252
1253 if (ipa_edge_within_scc (cs))
1254 return dest_lat->set_contains_variable ();
1255
1256 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1257 {
1258 tree t = ipa_get_jf_ancestor_result (jfunc, src_val->value);
1259
1260 if (t)
1261 ret |= dest_lat->add_value (t, cs, src_val, src_idx);
1262 else
1263 ret |= dest_lat->set_contains_variable ();
1264 }
1265
1266 return ret;
1267 }
1268
1269 /* Propagate scalar values across jump function JFUNC that is associated with
1270 edge CS and put the values into DEST_LAT. */
1271
static bool
propagate_scalar_accross_jump_function (struct cgraph_edge *cs,
					struct ipa_jump_func *jfunc,
					ipcp_lattice<tree> *dest_lat)
{
  /* Nothing can change a lattice that already dropped to bottom.  */
  if (dest_lat->bottom)
    return false;

  if (jfunc->type == IPA_JF_CONST)
    {
      /* A constant jump function contributes exactly one value.  */
      tree val = ipa_get_jf_constant (jfunc);
      return dest_lat->add_value (val, cs, NULL, 0);
    }
  else if (jfunc->type == IPA_JF_PASS_THROUGH
	   || jfunc->type == IPA_JF_ANCESTOR)
    {
      struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
      ipcp_lattice<tree> *src_lat;
      int src_idx;
      bool ret;

      /* Both jump function kinds refer to a formal parameter of the caller;
	 find its index and the corresponding source lattice.  */
      if (jfunc->type == IPA_JF_PASS_THROUGH)
	src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
      else
	src_idx = ipa_get_jf_ancestor_formal_id (jfunc);

      src_lat = ipa_get_scalar_lat (caller_info, src_idx);
      if (src_lat->bottom)
	return dest_lat->set_contains_variable ();

      /* If we would need to clone the caller and cannot, do not propagate.  */
      if (!ipcp_versionable_function_p (cs->caller)
	  && (src_lat->contains_variable
	      || (src_lat->values_count > 1)))
	return dest_lat->set_contains_variable ();

      if (jfunc->type == IPA_JF_PASS_THROUGH)
	ret = propagate_vals_accross_pass_through (cs, jfunc, src_lat,
						   dest_lat, src_idx);
      else
	ret = propagate_vals_accross_ancestor (cs, jfunc, src_lat, dest_lat,
					       src_idx);

      /* An unknown component in the source makes the destination partially
	 unknown too, on top of any values propagated above.  */
      if (src_lat->contains_variable)
	ret |= dest_lat->set_contains_variable ();

      return ret;
    }

  /* TODO: We currently do not handle member method pointers in IPA-CP (we only
     use it for indirect inlining), we should propagate them too.  */
  return dest_lat->set_contains_variable ();
}
1325
/* Propagate polymorphic call contexts across jump function JFUNC that is
   associated with edge CS, which describes argument IDX, and put the values
   into DEST_LAT.  */
1328
static bool
propagate_context_accross_jump_function (cgraph_edge *cs,
			  ipa_jump_func *jfunc, int idx,
			  ipcp_lattice<ipa_polymorphic_call_context> *dest_lat)
{
  ipa_edge_args *args = IPA_EDGE_REF (cs);
  /* Nothing can change a lattice that already dropped to bottom.  */
  if (dest_lat->bottom)
    return false;
  bool ret = false;
  bool added_sth = false;
  bool type_preserved = true;

  /* The context recorded directly on the edge, if any, is combined with
     whatever we manage to propagate below.  */
  ipa_polymorphic_call_context edge_ctx, *edge_ctx_ptr
    = ipa_get_ith_polymorhic_call_context (args, idx);

  if (edge_ctx_ptr)
    edge_ctx = *edge_ctx_ptr;

  if (jfunc->type == IPA_JF_PASS_THROUGH
      || jfunc->type == IPA_JF_ANCESTOR)
    {
      struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
      int src_idx;
      ipcp_lattice<ipa_polymorphic_call_context> *src_lat;

      /* TODO: Once we figure out how to propagate speculations, it will
	 probably be a good idea to switch to speculation if type_preserved is
	 not set instead of punting.  */
      if (jfunc->type == IPA_JF_PASS_THROUGH)
	{
	  /* Only a plain copy of a parameter can carry a context.  */
	  if (ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
	    goto prop_fail;
	  type_preserved = ipa_get_jf_pass_through_type_preserved (jfunc);
	  src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
	}
      else
	{
	  type_preserved = ipa_get_jf_ancestor_type_preserved (jfunc);
	  src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
	}

      src_lat = ipa_get_poly_ctx_lat (caller_info, src_idx);
      /* If we would need to clone the caller and cannot, do not propagate.  */
      if (!ipcp_versionable_function_p (cs->caller)
	  && (src_lat->contains_variable
	      || (src_lat->values_count > 1)))
	goto prop_fail;

      ipcp_value<ipa_polymorphic_call_context> *src_val;
      for (src_val = src_lat->values; src_val; src_val = src_val->next)
	{
	  ipa_polymorphic_call_context cur = src_val->value;

	  /* Mirror the adjustments the jump function performs on the value:
	     possible dynamic type change and ancestor offset.  */
	  if (!type_preserved)
	    cur.possible_dynamic_type_change (cs->in_polymorphic_cdtor);
	  if (jfunc->type == IPA_JF_ANCESTOR)
	    cur.offset_by (ipa_get_jf_ancestor_offset (jfunc));
	  /* TODO: In cases we know how the context is going to be used,
	     we can improve the result by passing proper OTR_TYPE.  */
	  cur.combine_with (edge_ctx);
	  if (!cur.useless_p ())
	    {
	      if (src_lat->contains_variable
		  && !edge_ctx.equal_to (cur))
		ret |= dest_lat->set_contains_variable ();
	      ret |= dest_lat->add_value (cur, cs, src_val, src_idx);
	      added_sth = true;
	    }
	}

    }

 prop_fail:
  /* If propagation yielded nothing, fall back to the edge context alone,
     or mark the destination as containing an unknown value.  */
  if (!added_sth)
    {
      if (!edge_ctx.useless_p ())
	ret |= dest_lat->add_value (edge_ctx, cs);
      else
	ret |= dest_lat->set_contains_variable ();
    }

  return ret;
}
1412
1413 /* Propagate alignments across jump function JFUNC that is associated with
1414 edge CS and update DEST_LAT accordingly. */
1415
1416 static bool
1417 propagate_alignment_accross_jump_function (struct cgraph_edge *cs,
1418 struct ipa_jump_func *jfunc,
1419 struct ipcp_param_lattices *dest_lat)
1420 {
1421 if (alignment_bottom_p (dest_lat))
1422 return false;
1423
1424 ipa_alignment cur;
1425 cur.known = false;
1426 if (jfunc->alignment.known)
1427 cur = jfunc->alignment;
1428 else if (jfunc->type == IPA_JF_PASS_THROUGH
1429 || jfunc->type == IPA_JF_ANCESTOR)
1430 {
1431 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1432 struct ipcp_param_lattices *src_lats;
1433 HOST_WIDE_INT offset = 0;
1434 int src_idx;
1435
1436 if (jfunc->type == IPA_JF_PASS_THROUGH)
1437 {
1438 enum tree_code op = ipa_get_jf_pass_through_operation (jfunc);
1439 if (op != NOP_EXPR)
1440 {
1441 if (op != POINTER_PLUS_EXPR
1442 && op != PLUS_EXPR
1443 && op != MINUS_EXPR)
1444 goto prop_fail;
1445 tree operand = ipa_get_jf_pass_through_operand (jfunc);
1446 if (!tree_fits_shwi_p (operand))
1447 goto prop_fail;
1448 offset = tree_to_shwi (operand);
1449 }
1450 src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1451 }
1452 else
1453 {
1454 src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1455 offset = ipa_get_jf_ancestor_offset (jfunc);
1456 }
1457
1458 src_lats = ipa_get_parm_lattices (caller_info, src_idx);
1459 if (!src_lats->alignment.known
1460 || alignment_bottom_p (src_lats))
1461 goto prop_fail;
1462
1463 cur = src_lats->alignment;
1464 cur.misalign = (cur.misalign + offset) % cur.align;
1465 }
1466
1467 if (cur.known)
1468 {
1469 if (!dest_lat->alignment.known)
1470 {
1471 dest_lat->alignment = cur;
1472 return true;
1473 }
1474 else if (dest_lat->alignment.align == cur.align
1475 && dest_lat->alignment.misalign == cur.misalign)
1476 return false;
1477 }
1478
1479 prop_fail:
1480 set_alignment_to_bottom (dest_lat);
1481 return true;
1482 }
1483
1484 /* If DEST_PLATS already has aggregate items, check that aggs_by_ref matches
1485 NEW_AGGS_BY_REF and if not, mark all aggs as bottoms and return true (in all
1486 other cases, return false). If there are no aggregate items, set
1487 aggs_by_ref to NEW_AGGS_BY_REF. */
1488
1489 static bool
1490 set_check_aggs_by_ref (struct ipcp_param_lattices *dest_plats,
1491 bool new_aggs_by_ref)
1492 {
1493 if (dest_plats->aggs)
1494 {
1495 if (dest_plats->aggs_by_ref != new_aggs_by_ref)
1496 {
1497 set_agg_lats_to_bottom (dest_plats);
1498 return true;
1499 }
1500 }
1501 else
1502 dest_plats->aggs_by_ref = new_aggs_by_ref;
1503 return false;
1504 }
1505
1506 /* Walk aggregate lattices in DEST_PLATS from ***AGLAT on, until ***aglat is an
1507 already existing lattice for the given OFFSET and SIZE, marking all skipped
1508 lattices as containing variable and checking for overlaps. If there is no
1509 already existing lattice for the OFFSET and VAL_SIZE, create one, initialize
1510 it with offset, size and contains_variable to PRE_EXISTING, and return true,
   unless there are too many already.  If there are too many, return false.  If
1512 there are overlaps turn whole DEST_PLATS to bottom and return false. If any
1513 skipped lattices were newly marked as containing variable, set *CHANGE to
1514 true. */
1515
static bool
merge_agg_lats_step (struct ipcp_param_lattices *dest_plats,
		     HOST_WIDE_INT offset, HOST_WIDE_INT val_size,
		     struct ipcp_agg_lattice ***aglat,
		     bool pre_existing, bool *change)
{
  gcc_checking_assert (offset >= 0);

  /* Skip lattices that lie entirely before OFFSET, marking them as
     containing an unknown value; a partial overlap is fatal.  */
  while (**aglat && (**aglat)->offset < offset)
    {
      if ((**aglat)->offset + (**aglat)->size > offset)
	{
	  set_agg_lats_to_bottom (dest_plats);
	  return false;
	}
      *change |= (**aglat)->set_contains_variable ();
      *aglat = &(**aglat)->next;
    }

  if (**aglat && (**aglat)->offset == offset)
    {
      /* An existing lattice at OFFSET must match VAL_SIZE exactly and must
	 not overlap its successor.  */
      if ((**aglat)->size != val_size
	  || ((**aglat)->next
	      && (**aglat)->next->offset < offset + val_size))
	{
	  set_agg_lats_to_bottom (dest_plats);
	  return false;
	}
      gcc_checking_assert (!(**aglat)->next
			   || (**aglat)->next->offset >= offset + val_size);
      return true;
    }
  else
    {
      struct ipcp_agg_lattice *new_al;

      /* A newly created lattice must not overlap the following one.  */
      if (**aglat && (**aglat)->offset < offset + val_size)
	{
	  set_agg_lats_to_bottom (dest_plats);
	  return false;
	}
      /* Refuse to track more lattices than the parameter allows.  */
      if (dest_plats->aggs_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	return false;
      dest_plats->aggs_count++;
      new_al = (struct ipcp_agg_lattice *) pool_alloc (ipcp_agg_lattice_pool);
      memset (new_al, 0, sizeof (*new_al));

      new_al->offset = offset;
      new_al->size = val_size;
      new_al->contains_variable = pre_existing;

      /* Link the new lattice in before **AGLAT, keeping the list sorted
	 by offset.  */
      new_al->next = **aglat;
      **aglat = new_al;
      return true;
    }
}
1572
1573 /* Set all AGLAT and all other aggregate lattices reachable by next pointers as
1574 containing an unknown value. */
1575
1576 static bool
1577 set_chain_of_aglats_contains_variable (struct ipcp_agg_lattice *aglat)
1578 {
1579 bool ret = false;
1580 while (aglat)
1581 {
1582 ret |= aglat->set_contains_variable ();
1583 aglat = aglat->next;
1584 }
1585 return ret;
1586 }
1587
/* Merge existing aggregate lattices in SRC_PLATS to DEST_PLATS, subtracting
   OFFSET_DELTA.  CS is the call graph edge and SRC_IDX the index of the source
   parameter used for lattice value sources.  Return true if DEST_PLATS changed
   in any way.  */
1592
static bool
merge_aggregate_lattices (struct cgraph_edge *cs,
			  struct ipcp_param_lattices *dest_plats,
			  struct ipcp_param_lattices *src_plats,
			  int src_idx, HOST_WIDE_INT offset_delta)
{
  bool pre_existing = dest_plats->aggs != NULL;
  struct ipcp_agg_lattice **dst_aglat;
  bool ret = false;

  /* The by-reference flags of source and destination must agree.  */
  if (set_check_aggs_by_ref (dest_plats, src_plats->aggs_by_ref))
    return true;
  if (src_plats->aggs_bottom)
    return set_agg_lats_contain_variable (dest_plats);
  if (src_plats->aggs_contain_variable)
    ret |= set_agg_lats_contain_variable (dest_plats);
  dst_aglat = &dest_plats->aggs;

  for (struct ipcp_agg_lattice *src_aglat = src_plats->aggs;
       src_aglat;
       src_aglat = src_aglat->next)
    {
      HOST_WIDE_INT new_offset = src_aglat->offset - offset_delta;

      /* Items that would land before the start of the destination
	 aggregate cannot be represented there.  */
      if (new_offset < 0)
	continue;
      if (merge_agg_lats_step (dest_plats, new_offset, src_aglat->size,
			       &dst_aglat, pre_existing, &ret))
	{
	  struct ipcp_agg_lattice *new_al = *dst_aglat;

	  dst_aglat = &(*dst_aglat)->next;
	  if (src_aglat->bottom)
	    {
	      ret |= new_al->set_contains_variable ();
	      continue;
	    }
	  if (src_aglat->contains_variable)
	    ret |= new_al->set_contains_variable ();
	  /* Copy every value of the source lattice, remembering where it
	     came from.  */
	  for (ipcp_value<tree> *val = src_aglat->values;
	       val;
	       val = val->next)
	    ret |= new_al->add_value (val->value, cs, val, src_idx,
				      src_aglat->offset);
	}
      else if (dest_plats->aggs_bottom)
	return true;
    }
  /* Destination lattices with no counterpart in the source become
     unknown.  */
  ret |= set_chain_of_aglats_contains_variable (*dst_aglat);
  return ret;
}
1644
/* Determine whether there is anything to propagate FROM SRC_PLATS through a
   pass-through JFUNC and if so, whether it conforms to the rules about
   propagating values passed by reference.  */
1648
1649 static bool
1650 agg_pass_through_permissible_p (struct ipcp_param_lattices *src_plats,
1651 struct ipa_jump_func *jfunc)
1652 {
1653 return src_plats->aggs
1654 && (!src_plats->aggs_by_ref
1655 || ipa_get_jf_pass_through_agg_preserved (jfunc));
1656 }
1657
/* Propagate aggregate values across jump function JFUNC that is associated
   with edge CS and put the values into DEST_PLATS.  */
1660
static bool
propagate_aggs_accross_jump_function (struct cgraph_edge *cs,
				      struct ipa_jump_func *jfunc,
				      struct ipcp_param_lattices *dest_plats)
{
  bool ret = false;

  /* Bottom aggregate lattices cannot change any further.  */
  if (dest_plats->aggs_bottom)
    return false;

  if (jfunc->type == IPA_JF_PASS_THROUGH
      && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
    {
      /* A plain copy of a caller parameter: merge the caller's aggregate
	 lattices wholesale, at offset zero.  */
      struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
      int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
      struct ipcp_param_lattices *src_plats;

      src_plats = ipa_get_parm_lattices (caller_info, src_idx);
      if (agg_pass_through_permissible_p (src_plats, jfunc))
	{
	  /* Currently we do not produce clobber aggregate jump
	     functions, replace with merging when we do.  */
	  gcc_assert (!jfunc->agg.items);
	  ret |= merge_aggregate_lattices (cs, dest_plats, src_plats,
					   src_idx, 0);
	}
      else
	ret |= set_agg_lats_contain_variable (dest_plats);
    }
  else if (jfunc->type == IPA_JF_ANCESTOR
	   && ipa_get_jf_ancestor_agg_preserved (jfunc))
    {
      /* Like a pass-through, but shifted by the ancestor offset; only
	 meaningful for by-reference aggregates.  */
      struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
      int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
      struct ipcp_param_lattices *src_plats;

      src_plats = ipa_get_parm_lattices (caller_info, src_idx);
      if (src_plats->aggs && src_plats->aggs_by_ref)
	{
	  /* Currently we do not produce clobber aggregate jump
	     functions, replace with merging when we do.  */
	  gcc_assert (!jfunc->agg.items);
	  ret |= merge_aggregate_lattices (cs, dest_plats, src_plats, src_idx,
					   ipa_get_jf_ancestor_offset (jfunc));
	}
      else if (!src_plats->aggs_by_ref)
	ret |= set_agg_lats_to_bottom (dest_plats);
      else
	ret |= set_agg_lats_contain_variable (dest_plats);
    }
  else if (jfunc->agg.items)
    {
      /* Explicitly known aggregate contents: feed each item into the
	 matching aggregate lattice.  */
      bool pre_existing = dest_plats->aggs != NULL;
      struct ipcp_agg_lattice **aglat = &dest_plats->aggs;
      struct ipa_agg_jf_item *item;
      int i;

      if (set_check_aggs_by_ref (dest_plats, jfunc->agg.by_ref))
	return true;

      FOR_EACH_VEC_ELT (*jfunc->agg.items, i, item)
	{
	  HOST_WIDE_INT val_size;

	  if (item->offset < 0)
	    continue;
	  gcc_checking_assert (is_gimple_ip_invariant (item->value));
	  val_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (item->value)));

	  if (merge_agg_lats_step (dest_plats, item->offset, val_size,
				   &aglat, pre_existing, &ret))
	    {
	      ret |= (*aglat)->add_value (item->value, cs, NULL, 0, 0);
	      aglat = &(*aglat)->next;
	    }
	  else if (dest_plats->aggs_bottom)
	    return true;
	}

      /* Remaining lattices without a matching item become unknown.  */
      ret |= set_chain_of_aglats_contains_variable (*aglat);
    }
  else
    ret |= set_agg_lats_contain_variable (dest_plats);

  return ret;
}
1747
/* Propagate constants from the caller to the callee of CS.  */
1750
static bool
propagate_constants_accross_call (struct cgraph_edge *cs)
{
  struct ipa_node_params *callee_info;
  enum availability availability;
  struct cgraph_node *callee, *alias_or_thunk;
  struct ipa_edge_args *args;
  bool ret = false;
  int i, args_count, parms_count;

  /* Only callees with a known GIMPLE body can receive propagated values.  */
  callee = cs->callee->function_symbol (&availability);
  if (!callee->definition)
    return false;
  gcc_checking_assert (callee->has_gimple_body_p ());
  callee_info = IPA_NODE_REF (callee);

  args = IPA_EDGE_REF (cs);
  args_count = ipa_get_cs_argument_count (args);
  parms_count = ipa_get_param_count (callee_info);
  if (parms_count == 0)
    return false;

  /* No propagation through instrumentation thunks is available yet.
     It should be possible with proper mapping of call args and
     instrumented callee params in the propagation loop below.  But
     this case mostly occurs when legacy code calls instrumented code
     and it is not a primary target for optimizations.
     We detect instrumentation thunks in aliases and thunks chain by
     checking instrumentation_clone flag for chain source and target.
     Going through instrumentation thunks we always have it changed
     from 0 to 1 and all other nodes do not change it.  */
  if (!cs->callee->instrumentation_clone
      && callee->instrumentation_clone)
    {
      for (i = 0; i < parms_count; i++)
	ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
								 i));
      return ret;
    }

  /* If this call goes through a thunk we must not propagate to the first (0th)
     parameter.  However, we might need to uncover a thunk from below a series
     of aliases first.  */
  alias_or_thunk = cs->callee;
  while (alias_or_thunk->alias)
    alias_or_thunk = alias_or_thunk->get_alias_target ();
  if (alias_or_thunk->thunk.thunk_p)
    {
      ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
							       0));
      i = 1;
    }
  else
    i = 0;

  /* Propagate each argument's jump function into the lattices (scalar,
     context, alignment and aggregate) of the matching formal parameter.  */
  for (; (i < args_count) && (i < parms_count); i++)
    {
      struct ipa_jump_func *jump_func = ipa_get_ith_jump_func (args, i);
      struct ipcp_param_lattices *dest_plats;

      dest_plats = ipa_get_parm_lattices (callee_info, i);
      if (availability == AVAIL_INTERPOSABLE)
	/* An interposable callee may be replaced at link time, so nothing
	   concrete can be assumed about its parameters.  */
	ret |= set_all_contains_variable (dest_plats);
      else
	{
	  ret |= propagate_scalar_accross_jump_function (cs, jump_func,
							 &dest_plats->itself);
	  ret |= propagate_context_accross_jump_function (cs, jump_func, i,
							  &dest_plats->ctxlat);
	  ret |= propagate_alignment_accross_jump_function (cs, jump_func,
							    dest_plats);
	  ret |= propagate_aggs_accross_jump_function (cs, jump_func,
						       dest_plats);
	}
    }
  /* Formal parameters without a corresponding actual argument are
     unknown.  */
  for (; i < parms_count; i++)
    ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info, i));

  return ret;
}
1831
1832 /* If an indirect edge IE can be turned into a direct one based on KNOWN_VALS
1833 KNOWN_CONTEXTS, KNOWN_AGGS or AGG_REPS return the destination. The latter
1834 three can be NULL. If AGG_REPS is not NULL, KNOWN_AGGS is ignored. */
1835
static tree
ipa_get_indirect_edge_target_1 (struct cgraph_edge *ie,
				vec<tree> known_csts,
				vec<ipa_polymorphic_call_context> known_contexts,
				vec<ipa_agg_jump_function_p> known_aggs,
				struct ipa_agg_replacement_value *agg_reps,
				bool *speculative)
{
  int param_index = ie->indirect_info->param_index;
  HOST_WIDE_INT anc_offset;
  tree t;
  tree target = NULL;

  *speculative = false;

  /* Without a known parameter feeding the call there is nothing to do.  */
  if (param_index == -1
      || known_csts.length () <= (unsigned int) param_index)
    return NULL_TREE;

  if (!ie->indirect_info->polymorphic)
    {
      /* Plain indirect call through a pointer: look the callee address up
	 either in aggregate replacements/known aggregates (when it is
	 loaded from memory) or among the known scalar constants.
	 NOTE(review): this T shadows the outer one — harmless here but
	 confusing.  */
      tree t;

      if (ie->indirect_info->agg_contents)
	{
	  if (agg_reps)
	    {
	      t = NULL;
	      while (agg_reps)
		{
		  if (agg_reps->index == param_index
		      && agg_reps->offset == ie->indirect_info->offset
		      && agg_reps->by_ref == ie->indirect_info->by_ref)
		    {
		      t = agg_reps->value;
		      break;
		    }
		  agg_reps = agg_reps->next;
		}
	    }
	  else if (known_aggs.length () > (unsigned int) param_index)
	    {
	      struct ipa_agg_jump_function *agg;
	      agg = known_aggs[param_index];
	      t = ipa_find_agg_cst_for_param (agg, ie->indirect_info->offset,
					      ie->indirect_info->by_ref);
	    }
	  else
	    t = NULL;
	}
      else
	t = known_csts[param_index];

      /* Only a direct address of a function is a usable target.  */
      if (t &&
	  TREE_CODE (t) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL)
	return TREE_OPERAND (t, 0);
      else
	return NULL_TREE;
    }

  /* The rest handles polymorphic (virtual) calls and requires
     devirtualization to be enabled for the caller.  */
  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL_TREE;

  gcc_assert (!ie->indirect_info->agg_contents);
  anc_offset = ie->indirect_info->offset;

  t = NULL;

  /* Try to work out value of virtual table pointer value in replacements.  */
  if (!t && agg_reps && !ie->indirect_info->by_ref)
    {
      while (agg_reps)
	{
	  if (agg_reps->index == param_index
	      && agg_reps->offset == ie->indirect_info->offset
	      && agg_reps->by_ref)
	    {
	      t = agg_reps->value;
	      break;
	    }
	  agg_reps = agg_reps->next;
	}
    }

  /* Try to work out value of virtual table pointer value in known
     aggregate values.  */
  if (!t && known_aggs.length () > (unsigned int) param_index
      && !ie->indirect_info->by_ref)
    {
      struct ipa_agg_jump_function *agg;
      agg = known_aggs[param_index];
      t = ipa_find_agg_cst_for_param (agg, ie->indirect_info->offset,
				      true);
    }

  /* If we found the virtual table pointer, lookup the target.  */
  if (t)
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      if (vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						      vtable, offset);
	  if (target)
	    {
	      /* A target that cannot be called at all is turned into the
		 canonical "impossible" target.  */
	      if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
		   && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (target)))
		target = ipa_impossible_devirt_target (ie, target);
	      /* The vtable lookup is only definitive if the vptr cannot
		 have changed; otherwise fall through and treat TARGET as
		 speculative.  */
	      *speculative = ie->indirect_info->vptr_changed;
	      if (!*speculative)
		return target;
	    }
	}
    }

  /* Do we know the constant value of pointer?  */
  if (!t)
    t = known_csts[param_index];

  gcc_checking_assert (!t || TREE_CODE (t) != TREE_BINFO);

  /* Build the polymorphic context from the known context and/or the known
     pointer value, applying the ancestor offset.  */
  ipa_polymorphic_call_context context;
  if (known_contexts.length () > (unsigned int) param_index)
    {
      context = known_contexts[param_index];
      context.offset_by (anc_offset);
      if (ie->indirect_info->vptr_changed)
	context.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
					      ie->indirect_info->otr_type);
      if (t)
	{
	  ipa_polymorphic_call_context ctx2 = ipa_polymorphic_call_context
	    (t, ie->indirect_info->otr_type, anc_offset);
	  if (!ctx2.useless_p ())
	    context.combine_with (ctx2, ie->indirect_info->otr_type);
	}
    }
  else if (t)
    {
      context = ipa_polymorphic_call_context (t, ie->indirect_info->otr_type,
					      anc_offset);
      if (ie->indirect_info->vptr_changed)
	context.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
					      ie->indirect_info->otr_type);
    }
  else
    return NULL_TREE;

  /* Enumerate the possible targets implied by the context.  */
  vec <cgraph_node *>targets;
  bool final;

  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     context, &final);
  if (!final || targets.length () > 1)
    {
      /* The target list is not conclusive; fall back to the speculative
	 target from the vtable lookup, or try speculative
	 devirtualization if the edge is hot enough.  */
      struct cgraph_node *node;
      if (*speculative)
	return target;
      if (!opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	  || ie->speculative || !ie->maybe_hot_p ())
	return NULL;
      node = try_speculative_devirtualization (ie->indirect_info->otr_type,
					       ie->indirect_info->otr_token,
					       context);
      if (node)
	{
	  *speculative = true;
	  target = node->decl;
	}
      else
	return NULL;
    }
  else
    {
      /* Exactly one possible target (or none, which is impossible to
	 call).  */
      *speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }

  if (target && !possible_polymorphic_call_target_p (ie,
						     cgraph_node::get (target)))
    target = ipa_impossible_devirt_target (ie, target);

  return target;
}
2029
2030
/* If an indirect edge IE can be turned into a direct one based on KNOWN_CSTS,
   KNOWN_CONTEXTS (which can be vNULL) or KNOWN_AGGS (which also can be vNULL)
   return the destination.  Store into *SPECULATIVE whether the resolved
   target is only a speculative devirtualization guess.  */

tree
ipa_get_indirect_edge_target (struct cgraph_edge *ie,
			      vec<tree> known_csts,
			      vec<ipa_polymorphic_call_context> known_contexts,
			      vec<ipa_agg_jump_function_p> known_aggs,
			      bool *speculative)
{
  /* Forward to the worker with no aggregate replacement values.  */
  return ipa_get_indirect_edge_target_1 (ie, known_csts, known_contexts,
					 known_aggs, NULL, speculative);
}
2045
/* Calculate devirtualization time bonus for NODE, assuming we know KNOWN_CSTS
   and KNOWN_CONTEXTS.  The bonus rewards indirect calls which the known
   values allow us to resolve, with bigger bonuses for small, inlinable
   targets.  */

static int
devirtualization_time_bonus (struct cgraph_node *node,
			     vec<tree> known_csts,
			     vec<ipa_polymorphic_call_context> known_contexts,
			     vec<ipa_agg_jump_function_p> known_aggs)
{
  struct cgraph_edge *ie;
  int res = 0;

  for (ie = node->indirect_calls; ie; ie = ie->next_callee)
    {
      struct cgraph_node *callee;
      struct inline_summary *isummary;
      enum availability avail;
      tree target;
      bool speculative;

      target = ipa_get_indirect_edge_target (ie, known_csts, known_contexts,
					     known_aggs, &speculative);
      if (!target)
	continue;

      /* Only bare minimum benefit for clearly un-inlineable targets.  */
      res += 1;
      callee = cgraph_node::get (target);
      if (!callee || !callee->definition)
	continue;
      callee = callee->function_symbol (&avail);
      if (avail < AVAIL_AVAILABLE)
	continue;
      isummary = inline_summaries->get (callee);
      if (!isummary->inlinable)
	continue;

      /* FIXME: The values below need re-considering and perhaps also
	 integrating into the cost metrics, at least in some very basic way.
	 Note that a speculative devirtualization only gets half the bonus of
	 a guaranteed one.  */
      if (isummary->size <= MAX_INLINE_INSNS_AUTO / 4)
	res += 31 / ((int)speculative + 1);
      else if (isummary->size <= MAX_INLINE_INSNS_AUTO / 2)
	res += 15 / ((int)speculative + 1);
      else if (isummary->size <= MAX_INLINE_INSNS_AUTO
	       || DECL_DECLARED_INLINE_P (callee->decl))
	res += 7 / ((int)speculative + 1);
    }

  return res;
}
2096
2097 /* Return time bonus incurred because of HINTS. */
2098
2099 static int
2100 hint_time_bonus (inline_hints hints)
2101 {
2102 int result = 0;
2103 if (hints & (INLINE_HINT_loop_iterations | INLINE_HINT_loop_stride))
2104 result += PARAM_VALUE (PARAM_IPA_CP_LOOP_HINT_BONUS);
2105 if (hints & INLINE_HINT_array_index)
2106 result += PARAM_VALUE (PARAM_IPA_CP_ARRAY_INDEX_HINT_BONUS);
2107 return result;
2108 }
2109
2110 /* Return true if cloning NODE is a good idea, given the estimated TIME_BENEFIT
2111 and SIZE_COST and with the sum of frequencies of incoming edges to the
2112 potential new clone in FREQUENCIES. */
2113
2114 static bool
2115 good_cloning_opportunity_p (struct cgraph_node *node, int time_benefit,
2116 int freq_sum, gcov_type count_sum, int size_cost)
2117 {
2118 if (time_benefit == 0
2119 || !opt_for_fn (node->decl, flag_ipa_cp_clone)
2120 || !optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->decl)))
2121 return false;
2122
2123 gcc_assert (size_cost > 0);
2124
2125 if (max_count)
2126 {
2127 int factor = (count_sum * 1000) / max_count;
2128 int64_t evaluation = (((int64_t) time_benefit * factor)
2129 / size_cost);
2130
2131 if (dump_file && (dump_flags & TDF_DETAILS))
2132 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
2133 "size: %i, count_sum: " HOST_WIDE_INT_PRINT_DEC
2134 ") -> evaluation: " "%"PRId64
2135 ", threshold: %i\n",
2136 time_benefit, size_cost, (HOST_WIDE_INT) count_sum,
2137 evaluation, PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));
2138
2139 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
2140 }
2141 else
2142 {
2143 int64_t evaluation = (((int64_t) time_benefit * freq_sum)
2144 / size_cost);
2145
2146 if (dump_file && (dump_flags & TDF_DETAILS))
2147 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
2148 "size: %i, freq_sum: %i) -> evaluation: "
2149 "%"PRId64 ", threshold: %i\n",
2150 time_benefit, size_cost, freq_sum, evaluation,
2151 PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));
2152
2153 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
2154 }
2155 }
2156
2157 /* Return all context independent values from aggregate lattices in PLATS in a
2158 vector. Return NULL if there are none. */
2159
2160 static vec<ipa_agg_jf_item, va_gc> *
2161 context_independent_aggregate_values (struct ipcp_param_lattices *plats)
2162 {
2163 vec<ipa_agg_jf_item, va_gc> *res = NULL;
2164
2165 if (plats->aggs_bottom
2166 || plats->aggs_contain_variable
2167 || plats->aggs_count == 0)
2168 return NULL;
2169
2170 for (struct ipcp_agg_lattice *aglat = plats->aggs;
2171 aglat;
2172 aglat = aglat->next)
2173 if (aglat->is_single_const ())
2174 {
2175 struct ipa_agg_jf_item item;
2176 item.offset = aglat->offset;
2177 item.value = aglat->values->value;
2178 vec_safe_push (res, item);
2179 }
2180 return res;
2181 }
2182
/* Allocate KNOWN_CSTS, KNOWN_CONTEXTS and, if non-NULL, KNOWN_AGGS and
   populate them with values of parameters that are known independent of the
   context.  INFO describes the function.  If REMOVABLE_PARAMS_COST is
   non-NULL, the movement cost of all removable parameters will be stored in
   it.  Return true if at least one context-independent value (scalar,
   context or aggregate) was found.  */

static bool
gather_context_independent_values (struct ipa_node_params *info,
				   vec<tree> *known_csts,
				   vec<ipa_polymorphic_call_context>
				   *known_contexts,
				   vec<ipa_agg_jump_function> *known_aggs,
				   int *removable_params_cost)
{
  int i, count = ipa_get_param_count (info);
  bool ret = false;

  /* Size all output vectors to the parameter count; cleared entries mean
     "nothing known" for the corresponding parameter.  */
  known_csts->create (0);
  known_contexts->create (0);
  known_csts->safe_grow_cleared (count);
  known_contexts->safe_grow_cleared (count);
  if (known_aggs)
    {
      known_aggs->create (0);
      known_aggs->safe_grow_cleared (count);
    }

  if (removable_params_cost)
    *removable_params_cost = 0;

  for (i = 0; i < count ; i++)
    {
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
      ipcp_lattice<tree> *lat = &plats->itself;

      if (lat->is_single_const ())
	{
	  /* The scalar lattice holds exactly one value, so the parameter is
	     a known constant in every context.  */
	  ipcp_value<tree> *val = lat->values;
	  gcc_checking_assert (TREE_CODE (val->value) != TREE_BINFO);
	  (*known_csts)[i] = val->value;
	  if (removable_params_cost)
	    *removable_params_cost
	      += estimate_move_cost (TREE_TYPE (val->value), false);
	  ret = true;
	}
      else if (removable_params_cost
	       && !ipa_is_param_used (info, i))
	/* An unused parameter is removable whatever its value is.  */
	*removable_params_cost
	  += ipa_get_param_move_cost (info, i);

      ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
      if (ctxlat->is_single_const ())
	{
	  (*known_contexts)[i] = ctxlat->values->value;
	  ret = true;
	}

      if (known_aggs)
	{
	  vec<ipa_agg_jf_item, va_gc> *agg_items;
	  struct ipa_agg_jump_function *ajf;

	  agg_items = context_independent_aggregate_values (plats);
	  ajf = &(*known_aggs)[i];
	  ajf->items = agg_items;
	  ajf->by_ref = plats->aggs_by_ref;
	  ret |= agg_items != NULL;
	}
    }

  return ret;
}
2255
2256 /* The current interface in ipa-inline-analysis requires a pointer vector.
2257 Create it.
2258
2259 FIXME: That interface should be re-worked, this is slightly silly. Still,
2260 I'd like to discuss how to change it first and this demonstrates the
2261 issue. */
2262
2263 static vec<ipa_agg_jump_function_p>
2264 agg_jmp_p_vec_for_t_vec (vec<ipa_agg_jump_function> known_aggs)
2265 {
2266 vec<ipa_agg_jump_function_p> ret;
2267 struct ipa_agg_jump_function *ajf;
2268 int i;
2269
2270 ret.create (known_aggs.length ());
2271 FOR_EACH_VEC_ELT (known_aggs, i, ajf)
2272 ret.quick_push (ajf);
2273 return ret;
2274 }
2275
2276 /* Perform time and size measurement of NODE with the context given in
2277 KNOWN_CSTS, KNOWN_CONTEXTS and KNOWN_AGGS, calculate the benefit and cost
2278 given BASE_TIME of the node without specialization, REMOVABLE_PARAMS_COST of
2279 all context-independent removable parameters and EST_MOVE_COST of estimated
2280 movement of the considered parameter and store it into VAL. */
2281
2282 static void
2283 perform_estimation_of_a_value (cgraph_node *node, vec<tree> known_csts,
2284 vec<ipa_polymorphic_call_context> known_contexts,
2285 vec<ipa_agg_jump_function_p> known_aggs_ptrs,
2286 int base_time, int removable_params_cost,
2287 int est_move_cost, ipcp_value_base *val)
2288 {
2289 int time, size, time_benefit;
2290 inline_hints hints;
2291
2292 estimate_ipcp_clone_size_and_time (node, known_csts, known_contexts,
2293 known_aggs_ptrs, &size, &time,
2294 &hints);
2295 time_benefit = base_time - time
2296 + devirtualization_time_bonus (node, known_csts, known_contexts,
2297 known_aggs_ptrs)
2298 + hint_time_bonus (hints)
2299 + removable_params_cost + est_move_cost;
2300
2301 gcc_checking_assert (size >=0);
2302 /* The inliner-heuristics based estimates may think that in certain
2303 contexts some functions do not have any size at all but we want
2304 all specializations to have at least a tiny cost, not least not to
2305 divide by zero. */
2306 if (size == 0)
2307 size = 1;
2308
2309 val->local_time_benefit = time_benefit;
2310 val->local_size_cost = size;
2311 }
2312
/* Iterate over known values of parameters of NODE and estimate the local
   effects in terms of time and size they have.  First consider cloning for
   all known contexts at once, then measure each individual scalar constant,
   polymorphic context and aggregate constant separately, storing the
   per-value estimates into the values themselves.  */

static void
estimate_local_effects (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i, count = ipa_get_param_count (info);
  vec<tree> known_csts;
  vec<ipa_polymorphic_call_context> known_contexts;
  vec<ipa_agg_jump_function> known_aggs;
  vec<ipa_agg_jump_function_p> known_aggs_ptrs;
  bool always_const;
  int base_time = inline_summaries->get (node)->time;
  int removable_params_cost;

  if (!count || !ipcp_versionable_function_p (node))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEstimating effects for %s/%i, base_time: %i.\n",
	     node->name (), node->order, base_time);

  always_const = gather_context_independent_values (info, &known_csts,
						    &known_contexts, &known_aggs,
						    &removable_params_cost);
  known_aggs_ptrs = agg_jmp_p_vec_for_t_vec (known_aggs);
  /* If anything at all is known in every context, consider creating a single
     clone specialized for all callers.  */
  if (always_const)
    {
      struct caller_statistics stats;
      inline_hints hints;
      int time, size;

      init_caller_stats (&stats);
      node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
					      false);
      estimate_ipcp_clone_size_and_time (node, known_csts, known_contexts,
					 known_aggs_ptrs, &size, &time, &hints);
      time -= devirtualization_time_bonus (node, known_csts, known_contexts,
					   known_aggs_ptrs);
      time -= hint_time_bonus (hints);
      time -= removable_params_cost;
      size -= stats.n_calls * removable_params_cost;

      if (dump_file)
	fprintf (dump_file, " - context independent values, size: %i, "
		 "time_benefit: %i\n", size, base_time - time);

      if (size <= 0
	  || node->will_be_removed_from_program_if_no_direct_calls_p ())
	{
	  info->do_clone_for_all_contexts = true;
	  base_time = time;

	  if (dump_file)
	    fprintf (dump_file, "     Decided to specialize for all "
		     "known contexts, code not going to grow.\n");
	}
      else if (good_cloning_opportunity_p (node, base_time - time,
					   stats.freq_sum, stats.count_sum,
					   size))
	{
	  if (size + overall_size <= max_new_size)
	    {
	      info->do_clone_for_all_contexts = true;
	      base_time = time;
	      overall_size += size;

	      if (dump_file)
		fprintf (dump_file, "     Decided to specialize for all "
			 "known contexts, growth deemed beneficial.\n");
	    }
	  else if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "   Not cloning for all contexts because "
		     "max_new_size would be reached with %li.\n",
		     size + overall_size);
	}
    }

  /* Measure each candidate scalar constant by temporarily placing it into
     KNOWN_CSTS, estimating, and restoring the empty slot afterwards.  */
  for (i = 0; i < count ; i++)
    {
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
      ipcp_lattice<tree> *lat = &plats->itself;
      ipcp_value<tree> *val;

      if (lat->bottom
	  || !lat->values
	  || known_csts[i])
	continue;

      for (val = lat->values; val; val = val->next)
	{
	  gcc_checking_assert (TREE_CODE (val->value) != TREE_BINFO);
	  known_csts[i] = val->value;

	  int emc = estimate_move_cost (TREE_TYPE (val->value), true);
	  perform_estimation_of_a_value (node, known_csts, known_contexts,
					 known_aggs_ptrs, base_time,
					 removable_params_cost, emc, val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, " - estimates for value ");
	      print_ipcp_constant_value (dump_file, val->value);
	      fprintf (dump_file, " for ");
	      ipa_dump_param (dump_file, info, i);
	      fprintf (dump_file, ": time_benefit: %i, size: %i\n",
		       val->local_time_benefit, val->local_size_cost);
	    }
	}
      known_csts[i] = NULL_TREE;
    }

  /* Likewise for candidate polymorphic call contexts.  */
  for (i = 0; i < count; i++)
    {
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);

      if (!plats->virt_call)
	continue;

      ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
      ipcp_value<ipa_polymorphic_call_context> *val;

      if (ctxlat->bottom
	  || !ctxlat->values
	  || !known_contexts[i].useless_p ())
	continue;

      for (val = ctxlat->values; val; val = val->next)
	{
	  known_contexts[i] = val->value;
	  perform_estimation_of_a_value (node, known_csts, known_contexts,
					 known_aggs_ptrs, base_time,
					 removable_params_cost, 0, val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, " - estimates for polymorphic context ");
	      print_ipcp_constant_value (dump_file, val->value);
	      fprintf (dump_file, " for ");
	      ipa_dump_param (dump_file, info, i);
	      fprintf (dump_file, ": time_benefit: %i, size: %i\n",
		       val->local_time_benefit, val->local_size_cost);
	    }
	}
      known_contexts[i] = ipa_polymorphic_call_context ();
    }

  /* Likewise for candidate aggregate constants: temporarily push each item
     onto the corresponding jump function and pop it again.  */
  for (i = 0; i < count ; i++)
    {
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
      struct ipa_agg_jump_function *ajf;
      struct ipcp_agg_lattice *aglat;

      if (plats->aggs_bottom || !plats->aggs)
	continue;

      ajf = &known_aggs[i];
      for (aglat = plats->aggs; aglat; aglat = aglat->next)
	{
	  ipcp_value<tree> *val;
	  if (aglat->bottom || !aglat->values
	      /* If the following is true, the one value is in known_aggs.  */
	      || (!plats->aggs_contain_variable
		  && aglat->is_single_const ()))
	    continue;

	  for (val = aglat->values; val; val = val->next)
	    {
	      struct ipa_agg_jf_item item;

	      item.offset = aglat->offset;
	      item.value = val->value;
	      vec_safe_push (ajf->items, item);

	      perform_estimation_of_a_value (node, known_csts, known_contexts,
					     known_aggs_ptrs, base_time,
					     removable_params_cost, 0, val);

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, " - estimates for value ");
		  print_ipcp_constant_value (dump_file, val->value);
		  fprintf (dump_file, " for ");
		  ipa_dump_param (dump_file, info, i);
		  fprintf (dump_file, "[%soffset: " HOST_WIDE_INT_PRINT_DEC
			   "]: time_benefit: %i, size: %i\n",
			   plats->aggs_by_ref ? "ref " : "",
			   aglat->offset,
			   val->local_time_benefit, val->local_size_cost);
		}

	      ajf->items->pop ();
	    }
	}
    }

  for (i = 0; i < count ; i++)
    vec_free (known_aggs[i].items);

  known_csts.release ();
  known_contexts.release ();
  known_aggs.release ();
  known_aggs_ptrs.release ();
}
2518
2519
/* Add value CUR_VAL and all yet-unsorted values it is dependent on to the
   topological sort of values.

   This is the recursive step of Tarjan's strongly connected components
   algorithm: DFS holds the discovery number, LOW_LINK the smallest discovery
   number reachable from the value's DFS subtree and ON_STACK marks membership
   of the auxiliary stack.  When CUR_VAL turns out to be the root of an SCC,
   the whole component is popped off the stack, linked through SCC_NEXT and
   prepended to VALUES_TOPO.  */

template <typename valtype>
void
value_topo_info<valtype>::add_val (ipcp_value<valtype> *cur_val)
{
  ipcp_value_source<valtype> *src;

  /* A non-zero DFS number means the value has already been visited.  */
  if (cur_val->dfs)
    return;

  dfs_counter++;
  cur_val->dfs = dfs_counter;
  cur_val->low_link = dfs_counter;

  /* Push CUR_VAL onto the auxiliary stack of the current DFS walk.  */
  cur_val->topo_next = stack;
  stack = cur_val;
  cur_val->on_stack = true;

  for (src = cur_val->sources; src; src = src->next)
    if (src->val)
      {
	if (src->val->dfs == 0)
	  {
	    /* Unvisited dependency: recurse and inherit its low-link.  */
	    add_val (src->val);
	    if (src->val->low_link < cur_val->low_link)
	      cur_val->low_link = src->val->low_link;
	  }
	else if (src->val->on_stack
		 && src->val->dfs < cur_val->low_link)
	  /* Back-edge to a value still on the stack, i.e. the same SCC.  */
	  cur_val->low_link = src->val->dfs;
      }

  if (cur_val->dfs == cur_val->low_link)
    {
      /* CUR_VAL is the root of an SCC: pop the whole component, chain it
	 through scc_next and prepend it to the topological order.  */
      ipcp_value<valtype> *v, *scc_list = NULL;

      do
	{
	  v = stack;
	  stack = v->topo_next;
	  v->on_stack = false;

	  v->scc_next = scc_list;
	  scc_list = v;
	}
      while (v != cur_val);

      cur_val->topo_next = values_topo;
      values_topo = cur_val;
    }
}
2573
2574 /* Add all values in lattices associated with NODE to the topological sort if
2575 they are not there yet. */
2576
2577 static void
2578 add_all_node_vals_to_toposort (cgraph_node *node, ipa_topo_info *topo)
2579 {
2580 struct ipa_node_params *info = IPA_NODE_REF (node);
2581 int i, count = ipa_get_param_count (info);
2582
2583 for (i = 0; i < count ; i++)
2584 {
2585 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2586 ipcp_lattice<tree> *lat = &plats->itself;
2587 struct ipcp_agg_lattice *aglat;
2588
2589 if (!lat->bottom)
2590 {
2591 ipcp_value<tree> *val;
2592 for (val = lat->values; val; val = val->next)
2593 topo->constants.add_val (val);
2594 }
2595
2596 if (!plats->aggs_bottom)
2597 for (aglat = plats->aggs; aglat; aglat = aglat->next)
2598 if (!aglat->bottom)
2599 {
2600 ipcp_value<tree> *val;
2601 for (val = aglat->values; val; val = val->next)
2602 topo->constants.add_val (val);
2603 }
2604
2605 ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
2606 if (!ctxlat->bottom)
2607 {
2608 ipcp_value<ipa_polymorphic_call_context> *ctxval;
2609 for (ctxval = ctxlat->values; ctxval; ctxval = ctxval->next)
2610 topo->contexts.add_val (ctxval);
2611 }
2612 }
2613 }
2614
/* One pass of constants propagation along the call graph edges, from callers
   to callees (requires topological ordering in TOPO), iterate over strongly
   connected components.  */

static void
propagate_constants_topo (struct ipa_topo_info *topo)
{
  int i;

  for (i = topo->nnodes - 1; i >= 0; i--)
    {
      unsigned j;
      struct cgraph_node *v, *node = topo->order[i];
      vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (node);

      /* First, iteratively propagate within the strongly connected component
	 until all lattices stabilize.  */
      FOR_EACH_VEC_ELT (cycle_nodes, j, v)
	if (v->has_gimple_body_p ())
	  push_node_to_stack (topo, v);

      v = pop_node_from_stack (topo);
      while (v)
	{
	  struct cgraph_edge *cs;

	  /* Re-queue a callee whenever propagation over an intra-SCC edge
	     changed any of its lattices.  */
	  for (cs = v->callees; cs; cs = cs->next_callee)
	    if (ipa_edge_within_scc (cs)
		&& propagate_constants_accross_call (cs))
	      push_node_to_stack (topo, cs->callee);
	  v = pop_node_from_stack (topo);
	}

      /* Afterwards, propagate along edges leading out of the SCC, calculate
	 the local effects of the discovered constants and add all values to
	 their topological sort.  */
      FOR_EACH_VEC_ELT (cycle_nodes, j, v)
	if (v->has_gimple_body_p ())
	  {
	    struct cgraph_edge *cs;

	    estimate_local_effects (v);
	    add_all_node_vals_to_toposort (v, topo);
	    for (cs = v->callees; cs; cs = cs->next_callee)
	      if (!ipa_edge_within_scc (cs))
		propagate_constants_accross_call (cs);
	  }
      cycle_nodes.release ();
    }
}
2665
2666
/* Return A + B, saturating against overflow: when either operand exceeds
   INT_MAX/2, adding could overflow, so return the larger operand instead.  */

static int
safe_add (int a, int b)
{
  const int threshold = INT_MAX / 2;

  if (a <= threshold && b <= threshold)
    return a + b;
  return a > b ? a : b;
}
2678
2679
/* Propagate the estimated effects of individual values along the topological
   order from the dependent values to those they depend on.  */

template <typename valtype>
void
value_topo_info<valtype>::propagate_effects ()
{
  ipcp_value<valtype> *base;

  for (base = values_topo; base; base = base->topo_next)
    {
      ipcp_value_source<valtype> *src;
      ipcp_value<valtype> *val;
      int time = 0, size = 0;

      /* Sum the local and already-propagated effects over the whole SCC;
	 values in one SCC are mutually dependent.  */
      for (val = base; val; val = val->scc_next)
	{
	  time = safe_add (time,
			   val->local_time_benefit + val->prop_time_benefit);
	  size = safe_add (size, val->local_size_cost + val->prop_size_cost);
	}

      /* Add the SCC totals to every value this component depends on, but
	 only through edges that are likely to be executed.  */
      for (val = base; val; val = val->scc_next)
	for (src = val->sources; src; src = src->next)
	  if (src->val
	      && src->cs->maybe_hot_p ())
	    {
	      src->val->prop_time_benefit = safe_add (time,
						      src->val->prop_time_benefit);
	      src->val->prop_size_cost = safe_add (size,
						   src->val->prop_size_cost);
	    }
    }
}
2714
2715
/* Propagate constants, polymorphic contexts and their effects from the
   summaries interprocedurally.  Sets up the per-node lattices, computes the
   unit growth budget, runs the topological propagation and finally
   propagates time/size effects between dependent values.  */

static void
ipcp_propagate_stage (struct ipa_topo_info *topo)
{
  struct cgraph_node *node;

  if (dump_file)
    fprintf (dump_file, "\n Propagating constants:\n\n");

  if (in_lto_p)
    ipa_update_after_lto_read ();


  FOR_EACH_DEFINED_FUNCTION (node)
    {
      struct ipa_node_params *info = IPA_NODE_REF (node);

      /* Set up per-parameter lattices and gather unit-wide size and profile
	 statistics before any propagation takes place.  */
      determine_versionability (node);
      if (node->has_gimple_body_p ())
	{
	  info->lattices = XCNEWVEC (struct ipcp_param_lattices,
				     ipa_get_param_count (info));
	  initialize_node_lattices (node);
	}
      if (node->definition && !node->alias)
	overall_size += inline_summaries->get (node)->self_size;
      if (node->count > max_count)
	max_count = node->count;
    }

  /* Compute the maximum unit size cloning is allowed to grow to.  */
  max_new_size = overall_size;
  if (max_new_size < PARAM_VALUE (PARAM_LARGE_UNIT_INSNS))
    max_new_size = PARAM_VALUE (PARAM_LARGE_UNIT_INSNS);
  max_new_size += max_new_size * PARAM_VALUE (PARAM_IPCP_UNIT_GROWTH) / 100 + 1;

  if (dump_file)
    fprintf (dump_file, "\noverall_size: %li, max_new_size: %li\n",
	     overall_size, max_new_size);

  propagate_constants_topo (topo);
#ifdef ENABLE_CHECKING
  ipcp_verify_propagated_values ();
#endif
  topo->constants.propagate_effects ();
  topo->contexts.propagate_effects ();

  if (dump_file)
    {
      fprintf (dump_file, "\nIPA lattices after all propagation:\n");
      print_all_lattices (dump_file, (dump_flags & TDF_DETAILS), true);
    }
}
2770
/* Discover newly direct outgoing edges from NODE which is a new clone with
   known KNOWN_CSTS and make them direct.  KNOWN_CONTEXTS and AGGVALS describe
   the polymorphic contexts and aggregate replacement values the clone was
   specialized for.  */

static void
ipcp_discover_new_direct_edges (struct cgraph_node *node,
				vec<tree> known_csts,
				vec<ipa_polymorphic_call_context>
				known_contexts,
				struct ipa_agg_replacement_value *aggvals)
{
  struct cgraph_edge *ie, *next_ie;
  bool found = false;

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      tree target;
      bool speculative;

      /* Fetch the successor first; making the edge direct unlinks it from
	 the indirect-call list.  */
      next_ie = ie->next_callee;
      target = ipa_get_indirect_edge_target_1 (ie, known_csts, known_contexts,
					       vNULL, aggvals, &speculative);
      if (target)
	{
	  bool agg_contents = ie->indirect_info->agg_contents;
	  bool polymorphic = ie->indirect_info->polymorphic;
	  int param_index = ie->indirect_info->param_index;
	  struct cgraph_edge *cs = ipa_make_edge_direct_to_target (ie, target,
								   speculative);
	  found = true;

	  if (cs && !agg_contents && !polymorphic)
	    {
	      /* The parameter no longer feeds this indirect call; update the
		 controlled-uses bookkeeping and possibly drop the reference
		 that cloning created.  */
	      struct ipa_node_params *info = IPA_NODE_REF (node);
	      int c = ipa_get_controlled_uses (info, param_index);
	      if (c != IPA_UNDESCRIBED_USE)
		{
		  struct ipa_ref *to_del;

		  c--;
		  ipa_set_controlled_uses (info, param_index, c);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, " controlled uses count of param "
			     "%i bumped down to %i\n", param_index, c);
		  if (c == 0
		      && (to_del = node->find_reference (cs->callee, NULL, 0)))
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file, " and even removing its "
				 "cloning-created reference\n");
		      to_del->remove_reference ();
		    }
		}
	    }
	}
    }
  /* Turning calls to direct calls will improve overall summary.  */
  if (found)
    inline_update_overall_summary (node);
}
2830
/* Vectors of pointers which form doubly-linked lists of clones of an
   original cgraph edge, indexed by edge uid.  Maintained by the edge
   duplication and removal hooks below.  */

static vec<cgraph_edge *> next_edge_clone;
static vec<cgraph_edge *> prev_edge_clone;
2836
2837 static inline void
2838 grow_edge_clone_vectors (void)
2839 {
2840 if (next_edge_clone.length ()
2841 <= (unsigned) symtab->edges_max_uid)
2842 next_edge_clone.safe_grow_cleared (symtab->edges_max_uid + 1);
2843 if (prev_edge_clone.length ()
2844 <= (unsigned) symtab->edges_max_uid)
2845 prev_edge_clone.safe_grow_cleared (symtab->edges_max_uid + 1);
2846 }
2847
/* Edge duplication hook to grow the appropriate linked list in
   next_edge_clone.  Splices DST into the doubly-linked clone list right
   after SRC.  */

static void
ipcp_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
			    void *)
{
  grow_edge_clone_vectors ();

  /* Insert DST between SRC and SRC's former successor.  */
  struct cgraph_edge *old_next = next_edge_clone[src->uid];
  if (old_next)
    prev_edge_clone[old_next->uid] = dst;
  prev_edge_clone[dst->uid] = src;

  next_edge_clone[dst->uid] = old_next;
  next_edge_clone[src->uid] = dst;
}
2865
2866 /* Hook that is called by cgraph.c when an edge is removed. */
2867
2868 static void
2869 ipcp_edge_removal_hook (struct cgraph_edge *cs, void *)
2870 {
2871 grow_edge_clone_vectors ();
2872
2873 struct cgraph_edge *prev = prev_edge_clone[cs->uid];
2874 struct cgraph_edge *next = next_edge_clone[cs->uid];
2875 if (prev)
2876 next_edge_clone[prev->uid] = next;
2877 if (next)
2878 prev_edge_clone[next->uid] = prev;
2879 }
2880
2881 /* See if NODE is a clone with a known aggregate value at a given OFFSET of a
2882 parameter with the given INDEX. */
2883
2884 static tree
2885 get_clone_agg_value (struct cgraph_node *node, HOST_WIDE_INT offset,
2886 int index)
2887 {
2888 struct ipa_agg_replacement_value *aggval;
2889
2890 aggval = ipa_get_agg_replacements_for_node (node);
2891 while (aggval)
2892 {
2893 if (aggval->offset == offset
2894 && aggval->index == index)
2895 return aggval->value;
2896 aggval = aggval->next;
2897 }
2898 return NULL_TREE;
2899 }
2900
2901 /* Return true is NODE is DEST or its clone for all contexts. */
2902
2903 static bool
2904 same_node_or_its_all_contexts_clone_p (cgraph_node *node, cgraph_node *dest)
2905 {
2906 if (node == dest)
2907 return true;
2908
2909 struct ipa_node_params *info = IPA_NODE_REF (node);
2910 return info->is_all_contexts_clone && info->ipcp_orig_node == dest;
2911 }
2912
/* Return true if edge CS does bring about the value described by SRC to node
   DEST or its clone for all contexts.  This is the overload for scalar and
   aggregate tree constants.  */

static bool
cgraph_edge_brings_value_p (cgraph_edge *cs, ipcp_value_source<tree> *src,
			    cgraph_node *dest)
{
  struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
  enum availability availability;
  cgraph_node *real_dest = cs->callee->function_symbol (&availability);

  /* The edge is irrelevant if it does not lead to DEST (or its all-contexts
     clone), if the callee can be interposed, or if the caller is dead.  */
  if (!same_node_or_its_all_contexts_clone_p (real_dest, dest)
      || availability <= AVAIL_INTERPOSABLE
      || caller_info->node_dead)
    return false;
  /* A NULL value in SRC means "any value"; every surviving edge brings it.  */
  if (!src->val)
    return true;

  if (caller_info->ipcp_orig_node)
    {
      /* The caller is itself a clone; look the value up among what the
	 clone was specialized for.  SRC->offset of -1 denotes a scalar
	 parameter, otherwise an aggregate part at that offset.  */
      tree t;
      if (src->offset == -1)
	t = caller_info->known_csts[src->index];
      else
	t = get_clone_agg_value (cs->caller, src->offset, src->index);
      return (t != NULL_TREE
	      && values_equal_for_ipcp_p (src->val->value, t));
    }
  else
    {
      /* Otherwise consult the caller's lattices; only a single-constant
	 lattice guarantees the value actually arrives over this edge.  */
      struct ipcp_agg_lattice *aglat;
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
								 src->index);
      if (src->offset == -1)
	return (plats->itself.is_single_const ()
		&& values_equal_for_ipcp_p (src->val->value,
					    plats->itself.values->value));
      else
	{
	  if (plats->aggs_bottom || plats->aggs_contain_variable)
	    return false;
	  for (aglat = plats->aggs; aglat; aglat = aglat->next)
	    if (aglat->offset == src->offset)
	      return  (aglat->is_single_const ()
		       && values_equal_for_ipcp_p (src->val->value,
						   aglat->values->value));
	}
      return false;
    }
}
2963
/* Return true if edge CS does bring about the value described by SRC to node
   DEST or its clone for all contexts.  This is the overload for polymorphic
   call contexts.  */

static bool
cgraph_edge_brings_value_p (cgraph_edge *cs,
			    ipcp_value_source<ipa_polymorphic_call_context> *src,
			    cgraph_node *dest)
{
  struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
  cgraph_node *real_dest = cs->callee->function_symbol ();

  if (!same_node_or_its_all_contexts_clone_p (real_dest, dest)
      || caller_info->node_dead)
    return false;
  /* A NULL value in SRC means "any context"; every surviving edge brings
     it.  */
  if (!src->val)
    return true;

  if (caller_info->ipcp_orig_node)
    /* The caller is a clone; compare against the context it was
       specialized for.  */
    return (caller_info->known_contexts.length () > (unsigned) src->index)
      && values_equal_for_ipcp_p (src->val->value,
				  caller_info->known_contexts[src->index]);

  /* Otherwise the caller's context lattice must hold exactly this value.  */
  struct ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
							     src->index);
  return plats->ctxlat.is_single_const ()
    && values_equal_for_ipcp_p (src->val->value,
				plats->ctxlat.values->value);
}
2992
/* Get the next clone in the linked list of clones of an edge.  */

static inline struct cgraph_edge *
get_next_cgraph_edge_clone (struct cgraph_edge *cs)
{
  /* The list is threaded through next_edge_clone, indexed by edge uid.  */
  return next_edge_clone[cs->uid];
}
3000
3001 /* Given VAL that is intended for DEST, iterate over all its sources and if
3002 they still hold, add their edge frequency and their number into *FREQUENCY
3003 and *CALLER_COUNT respectively. */
3004
3005 template <typename valtype>
3006 static bool
3007 get_info_about_necessary_edges (ipcp_value<valtype> *val, cgraph_node *dest,
3008 int *freq_sum,
3009 gcov_type *count_sum, int *caller_count)
3010 {
3011 ipcp_value_source<valtype> *src;
3012 int freq = 0, count = 0;
3013 gcov_type cnt = 0;
3014 bool hot = false;
3015
3016 for (src = val->sources; src; src = src->next)
3017 {
3018 struct cgraph_edge *cs = src->cs;
3019 while (cs)
3020 {
3021 if (cgraph_edge_brings_value_p (cs, src, dest))
3022 {
3023 count++;
3024 freq += cs->frequency;
3025 cnt += cs->count;
3026 hot |= cs->maybe_hot_p ();
3027 }
3028 cs = get_next_cgraph_edge_clone (cs);
3029 }
3030 }
3031
3032 *freq_sum = freq;
3033 *count_sum = cnt;
3034 *caller_count = count;
3035 return hot;
3036 }
3037
3038 /* Return a vector of incoming edges that do bring value VAL to node DEST. It
3039 is assumed their number is known and equal to CALLER_COUNT. */
3040
3041 template <typename valtype>
3042 static vec<cgraph_edge *>
3043 gather_edges_for_value (ipcp_value<valtype> *val, cgraph_node *dest,
3044 int caller_count)
3045 {
3046 ipcp_value_source<valtype> *src;
3047 vec<cgraph_edge *> ret;
3048
3049 ret.create (caller_count);
3050 for (src = val->sources; src; src = src->next)
3051 {
3052 struct cgraph_edge *cs = src->cs;
3053 while (cs)
3054 {
3055 if (cgraph_edge_brings_value_p (cs, src, dest))
3056 ret.quick_push (cs);
3057 cs = get_next_cgraph_edge_clone (cs);
3058 }
3059 }
3060
3061 return ret;
3062 }
3063
3064 /* Construct a replacement map for a know VALUE for a formal parameter PARAM.
3065 Return it or NULL if for some reason it cannot be created. */
3066
3067 static struct ipa_replace_map *
3068 get_replacement_map (struct ipa_node_params *info, tree value, int parm_num)
3069 {
3070 struct ipa_replace_map *replace_map;
3071
3072
3073 replace_map = ggc_alloc<ipa_replace_map> ();
3074 if (dump_file)
3075 {
3076 fprintf (dump_file, " replacing ");
3077 ipa_dump_param (dump_file, info, parm_num);
3078
3079 fprintf (dump_file, " with const ");
3080 print_generic_expr (dump_file, value, 0);
3081 fprintf (dump_file, "\n");
3082 }
3083 replace_map->old_tree = NULL;
3084 replace_map->parm_num = parm_num;
3085 replace_map->new_tree = value;
3086 replace_map->replace_p = true;
3087 replace_map->ref_p = false;
3088
3089 return replace_map;
3090 }
3091
3092 /* Dump new profiling counts */
3093
3094 static void
3095 dump_profile_updates (struct cgraph_node *orig_node,
3096 struct cgraph_node *new_node)
3097 {
3098 struct cgraph_edge *cs;
3099
3100 fprintf (dump_file, " setting count of the specialized node to "
3101 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) new_node->count);
3102 for (cs = new_node->callees; cs ; cs = cs->next_callee)
3103 fprintf (dump_file, " edge to %s has count "
3104 HOST_WIDE_INT_PRINT_DEC "\n",
3105 cs->callee->name (), (HOST_WIDE_INT) cs->count);
3106
3107 fprintf (dump_file, " setting count of the original node to "
3108 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) orig_node->count);
3109 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
3110 fprintf (dump_file, " edge to %s is left with "
3111 HOST_WIDE_INT_PRINT_DEC "\n",
3112 cs->callee->name (), (HOST_WIDE_INT) cs->count);
3113 }
3114
/* After a specialized NEW_NODE version of ORIG_NODE has been created, update
   their profile information to reflect this.  ORIG_NODE's own count is split
   between the two nodes and the counts of their outgoing edges are scaled
   proportionally.  */

static void
update_profiling_info (struct cgraph_node *orig_node,
		       struct cgraph_node *new_node)
{
  struct cgraph_edge *cs;
  struct caller_statistics stats;
  gcov_type new_sum, orig_sum;
  gcov_type remainder, orig_node_count = orig_node->count;

  /* Without profile data there is nothing to redistribute.  */
  if (orig_node_count == 0)
    return;

  /* Sum the counts of edges still incoming to the original node and of
     those already redirected to the new node (including thunks and
     aliases).  */
  init_caller_stats (&stats);
  orig_node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
						 false);
  orig_sum = stats.count_sum;
  init_caller_stats (&stats);
  new_node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
						 false);
  new_sum = stats.count_sum;

  /* Profiles may be inconsistent; if the incoming counts exceed the node
     count, pretend the node count was 20% bigger than their sum so that the
     scaling below remains meaningful and REMAINDER stays non-negative.  */
  if (orig_node_count < orig_sum + new_sum)
    {
      if (dump_file)
	fprintf (dump_file, " Problem: node %s/%i has too low count "
		 HOST_WIDE_INT_PRINT_DEC " while the sum of incoming "
		 "counts is " HOST_WIDE_INT_PRINT_DEC "\n",
		 orig_node->name (), orig_node->order,
		 (HOST_WIDE_INT) orig_node_count,
		 (HOST_WIDE_INT) (orig_sum + new_sum));

      orig_node_count = (orig_sum + new_sum) * 12 / 10;
      if (dump_file)
	fprintf (dump_file, " proceeding by pretending it was "
		 HOST_WIDE_INT_PRINT_DEC "\n",
		 (HOST_WIDE_INT) orig_node_count);
    }

  /* The new node receives the sum of its incoming counts, the original node
     keeps the rest.  */
  new_node->count = new_sum;
  remainder = orig_node_count - new_sum;
  orig_node->count = remainder;

  /* Scale the outgoing edge counts of the new node by the fraction of the
     (possibly adjusted) original count it received.  Edges with zero
     frequency get a zero count outright.  */
  for (cs = new_node->callees; cs ; cs = cs->next_callee)
    if (cs->frequency)
      cs->count = apply_probability (cs->count,
				     GCOV_COMPUTE_SCALE (new_sum,
							 orig_node_count));
    else
      cs->count = 0;

  /* And scale the original node's outgoing edges by the remaining
     fraction.  */
  for (cs = orig_node->callees; cs ; cs = cs->next_callee)
    cs->count = apply_probability (cs->count,
				   GCOV_COMPUTE_SCALE (remainder,
						       orig_node_count));

  if (dump_file)
    dump_profile_updates (orig_node, new_node);
}
3176
/* Update the respective profile of specialized NEW_NODE and the original
   ORIG_NODE after additional edges with cumulative count sum REDIRECTED_SUM
   have been redirected to the specialized version.  */

static void
update_specialized_profile (struct cgraph_node *new_node,
			    struct cgraph_node *orig_node,
			    gcov_type redirected_sum)
{
  struct cgraph_edge *cs;
  gcov_type new_node_count, orig_node_count = orig_node->count;

  if (dump_file)
    fprintf (dump_file, " the sum of counts of redirected edges is "
	     HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) redirected_sum);
  /* Without profile data there is nothing to update.  */
  if (orig_node_count == 0)
    return;

  gcc_assert (orig_node_count >= redirected_sum);

  /* Move the redirected count from the original node to the new one.  */
  new_node_count = new_node->count;
  new_node->count += redirected_sum;
  orig_node->count -= redirected_sum;

  /* Scale up the outgoing edge counts of the new node in proportion to the
     count it gained (relative to its previous count).  */
  for (cs = new_node->callees; cs ; cs = cs->next_callee)
    if (cs->frequency)
      cs->count += apply_probability (cs->count,
				      GCOV_COMPUTE_SCALE (redirected_sum,
							  new_node_count));
    else
      cs->count = 0;

  /* Decrease the outgoing edge counts of the original node by the share it
     lost, clamping at zero so counts never go negative.  */
  for (cs = orig_node->callees; cs ; cs = cs->next_callee)
    {
      gcov_type dec = apply_probability (cs->count,
					 GCOV_COMPUTE_SCALE (redirected_sum,
							     orig_node_count));
      if (dec < cs->count)
	cs->count -= dec;
      else
	cs->count = 0;
    }

  if (dump_file)
    dump_profile_updates (orig_node, new_node);
}
3223
/* Create a specialized version of NODE with known constants in KNOWN_CSTS,
   known contexts in KNOWN_CONTEXTS and known aggregate values in AGGVALS and
   redirect all edges in CALLERS to it.  Return the new node.  CALLERS is
   released before returning.  */

static struct cgraph_node *
create_specialized_node (struct cgraph_node *node,
			 vec<tree> known_csts,
			 vec<ipa_polymorphic_call_context> known_contexts,
			 struct ipa_agg_replacement_value *aggvals,
			 vec<cgraph_edge *> callers)
{
  struct ipa_node_params *new_info, *info = IPA_NODE_REF (node);
  vec<ipa_replace_map *, va_gc> *replace_trees = NULL;
  struct ipa_agg_replacement_value *av;
  struct cgraph_node *new_node;
  int i, count = ipa_get_param_count (info);
  bitmap args_to_skip;

  /* Only original nodes are ever specialized, never existing clones.  */
  gcc_assert (!info->ipcp_orig_node);

  if (node->local.can_change_signature)
    {
      /* Parameters that are known constants or entirely unused can be
	 dropped from the clone's signature.  */
      args_to_skip = BITMAP_GGC_ALLOC ();
      for (i = 0; i < count; i++)
	{
	  tree t = known_csts[i];

	  if (t || !ipa_is_param_used (info, i))
	    bitmap_set_bit (args_to_skip, i);
	}
    }
  else
    {
      args_to_skip = NULL;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " cannot change function signature\n");
    }

  /* Build replacement maps that substitute each known constant for uses of
     the corresponding parameter in the clone's body.  */
  for (i = 0; i < count ; i++)
    {
      tree t = known_csts[i];
      if (t)
	{
	  struct ipa_replace_map *replace_map;

	  gcc_checking_assert (TREE_CODE (t) != TREE_BINFO);
	  replace_map = get_replacement_map (info, t, i);
	  if (replace_map)
	    vec_safe_push (replace_trees, replace_map);
	}
    }

  new_node = node->create_virtual_clone (callers, replace_trees,
					 args_to_skip, "constprop");
  ipa_set_node_agg_value_chain (new_node, aggvals);
  /* Make the values used in aggregate replacements explicit references of
     the clone so they are kept alive.  */
  for (av = aggvals; av; av = av->next)
    new_node->maybe_create_reference (av->value, IPA_REF_ADDR, NULL);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, " the new node is %s/%i.\n",
	       new_node->name (), new_node->order);
      if (known_contexts.exists ())
	{
	  for (i = 0; i < count ; i++)
	    if (!known_contexts[i].useless_p ())
	      {
		fprintf (dump_file, " known ctx %i is ", i);
		known_contexts[i].dump (dump_file);
	      }
	}
      if (aggvals)
	ipa_dump_agg_replacement_values (dump_file, aggvals);
    }
  ipa_check_create_node_params ();
  update_profiling_info (node, new_node);
  /* Record in the clone's summary what it has been specialized for.  */
  new_info = IPA_NODE_REF (new_node);
  new_info->ipcp_orig_node = node;
  new_info->known_csts = known_csts;
  new_info->known_contexts = known_contexts;

  ipcp_discover_new_direct_edges (new_node, known_csts, known_contexts, aggvals);

  callers.release ();
  return new_node;
}
3310
/* Given a NODE, and a subset of its CALLERS, try to populate blank slots in
   KNOWN_CSTS with constants that are also known for all of the CALLERS.  */

static void
find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
					    vec<tree> known_csts,
					    vec<cgraph_edge *> callers)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i, count = ipa_get_param_count (info);

  for (i = 0; i < count ; i++)
    {
      struct cgraph_edge *cs;
      /* NEWVAL holds the constant all callers examined so far agree on,
	 NULL_TREE if there is none.  */
      tree newval = NULL_TREE;
      int j;
      bool first = true;

      /* Skip parameters with an unusable lattice or an already known
	 constant.  */
      if (ipa_get_scalar_lat (info, i)->bottom || known_csts[i])
	continue;

      FOR_EACH_VEC_ELT (callers, j, cs)
	{
	  struct ipa_jump_func *jump_func;
	  tree t;

	  /* A caller passing fewer arguments cannot supply this one.  */
	  if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs)))
	    {
	      newval = NULL_TREE;
	      break;
	    }
	  jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
	  t = ipa_value_from_jfunc (IPA_NODE_REF (cs->caller), jump_func);
	  /* Give up as soon as a caller provides no constant or one that
	     disagrees with what previous callers provided.  */
	  if (!t
	      || (newval
		  && !values_equal_for_ipcp_p (t, newval))
	      || (!first && !newval))
	    {
	      newval = NULL_TREE;
	      break;
	    }
	  else
	    newval = t;
	  first = false;
	}

      if (newval)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, " adding an extra known scalar value ");
	      print_ipcp_constant_value (dump_file, newval);
	      fprintf (dump_file, " for ");
	      ipa_dump_param (dump_file, info, i);
	      fprintf (dump_file, "\n");
	    }

	  known_csts[i] = newval;
	}
    }
}
3372
/* Given a NODE and a subset of its CALLERS, try to populate blank slots in
   KNOWN_CONTEXTS with polymorphic contexts that are also known for all of the
   CALLERS.  */

static void
find_more_contexts_for_caller_subset (cgraph_node *node,
				      vec<ipa_polymorphic_call_context>
				      *known_contexts,
				      vec<cgraph_edge *> callers)
{
  ipa_node_params *info = IPA_NODE_REF (node);
  int i, count = ipa_get_param_count (info);

  for (i = 0; i < count ; i++)
    {
      cgraph_edge *cs;

      /* Skip parameters with an unusable lattice or an already known useful
	 context.  */
      if (ipa_get_poly_ctx_lat (info, i)->bottom
	  || (known_contexts->exists ()
	      && !(*known_contexts)[i].useless_p ()))
	continue;

      ipa_polymorphic_call_context newval;
      bool first = true;
      int j;

      FOR_EACH_VEC_ELT (callers, j, cs)
	{
	  /* A caller passing fewer arguments cannot supply this parameter;
	     note this bails out of the whole function, not just this
	     parameter.  */
	  if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs)))
	    return;
	  ipa_jump_func *jfunc = ipa_get_ith_jump_func (IPA_EDGE_REF (cs),
							i);
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (IPA_NODE_REF (cs->caller), cs, i,
					jfunc);
	  /* Meet the contexts from all callers, stopping early once the
	     result carries no useful information.  */
	  if (first)
	    {
	      newval = ctx;
	      first = false;
	    }
	  else
	    newval.meet_with (ctx);
	  if (newval.useless_p ())
	    break;
	}

      if (!newval.useless_p ())
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, " adding an extra known polymorphic "
		       "context ");
	      print_ipcp_constant_value (dump_file, newval);
	      fprintf (dump_file, " for ");
	      ipa_dump_param (dump_file, info, i);
	      fprintf (dump_file, "\n");
	    }

	  /* Allocate the result vector lazily.  */
	  if (!known_contexts->exists ())
	    known_contexts->safe_grow_cleared (ipa_get_param_count (info));
	  (*known_contexts)[i] = newval;
	}

    }
}
3438
3439 /* Go through PLATS and create a vector of values consisting of values and
3440 offsets (minus OFFSET) of lattices that contain only a single value. */
3441
3442 static vec<ipa_agg_jf_item>
3443 copy_plats_to_inter (struct ipcp_param_lattices *plats, HOST_WIDE_INT offset)
3444 {
3445 vec<ipa_agg_jf_item> res = vNULL;
3446
3447 if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
3448 return vNULL;
3449
3450 for (struct ipcp_agg_lattice *aglat = plats->aggs; aglat; aglat = aglat->next)
3451 if (aglat->is_single_const ())
3452 {
3453 struct ipa_agg_jf_item ti;
3454 ti.offset = aglat->offset - offset;
3455 ti.value = aglat->values->value;
3456 res.safe_push (ti);
3457 }
3458 return res;
3459 }
3460
/* Intersect all values in INTER with single value lattices in PLATS (while
   subtracting OFFSET).  Items with no equal counterpart in PLATS get their
   value cleared to NULL_TREE.  */

static void
intersect_with_plats (struct ipcp_param_lattices *plats,
		      vec<ipa_agg_jf_item> *inter,
		      HOST_WIDE_INT offset)
{
  struct ipcp_agg_lattice *aglat;
  struct ipa_agg_jf_item *item;
  int k;

  /* Without usable aggregate lattices the intersection is empty.  */
  if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
    {
      inter->release ();
      return;
    }

  aglat = plats->aggs;
  FOR_EACH_VEC_ELT (*inter, k, item)
    {
      bool found = false;
      if (!item->value)
	continue;
      /* Note the AGLAT cursor deliberately carries over from one item to
	 the next; this works because both lists are kept sorted by offset
	 (TODO confirm for INTER) and the early break below relies on it.  */
      while (aglat)
	{
	  if (aglat->offset - offset > item->offset)
	    break;
	  if (aglat->offset - offset == item->offset)
	    {
	      gcc_checking_assert (item->value);
	      if (values_equal_for_ipcp_p (item->value, aglat->values->value))
		found = true;
	      break;
	    }
	  aglat = aglat->next;
	}
      if (!found)
	item->value = NULL_TREE;
    }
}
3502
3503 /* Copy agggregate replacement values of NODE (which is an IPA-CP clone) to the
3504 vector result while subtracting OFFSET from the individual value offsets. */
3505
3506 static vec<ipa_agg_jf_item>
3507 agg_replacements_to_vector (struct cgraph_node *node, int index,
3508 HOST_WIDE_INT offset)
3509 {
3510 struct ipa_agg_replacement_value *av;
3511 vec<ipa_agg_jf_item> res = vNULL;
3512
3513 for (av = ipa_get_agg_replacements_for_node (node); av; av = av->next)
3514 if (av->index == index
3515 && (av->offset - offset) >= 0)
3516 {
3517 struct ipa_agg_jf_item item;
3518 gcc_checking_assert (av->value);
3519 item.offset = av->offset - offset;
3520 item.value = av->value;
3521 res.safe_push (item);
3522 }
3523
3524 return res;
3525 }
3526
3527 /* Intersect all values in INTER with those that we have already scheduled to
3528 be replaced in parameter number INDEX of NODE, which is an IPA-CP clone
3529 (while subtracting OFFSET). */
3530
3531 static void
3532 intersect_with_agg_replacements (struct cgraph_node *node, int index,
3533 vec<ipa_agg_jf_item> *inter,
3534 HOST_WIDE_INT offset)
3535 {
3536 struct ipa_agg_replacement_value *srcvals;
3537 struct ipa_agg_jf_item *item;
3538 int i;
3539
3540 srcvals = ipa_get_agg_replacements_for_node (node);
3541 if (!srcvals)
3542 {
3543 inter->release ();
3544 return;
3545 }
3546
3547 FOR_EACH_VEC_ELT (*inter, i, item)
3548 {
3549 struct ipa_agg_replacement_value *av;
3550 bool found = false;
3551 if (!item->value)
3552 continue;
3553 for (av = srcvals; av; av = av->next)
3554 {
3555 gcc_checking_assert (av->value);
3556 if (av->index == index
3557 && av->offset - offset == item->offset)
3558 {
3559 if (values_equal_for_ipcp_p (item->value, av->value))
3560 found = true;
3561 break;
3562 }
3563 }
3564 if (!found)
3565 item->value = NULL_TREE;
3566 }
3567 }
3568
3569 /* Intersect values in INTER with aggregate values that come along edge CS to
3570 parameter number INDEX and return it. If INTER does not actually exist yet,
3571 copy all incoming values to it. If we determine we ended up with no values
3572 whatsoever, return a released vector. */
3573
3574 static vec<ipa_agg_jf_item>
3575 intersect_aggregates_with_edge (struct cgraph_edge *cs, int index,
3576 vec<ipa_agg_jf_item> inter)
3577 {
3578 struct ipa_jump_func *jfunc;
3579 jfunc = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), index);
3580 if (jfunc->type == IPA_JF_PASS_THROUGH
3581 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3582 {
3583 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
3584 int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
3585
3586 if (caller_info->ipcp_orig_node)
3587 {
3588 struct cgraph_node *orig_node = caller_info->ipcp_orig_node;
3589 struct ipcp_param_lattices *orig_plats;
3590 orig_plats = ipa_get_parm_lattices (IPA_NODE_REF (orig_node),
3591 src_idx);
3592 if (agg_pass_through_permissible_p (orig_plats, jfunc))
3593 {
3594 if (!inter.exists ())
3595 inter = agg_replacements_to_vector (cs->caller, src_idx, 0);
3596 else
3597 intersect_with_agg_replacements (cs->caller, src_idx,
3598 &inter, 0);
3599 }
3600 else
3601 {
3602 inter.release ();
3603 return vNULL;
3604 }
3605 }
3606 else
3607 {
3608 struct ipcp_param_lattices *src_plats;
3609 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
3610 if (agg_pass_through_permissible_p (src_plats, jfunc))
3611 {
3612 /* Currently we do not produce clobber aggregate jump
3613 functions, adjust when we do. */
3614 gcc_checking_assert (!jfunc->agg.items);
3615 if (!inter.exists ())
3616 inter = copy_plats_to_inter (src_plats, 0);
3617 else
3618 intersect_with_plats (src_plats, &inter, 0);
3619 }
3620 else
3621 {
3622 inter.release ();
3623 return vNULL;
3624 }
3625 }
3626 }
3627 else if (jfunc->type == IPA_JF_ANCESTOR
3628 && ipa_get_jf_ancestor_agg_preserved (jfunc))
3629 {
3630 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
3631 int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
3632 struct ipcp_param_lattices *src_plats;
3633 HOST_WIDE_INT delta = ipa_get_jf_ancestor_offset (jfunc);
3634
3635 if (caller_info->ipcp_orig_node)
3636 {
3637 if (!inter.exists ())
3638 inter = agg_replacements_to_vector (cs->caller, src_idx, delta);
3639 else
3640 intersect_with_agg_replacements (cs->caller, src_idx, &inter,
3641 delta);
3642 }
3643 else
3644 {
3645 src_plats = ipa_get_parm_lattices (caller_info, src_idx);;
3646 /* Currently we do not produce clobber aggregate jump
3647 functions, adjust when we do. */
3648 gcc_checking_assert (!src_plats->aggs || !jfunc->agg.items);
3649 if (!inter.exists ())
3650 inter = copy_plats_to_inter (src_plats, delta);
3651 else
3652 intersect_with_plats (src_plats, &inter, delta);
3653 }
3654 }
3655 else if (jfunc->agg.items)
3656 {
3657 struct ipa_agg_jf_item *item;
3658 int k;
3659
3660 if (!inter.exists ())
3661 for (unsigned i = 0; i < jfunc->agg.items->length (); i++)
3662 inter.safe_push ((*jfunc->agg.items)[i]);
3663 else
3664 FOR_EACH_VEC_ELT (inter, k, item)
3665 {
3666 int l = 0;
3667 bool found = false;;
3668
3669 if (!item->value)
3670 continue;
3671
3672 while ((unsigned) l < jfunc->agg.items->length ())
3673 {
3674 struct ipa_agg_jf_item *ti;
3675 ti = &(*jfunc->agg.items)[l];
3676 if (ti->offset > item->offset)
3677 break;
3678 if (ti->offset == item->offset)
3679 {
3680 gcc_checking_assert (ti->value);
3681 if (values_equal_for_ipcp_p (item->value,
3682 ti->value))
3683 found = true;
3684 break;
3685 }
3686 l++;
3687 }
3688 if (!found)
3689 item->value = NULL;
3690 }
3691 }
3692 else
3693 {
3694 inter.release ();
3695 return vec<ipa_agg_jf_item>();
3696 }
3697 return inter;
3698 }
3699
/* Look at edges in CALLERS and collect all known aggregate values that arrive
   from all of them.  Return them as a chain of ipa_agg_replacement_value
   structures.  */

static struct ipa_agg_replacement_value *
find_aggregate_values_for_callers_subset (struct cgraph_node *node,
					  vec<cgraph_edge *> callers)
{
  struct ipa_node_params *dest_info = IPA_NODE_REF (node);
  struct ipa_agg_replacement_value *res;
  struct ipa_agg_replacement_value **tail = &res;
  struct cgraph_edge *cs;
  int i, j, count = ipa_get_param_count (dest_info);

  /* Only parameters for which every caller passes an argument can receive
     aggregate replacements; clamp COUNT accordingly.  */
  FOR_EACH_VEC_ELT (callers, j, cs)
    {
      int c = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
      if (c < count)
	count = c;
    }

  for (i = 0; i < count ; i++)
    {
      struct cgraph_edge *cs;
      vec<ipa_agg_jf_item> inter = vNULL;
      struct ipa_agg_jf_item *item;
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (dest_info, i);
      int j;

      /* Among other things, the following check should deal with all by_ref
	 mismatches.  */
      if (plats->aggs_bottom)
	continue;

      /* Intersect the values arriving over all caller edges; a released
	 vector means nothing common survives for this parameter.  */
      FOR_EACH_VEC_ELT (callers, j, cs)
	{
	  inter = intersect_aggregates_with_edge (cs, i, inter);

	  if (!inter.exists ())
	    goto next_param;
	}

      /* Turn the surviving items into replacement values chained onto
	 RES via TAIL.  */
      FOR_EACH_VEC_ELT (inter, j, item)
	{
	  struct ipa_agg_replacement_value *v;

	  if (!item->value)
	    continue;

	  v = ggc_alloc<ipa_agg_replacement_value> ();
	  v->index = i;
	  v->offset = item->offset;
	  v->value = item->value;
	  v->by_ref = plats->aggs_by_ref;
	  *tail = v;
	  tail = &v->next;
	}

    next_param:
      if (inter.exists ())
	inter.release ();
    }
  /* Terminate the chain (RES itself becomes NULL when nothing was
     collected).  */
  *tail = NULL;
  return res;
}
3764
3765 /* Turn KNOWN_AGGS into a list of aggreate replacement values. */
3766
3767 static struct ipa_agg_replacement_value *
3768 known_aggs_to_agg_replacement_list (vec<ipa_agg_jump_function> known_aggs)
3769 {
3770 struct ipa_agg_replacement_value *res;
3771 struct ipa_agg_replacement_value **tail = &res;
3772 struct ipa_agg_jump_function *aggjf;
3773 struct ipa_agg_jf_item *item;
3774 int i, j;
3775
3776 FOR_EACH_VEC_ELT (known_aggs, i, aggjf)
3777 FOR_EACH_VEC_SAFE_ELT (aggjf->items, j, item)
3778 {
3779 struct ipa_agg_replacement_value *v;
3780 v = ggc_alloc<ipa_agg_replacement_value> ();
3781 v->index = i;
3782 v->offset = item->offset;
3783 v->value = item->value;
3784 v->by_ref = aggjf->by_ref;
3785 *tail = v;
3786 tail = &v->next;
3787 }
3788 *tail = NULL;
3789 return res;
3790 }
3791
3792 /* Determine whether CS also brings all scalar values that the NODE is
3793 specialized for. */
3794
3795 static bool
3796 cgraph_edge_brings_all_scalars_for_node (struct cgraph_edge *cs,
3797 struct cgraph_node *node)
3798 {
3799 struct ipa_node_params *dest_info = IPA_NODE_REF (node);
3800 int count = ipa_get_param_count (dest_info);
3801 struct ipa_node_params *caller_info;
3802 struct ipa_edge_args *args;
3803 int i;
3804
3805 caller_info = IPA_NODE_REF (cs->caller);
3806 args = IPA_EDGE_REF (cs);
3807 for (i = 0; i < count; i++)
3808 {
3809 struct ipa_jump_func *jump_func;
3810 tree val, t;
3811
3812 val = dest_info->known_csts[i];
3813 if (!val)
3814 continue;
3815
3816 if (i >= ipa_get_cs_argument_count (args))
3817 return false;
3818 jump_func = ipa_get_ith_jump_func (args, i);
3819 t = ipa_value_from_jfunc (caller_info, jump_func);
3820 if (!t || !values_equal_for_ipcp_p (val, t))
3821 return false;
3822 }
3823 return true;
3824 }
3825
3826 /* Determine whether CS also brings all aggregate values that NODE is
3827 specialized for. */
3828 static bool
3829 cgraph_edge_brings_all_agg_vals_for_node (struct cgraph_edge *cs,
3830 struct cgraph_node *node)
3831 {
3832 struct ipa_node_params *orig_caller_info = IPA_NODE_REF (cs->caller);
3833 struct ipa_node_params *orig_node_info;
3834 struct ipa_agg_replacement_value *aggval;
3835 int i, ec, count;
3836
3837 aggval = ipa_get_agg_replacements_for_node (node);
3838 if (!aggval)
3839 return true;
3840
3841 count = ipa_get_param_count (IPA_NODE_REF (node));
3842 ec = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
3843 if (ec < count)
3844 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3845 if (aggval->index >= ec)
3846 return false;
3847
3848 orig_node_info = IPA_NODE_REF (IPA_NODE_REF (node)->ipcp_orig_node);
3849 if (orig_caller_info->ipcp_orig_node)
3850 orig_caller_info = IPA_NODE_REF (orig_caller_info->ipcp_orig_node);
3851
3852 for (i = 0; i < count; i++)
3853 {
3854 static vec<ipa_agg_jf_item> values = vec<ipa_agg_jf_item>();
3855 struct ipcp_param_lattices *plats;
3856 bool interesting = false;
3857 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3858 if (aggval->index == i)
3859 {
3860 interesting = true;
3861 break;
3862 }
3863 if (!interesting)
3864 continue;
3865
3866 plats = ipa_get_parm_lattices (orig_node_info, aggval->index);
3867 if (plats->aggs_bottom)
3868 return false;
3869
3870 values = intersect_aggregates_with_edge (cs, i, values);
3871 if (!values.exists ())
3872 return false;
3873
3874 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3875 if (aggval->index == i)
3876 {
3877 struct ipa_agg_jf_item *item;
3878 int j;
3879 bool found = false;
3880 FOR_EACH_VEC_ELT (values, j, item)
3881 if (item->value
3882 && item->offset == av->offset
3883 && values_equal_for_ipcp_p (item->value, av->value))
3884 {
3885 found = true;
3886 break;
3887 }
3888 if (!found)
3889 {
3890 values.release ();
3891 return false;
3892 }
3893 }
3894 }
3895 return true;
3896 }
3897
/* Given an original NODE and a VAL for which we have already created a
   specialized clone, look whether there are incoming edges that still lead
   into the old node but now also bring the requested value and also conform to
   all other criteria such that they can be redirected to the special node.
   This function can therefore redirect the final edge in a SCC.  */

template <typename valtype>
static void
perhaps_add_new_callers (cgraph_node *node, ipcp_value<valtype> *val)
{
  ipcp_value_source<valtype> *src;
  gcov_type redirected_sum = 0;

  for (src = val->sources; src; src = src->next)
    {
      struct cgraph_edge *cs = src->cs;
      /* Walk the edge and all of its clones.  */
      while (cs)
	{
	  /* An edge may only be redirected if it brings this value and also
	     all scalar and aggregate values the clone was specialized
	     for.  */
	  if (cgraph_edge_brings_value_p (cs, src, node)
	      && cgraph_edge_brings_all_scalars_for_node (cs, val->spec_node)
	      && cgraph_edge_brings_all_agg_vals_for_node (cs, val->spec_node))
	    {
	      if (dump_file)
		fprintf (dump_file, " - adding an extra caller %s/%i"
			 " of %s/%i\n",
			 xstrdup_for_dump (cs->caller->name ()),
			 cs->caller->order,
			 xstrdup_for_dump (val->spec_node->name ()),
			 val->spec_node->order);

	      cs->redirect_callee_duplicating_thunks (val->spec_node);
	      val->spec_node->expand_all_artificial_thunks ();
	      redirected_sum += cs->count;
	    }
	  cs = get_next_cgraph_edge_clone (cs);
	}
    }

  /* Reflect the extra incoming count in the profiles of both nodes.  */
  if (redirected_sum)
    update_specialized_profile (val->spec_node, node, redirected_sum);
}
3939
3940 /* Return true if KNOWN_CONTEXTS contain at least one useful context. */
3941
3942 static bool
3943 known_contexts_useful_p (vec<ipa_polymorphic_call_context> known_contexts)
3944 {
3945 ipa_polymorphic_call_context *ctx;
3946 int i;
3947
3948 FOR_EACH_VEC_ELT (known_contexts, i, ctx)
3949 if (!ctx->useless_p ())
3950 return true;
3951 return false;
3952 }
3953
3954 /* Return a copy of KNOWN_CSTS if it is not empty, otherwise return vNULL. */
3955
3956 static vec<ipa_polymorphic_call_context>
3957 copy_useful_known_contexts (vec<ipa_polymorphic_call_context> known_contexts)
3958 {
3959 if (known_contexts_useful_p (known_contexts))
3960 return known_contexts.copy ();
3961 else
3962 return vNULL;
3963 }
3964
3965 /* Copy KNOWN_CSTS and modify the copy according to VAL and INDEX. If
3966 non-empty, replace KNOWN_CONTEXTS with its copy too. */
3967
3968 static void
3969 modify_known_vectors_with_val (vec<tree> *known_csts,
3970 vec<ipa_polymorphic_call_context> *known_contexts,
3971 ipcp_value<tree> *val,
3972 int index)
3973 {
3974 *known_csts = known_csts->copy ();
3975 *known_contexts = copy_useful_known_contexts (*known_contexts);
3976 (*known_csts)[index] = val->value;
3977 }
3978
3979 /* Replace KNOWN_CSTS with its copy. Also copy KNOWN_CONTEXTS and modify the
3980 copy according to VAL and INDEX. */
3981
3982 static void
3983 modify_known_vectors_with_val (vec<tree> *known_csts,
3984 vec<ipa_polymorphic_call_context> *known_contexts,
3985 ipcp_value<ipa_polymorphic_call_context> *val,
3986 int index)
3987 {
3988 *known_csts = known_csts->copy ();
3989 *known_contexts = known_contexts->copy ();
3990 (*known_contexts)[index] = val->value;
3991 }
3992
3993 /* Return true if OFFSET indicates this was not an aggregate value or there is
3994 a replacement equivalent to VALUE, INDEX and OFFSET among those in the
3995 AGGVALS list. */
3996
3997 DEBUG_FUNCTION bool
3998 ipcp_val_agg_replacement_ok_p (ipa_agg_replacement_value *aggvals,
3999 int index, HOST_WIDE_INT offset, tree value)
4000 {
4001 if (offset == -1)
4002 return true;
4003
4004 while (aggvals)
4005 {
4006 if (aggvals->index == index
4007 && aggvals->offset == offset
4008 && values_equal_for_ipcp_p (aggvals->value, value))
4009 return true;
4010 aggvals = aggvals->next;
4011 }
4012 return false;
4013 }
4014
4015 /* Return true if offset is minus one because source of a polymorphic contect
4016 cannot be an aggregate value. */
4017
4018 DEBUG_FUNCTION bool
4019 ipcp_val_agg_replacement_ok_p (ipa_agg_replacement_value *,
4020 int , HOST_WIDE_INT offset,
4021 ipa_polymorphic_call_context)
4022 {
4023 return offset == -1;
4024 }
4025
/* Decide whether to create a special version of NODE for value VAL of
   parameter at the given INDEX.  If OFFSET is -1, the value is for the
   parameter itself, otherwise it is stored at the given OFFSET of the
   parameter.  KNOWN_CSTS, KNOWN_CONTEXTS and KNOWN_AGGS describe the other
   already known values.  Return true if a specialized node was created.  */

template <typename valtype>
static bool
decide_about_value (struct cgraph_node *node, int index, HOST_WIDE_INT offset,
		    ipcp_value<valtype> *val, vec<tree> known_csts,
		    vec<ipa_polymorphic_call_context> known_contexts)
{
  struct ipa_agg_replacement_value *aggvals;
  int freq_sum, caller_count;
  gcov_type count_sum;
  vec<cgraph_edge *> callers;

  if (val->spec_node)
    {
      /* A clone for this value already exists; at most try to redirect
	 additional callers to it.  */
      perhaps_add_new_callers (node, val);
      return false;
    }
  else if (val->local_size_cost + overall_size > max_new_size)
    {
      /* Respect the overall growth budget of the pass.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " Ignoring candidate value because "
		 "max_new_size would be reached with %li.\n",
		 val->local_size_cost + overall_size);
      return false;
    }
  else if (!get_info_about_necessary_edges (val, node, &freq_sum, &count_sum,
					    &caller_count))
    /* No edge bringing this value may be hot; not worth cloning for.  */
    return false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, " - considering value ");
      print_ipcp_constant_value (dump_file, val->value);
      fprintf (dump_file, " for ");
      ipa_dump_param (dump_file, IPA_NODE_REF (node), index);
      if (offset != -1)
	fprintf (dump_file, ", offset: " HOST_WIDE_INT_PRINT_DEC, offset);
      fprintf (dump_file, " (caller_count: %i)\n", caller_count);
    }

  /* Evaluate the tradeoff both with local estimates alone and with the
     propagated effects included; at least one must look worthwhile.  */
  if (!good_cloning_opportunity_p (node, val->local_time_benefit,
				   freq_sum, count_sum,
				   val->local_size_cost)
      && !good_cloning_opportunity_p (node,
				      val->local_time_benefit
				      + val->prop_time_benefit,
				      freq_sum, count_sum,
				      val->local_size_cost
				      + val->prop_size_cost))
    return false;

  if (dump_file)
    fprintf (dump_file, " Creating a specialized node of %s/%i.\n",
	     node->name (), node->order);

  callers = gather_edges_for_value (val, node, caller_count);
  /* Work on copies of the known vectors so other candidates are not
     affected by the adjustments for this subset of callers.  */
  if (offset == -1)
    modify_known_vectors_with_val (&known_csts, &known_contexts, val, index);
  else
    {
      known_csts = known_csts.copy ();
      known_contexts = copy_useful_known_contexts (known_contexts);
    }
  find_more_scalar_values_for_callers_subset (node, known_csts, callers);
  find_more_contexts_for_caller_subset (node, &known_contexts, callers);
  aggvals = find_aggregate_values_for_callers_subset (node, callers);
  gcc_checking_assert (ipcp_val_agg_replacement_ok_p (aggvals, index,
						      offset, val->value));
  val->spec_node = create_specialized_node (node, known_csts, known_contexts,
					    aggvals, callers);
  overall_size += val->local_size_cost;

  /* TODO: If for some lattice there is only one other known value
     left, make a special node for it too. */

  return true;
}
4107
4108 /* Decide whether and what specialized clones of NODE should be created. */
4109
static bool
decide_whether_version_node (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i, count = ipa_get_param_count (info);
  vec<tree> known_csts;
  vec<ipa_polymorphic_call_context> known_contexts;
  /* Aggregate values are gathered only when we are going to clone for all
     contexts, see the call to gather_context_independent_values below.  */
  vec<ipa_agg_jump_function> known_aggs = vNULL;
  bool ret = false;

  /* A function without parameters cannot be specialized on them.  */
  if (count == 0)
    return false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEvaluating opportunities for %s/%i.\n",
	     node->name (), node->order);

  gather_context_independent_values (info, &known_csts, &known_contexts,
				  info->do_clone_for_all_contexts ? &known_aggs
				  : NULL, NULL);

  for (i = 0; i < count ;i++)
    {
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
      ipcp_lattice<tree> *lat = &plats->itself;
      ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;

      /* Scalar constants: consider each candidate value only when no single
	 constant is already known for this parameter in all contexts.  */
      if (!lat->bottom
	  && !known_csts[i])
	{
	  ipcp_value<tree> *val;
	  for (val = lat->values; val; val = val->next)
	    ret |= decide_about_value (node, i, -1, val, known_csts,
				       known_contexts);
	}

      /* Constants known to be stored in aggregates passed to this
	 parameter, one lattice per offset.  */
      if (!plats->aggs_bottom)
	{
	  struct ipcp_agg_lattice *aglat;
	  ipcp_value<tree> *val;
	  for (aglat = plats->aggs; aglat; aglat = aglat->next)
	    if (!aglat->bottom && aglat->values
		/* If the following is false, the one value is in
		   known_aggs.  */
		&& (plats->aggs_contain_variable
		    || !aglat->is_single_const ()))
	      for (val = aglat->values; val; val = val->next)
		ret |= decide_about_value (node, i, aglat->offset, val,
					   known_csts, known_contexts);
	}

      /* Polymorphic call contexts, only considered when no context is
	 already usefully known for this parameter.  */
      if (!ctxlat->bottom
	  && known_contexts[i].useless_p ())
	{
	  ipcp_value<ipa_polymorphic_call_context> *val;
	  for (val = ctxlat->values; val; val = val->next)
	    ret |= decide_about_value (node, i, -1, val, known_csts,
				       known_contexts);
	}

      /* Re-read INFO; creating specialized nodes above may have invalidated
	 the pointer.  */
      info = IPA_NODE_REF (node);
    }

  if (info->do_clone_for_all_contexts)
    {
      struct cgraph_node *clone;
      vec<cgraph_edge *> callers;

      if (dump_file)
	fprintf (dump_file, " - Creating a specialized node of %s/%i "
		 "for all known contexts.\n", node->name (),
		 node->order);

      callers = node->collect_callers ();

      /* Drop the context vector entirely when it carries no information.  */
      if (!known_contexts_useful_p (known_contexts))
	{
	  known_contexts.release ();
	  known_contexts = vNULL;
	}
      clone = create_specialized_node (node, known_csts, known_contexts,
			       known_aggs_to_agg_replacement_list (known_aggs),
			       callers);
      info = IPA_NODE_REF (node);
      info->do_clone_for_all_contexts = false;
      IPA_NODE_REF (clone)->is_all_contexts_clone = true;
      for (i = 0; i < count ; i++)
	vec_free (known_aggs[i].items);
      known_aggs.release ();
      ret = true;
    }
  else
    {
      /* The vectors were not consumed by node creation, free them here.  */
      known_csts.release ();
      known_contexts.release ();
    }

  return ret;
}
4209
4210 /* Transitively mark all callees of NODE within the same SCC as not dead. */
4211
4212 static void
4213 spread_undeadness (struct cgraph_node *node)
4214 {
4215 struct cgraph_edge *cs;
4216
4217 for (cs = node->callees; cs; cs = cs->next_callee)
4218 if (ipa_edge_within_scc (cs))
4219 {
4220 struct cgraph_node *callee;
4221 struct ipa_node_params *info;
4222
4223 callee = cs->callee->function_symbol (NULL);
4224 info = IPA_NODE_REF (callee);
4225
4226 if (info->node_dead)
4227 {
4228 info->node_dead = 0;
4229 spread_undeadness (callee);
4230 }
4231 }
4232 }
4233
4234 /* Return true if NODE has a caller from outside of its SCC that is not
4235 dead. Worker callback for cgraph_for_node_and_aliases. */
4236
4237 static bool
4238 has_undead_caller_from_outside_scc_p (struct cgraph_node *node,
4239 void *data ATTRIBUTE_UNUSED)
4240 {
4241 struct cgraph_edge *cs;
4242
4243 for (cs = node->callers; cs; cs = cs->next_caller)
4244 if (cs->caller->thunk.thunk_p
4245 && cs->caller->call_for_symbol_thunks_and_aliases
4246 (has_undead_caller_from_outside_scc_p, NULL, true))
4247 return true;
4248 else if (!ipa_edge_within_scc (cs)
4249 && !IPA_NODE_REF (cs->caller)->node_dead)
4250 return true;
4251 return false;
4252 }
4253
4254
4255 /* Identify nodes within the same SCC as NODE which are no longer needed
4256 because of new clones and will be removed as unreachable. */
4257
4258 static void
4259 identify_dead_nodes (struct cgraph_node *node)
4260 {
4261 struct cgraph_node *v;
4262 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4263 if (v->will_be_removed_from_program_if_no_direct_calls_p ()
4264 && !v->call_for_symbol_thunks_and_aliases
4265 (has_undead_caller_from_outside_scc_p, NULL, true))
4266 IPA_NODE_REF (v)->node_dead = 1;
4267
4268 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4269 if (!IPA_NODE_REF (v)->node_dead)
4270 spread_undeadness (v);
4271
4272 if (dump_file && (dump_flags & TDF_DETAILS))
4273 {
4274 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4275 if (IPA_NODE_REF (v)->node_dead)
4276 fprintf (dump_file, " Marking node as dead: %s/%i.\n",
4277 v->name (), v->order);
4278 }
4279 }
4280
4281 /* The decision stage. Iterate over the topological order of call graph nodes
4282 TOPO and make specialized clones if deemed beneficial. */
4283
4284 static void
4285 ipcp_decision_stage (struct ipa_topo_info *topo)
4286 {
4287 int i;
4288
4289 if (dump_file)
4290 fprintf (dump_file, "\nIPA decision stage:\n\n");
4291
4292 for (i = topo->nnodes - 1; i >= 0; i--)
4293 {
4294 struct cgraph_node *node = topo->order[i];
4295 bool change = false, iterate = true;
4296
4297 while (iterate)
4298 {
4299 struct cgraph_node *v;
4300 iterate = false;
4301 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4302 if (v->has_gimple_body_p ()
4303 && ipcp_versionable_function_p (v))
4304 iterate |= decide_whether_version_node (v);
4305
4306 change |= iterate;
4307 }
4308 if (change)
4309 identify_dead_nodes (node);
4310 }
4311 }
4312
4313 /* Look up all alignment information that we have discovered and copy it over
4314 to the transformation summary. */
4315
static void
ipcp_store_alignment_results (void)
{
  cgraph_node *node;

  FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
    {
      ipa_node_params *info = IPA_NODE_REF (node);
      bool dumped_sth = false;
      bool found_useful_result = false;

      /* Specialized clones share the lattices of the node they were created
	 from, so read results from the original node.  */
      if (info->ipcp_orig_node)
	info = IPA_NODE_REF (info->ipcp_orig_node);

      /* First pass: bail out early if no parameter has a usable alignment,
	 so no transformation summary is allocated for this node.  */
      unsigned count = ipa_get_param_count (info);
      for (unsigned i = 0; i < count ; i++)
	{
	  ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
	  if (plats->alignment.known
	      && plats->alignment.align > 0)
	    {
	      found_useful_result = true;
	      break;
	    }
	}
      if (!found_useful_result)
	continue;

      ipcp_grow_transformations_if_necessary ();
      ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
      vec_safe_reserve_exact (ts->alignments, count);

      /* Second pass: copy each parameter's alignment into the summary and
	 optionally dump it.  */
      for (unsigned i = 0; i < count ; i++)
	{
	  ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);

	  /* An alignment of zero carries no information; downgrade it to
	     unknown before storing.  */
	  if (plats->alignment.align == 0)
	    plats->alignment.known = false;

	  ts->alignments->quick_push (plats->alignment);
	  if (!dump_file || !plats->alignment.known)
	    continue;
	  /* Print the per-function header only once.  */
	  if (!dumped_sth)
	    {
	      fprintf (dump_file, "Propagated alignment info for function %s/%i:\n",
		       node->name (), node->order);
	      dumped_sth = true;
	    }
	  fprintf (dump_file, "  param %i: align: %u, misalign: %u\n",
		   i, plats->alignment.align, plats->alignment.misalign);
	}
    }
}
4369
4370 /* The IPCP driver. */
4371
static unsigned int
ipcp_driver (void)
{
  struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
  struct cgraph_edge_hook_list *edge_removal_hook_holder;
  struct ipa_topo_info topo;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  grow_edge_clone_vectors ();
  /* Register hooks so that edge clone bookkeeping stays in sync with call
     graph edge duplication and removal.  */
  edge_duplication_hook_holder =
    symtab->add_edge_duplication_hook (&ipcp_edge_duplication_hook, NULL);
  edge_removal_hook_holder =
    symtab->add_edge_removal_hook (&ipcp_edge_removal_hook, NULL);

  /* Allocation pools for lattice values, polymorphic contexts, value
     sources and aggregate lattices used throughout propagation.  */
  ipcp_cst_values_pool = create_alloc_pool ("IPA-CP constant values",
					    sizeof (ipcp_value<tree>), 32);
  ipcp_poly_ctx_values_pool = create_alloc_pool
    ("IPA-CP polymorphic contexts",
     sizeof (ipcp_value<ipa_polymorphic_call_context>), 32);
  ipcp_sources_pool = create_alloc_pool ("IPA-CP value sources",
					 sizeof (ipcp_value_source<tree>), 64);
  ipcp_agg_lattice_pool = create_alloc_pool ("IPA_CP aggregate lattices",
					     sizeof (struct ipcp_agg_lattice),
					     32);
  if (dump_file)
    {
      fprintf (dump_file, "\nIPA structures before propagation:\n");
      if (dump_flags & TDF_DETAILS)
	ipa_print_all_params (dump_file);
      ipa_print_all_jump_functions (dump_file);
    }

  /* Topological sort.  */
  build_toporder_info (&topo);
  /* Do the interprocedural propagation.  */
  ipcp_propagate_stage (&topo);
  /* Decide what constant propagation and cloning should be performed.  */
  ipcp_decision_stage (&topo);
  /* Store results of alignment propagation.  */
  ipcp_store_alignment_results ();

  /* Free all IPCP structures.  */
  free_toporder_info (&topo);
  next_edge_clone.release ();
  symtab->remove_edge_removal_hook (edge_removal_hook_holder);
  symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
  ipa_free_all_structures_after_ipa_cp ();
  if (dump_file)
    fprintf (dump_file, "\nIPA constant propagation end\n");
  return 0;
}
4424
4425 /* Initialization and computation of IPCP data structures. This is the initial
4426 intraprocedural analysis of functions, which gathers information to be
4427 propagated later on. */
4428
4429 static void
4430 ipcp_generate_summary (void)
4431 {
4432 struct cgraph_node *node;
4433
4434 if (dump_file)
4435 fprintf (dump_file, "\nIPA constant propagation start:\n");
4436 ipa_register_cgraph_hooks ();
4437
4438 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
4439 {
4440 node->local.versionable
4441 = tree_versionable_function_p (node->decl);
4442 ipa_analyze_node (node);
4443 }
4444 }
4445
4446 /* Write ipcp summary for nodes in SET. */
4447
static void
ipcp_write_summary (void)
{
  /* The whole IPA-CP summary is encoded in the jump functions.  */
  ipa_prop_write_jump_functions ();
}
4453
4454 /* Read ipcp summary. */
4455
static void
ipcp_read_summary (void)
{
  /* Counterpart of ipcp_write_summary; restores the jump functions.  */
  ipa_prop_read_jump_functions ();
}
4461
namespace {

const pass_data pass_data_ipa_cp =
{
  IPA_PASS, /* type */
  "cp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_CONSTANT_PROP, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_dump_symtab | TODO_remove_functions ), /* todo_flags_finish */
};

/* Pass wrapper registering the IPA-CP callbacks with the pass manager.  */

class pass_ipa_cp : public ipa_opt_pass_d
{
public:
  pass_ipa_cp (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cp, ctxt,
		      ipcp_generate_summary, /* generate_summary */
		      ipcp_write_summary, /* write_summary */
		      ipcp_read_summary, /* read_summary */
		      ipcp_write_transformation_summaries, /*
		      write_optimization_summary */
		      ipcp_read_transformation_summaries, /*
		      read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      ipcp_transform_function, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    /* FIXME: We should remove the optimize check after we ensure we never run
       IPA passes when not optimizing.  */
    return (flag_ipa_cp && optimize) || in_lto_p;
  }

  virtual unsigned int execute (function *) { return ipcp_driver (); }

}; // class pass_ipa_cp

} // anon namespace
4508
ipa_opt_pass_d *
make_pass_ipa_cp (gcc::context *ctxt)
{
  /* Factory called by the pass manager; caller owns the returned pass.  */
  return new pass_ipa_cp (ctxt);
}
4514
4515 /* Reset all state within ipa-cp.c so that we can rerun the compiler
4516 within the same process. For use by toplev::finalize. */
4517
4518 void
4519 ipa_cp_c_finalize (void)
4520 {
4521 max_count = 0;
4522 overall_size = 0;
4523 max_new_size = 0;
4524 }