openmp: Implement OpenMP 5.0 base-pointer attachment and clause ordering
gcc/ipa-prop.c
/* Interprocedural analyses.
   Copyright (C) 2005-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>. */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-fnsummary.h"
#include "gimple-pretty-print.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
#include "tree-cfgcleanup.h"
#include "options.h"
#include "symtab-clones.h"
#include "attr-fnspec.h"

/* Function summary where the parameter infos are actually stored. */
ipa_node_params_t *ipa_node_params_sum = NULL;

function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information. */
ipa_edge_args_sum_t *ipa_edge_args_sum;

/* Traits for a hash table for reusing already existing ipa_bits. */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;
  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
  {
    return a->value == b->value && a->mask == b->mask;
  }
  static const bool empty_zero_p = true;
  static void
  mark_empty (ipa_bits *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const ipa_bits *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const ipa_bits *p)
  {
    return p == reinterpret_cast<const ipa_bits *> (1);
  }
  static void
  mark_deleted (ipa_bits *&p)
  {
    p = reinterpret_cast<ipa_bits *> (1);
  }
};

/* Hash table for avoiding repeated allocations of equal ipa_bits. */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;

/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL. */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
{
  typedef value_range *value_type;
  typedef value_range *compare_type;
  static hashval_t
  hash (const value_range *p)
  {
    inchash::hash hstate (p->kind ());
    inchash::add_expr (p->min (), hstate);
    inchash::add_expr (p->max (), hstate);
    return hstate.end ();
  }
  static bool
  equal (const value_range *a, const value_range *b)
  {
    return (a->equal_p (*b)
            && types_compatible_p (a->type (), b->type ()));
  }
  static const bool empty_zero_p = true;
  static void
  mark_empty (value_range *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const value_range *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const value_range *p)
  {
    return p == reinterpret_cast<const value_range *> (1);
  }
  static void
  mark_deleted (value_range *&p)
  {
    p = reinterpret_cast<value_range *> (1);
  }
};

/* Hash table for avoiding repeated allocations of equal value_ranges. */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;

/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant. */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference. */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned. */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control. */
  int refcount;
};

/* Allocation pool for reference descriptions. */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
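
/* For example (an illustrative sketch, not part of the original sources):
   a function declared as

     __attribute__ ((optimize ("O0"))) int f (int i) { return i; }

   carries DECL_FUNCTION_SPECIFIC_OPTIMIZATION with optimization disabled,
   so the predicate below returns true and IPA-CP leaves it alone. */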

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
         || !opt_for_fn (node->decl, flag_ipa_cp);
}

/* Return the index of the formal parameter whose tree is PTREE, in the
   function described by DESCRIPTORS.  Return -1 if it is not found. */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
                            tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return the index of the formal parameter whose tree is PTREE, in the
   function which corresponds to INFO. */

int
ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl fields in parameter DESCRIPTORS that correspond
   to NODE. */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
      descriptors[param_num].move_cost = cost;
      /* Watch overflow, move_cost is a bitfield. */
      gcc_checking_assert (cost == descriptors[param_num].move_cost);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has. */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump the Ith formal parameter of the function corresponding to INFO to
   FILE.  Note there is no setter function as the descriptor array is built
   just once using ipa_initialize_node_params. */

void
ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}

/* If necessary, allocate a vector of PARAM_COUNT parameter descriptors in the
   info of NODE.  Return true if they were allocated, false if not. */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count, true);
      return true;
    }
  else
    return false;
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls. */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}

/* Print the jump functions associated with call graph edge CS to file F. */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant.value;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   get_tree_code_name (jump_func->value.pass_through.operation));
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f, jump_func->value.pass_through.operand);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, " Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
            {
              fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              fprintf (f, "type: ");
              print_generic_expr (f, item->type);
              fprintf (f, ", ");
              if (item->jftype == IPA_JF_PASS_THROUGH)
                fprintf (f, "PASS THROUGH: %d,",
                         item->value.pass_through.formal_id);
              else if (item->jftype == IPA_JF_LOAD_AGG)
                {
                  fprintf (f, "LOAD AGG: %d",
                           item->value.pass_through.formal_id);
                  fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
                           item->value.load_agg.offset,
                           item->value.load_agg.by_ref ? "reference"
                                                       : "value");
                }

              if (item->jftype == IPA_JF_PASS_THROUGH
                  || item->jftype == IPA_JF_LOAD_AGG)
                {
                  fprintf (f, " op %s",
                           get_tree_code_name (item->value.pass_through.operation));
                  if (item->value.pass_through.operation != NOP_EXPR)
                    {
                      fprintf (f, " ");
                      print_generic_expr (f, item->value.pass_through.operand);
                    }
                }
              else if (item->jftype == IPA_JF_CONST)
                {
                  fprintf (f, "CONST: ");
                  print_generic_expr (f, item->value.constant);
                }
              else if (item->jftype == IPA_JF_UNKNOWN)
                fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_to_uhwi (TYPE_SIZE (item->type)));
              fprintf (f, "\n");
            }
        }

      class ipa_polymorphic_call_context *ctx
        = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
        {
          fprintf (f, " Context: ");
          ctx->dump (f);
        }

      if (jump_func->bits)
        {
          fprintf (f, " value: ");
          print_hex (jump_func->bits->value, f);
          fprintf (f, ", mask: ");
          print_hex (jump_func->bits->mask, f);
          fprintf (f, "\n");
        }
      else
        fprintf (f, " Unknown bits\n");

      if (jump_func->m_vr)
        {
          fprintf (f, " VR ");
          fprintf (f, "%s[",
                   (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
          print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
          fprintf (f, ", ");
          print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
          fprintf (f, "]\n");
        }
      else
        fprintf (f, " Unknown VR\n");
    }
}


/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F. */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      fprintf (f, " callsite %s -> %s : \n",
               node->dump_name (),
               cs->callee->dump_name ());
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        fprintf (f, " no arg info\n");
      else
        ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      class cgraph_indirect_call_info *ii;

      ii = cs->indirect_info;
      if (ii->agg_contents)
        fprintf (f, " indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
                 ii->member_ptr ? "member ptr" : "aggregate",
                 ii->param_index, ii->offset,
                 ii->by_ref ? "by reference" : "by value");
      else
        fprintf (f, " indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC,
                 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
                 ii->offset);

      if (cs->call_stmt)
        {
          fprintf (f, ", for stmt ");
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, "\n");
      if (ii->polymorphic)
        ii->context.dump (f);
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        fprintf (f, " no arg info\n");
      else
        ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F. */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be an unknown jump function, i.e. one that conveys no
   information about the argument at all. */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
}

/* Set DST to be a copy of another jump function SRC (to be used by jump
   function combination code).  The two functions will share their rdesc. */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
                     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function. */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
                     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function. */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be a unary pass-through jump function. */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an arithmetic pass-through jump function. */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function. */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}

/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body. */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change. */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for. */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change. */
  tree object;
  /* Set to true if dynamic type change has been detected. */
  bool type_maybe_changed;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse. */
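
/* An illustrative sketch (hypothetical C++-like pseudo code, not taken from
   the sources) of the constructor layout assumed above:

     Derived::Derived ()
     {
       Base::Base (this);             // 1) ancestor constructors run first
       this->vptr = &vtable_Derived;  // 2) VMT pointers are set
       this->member.init ();          // 3) user code, may call virtuals
     }
*/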

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_ref_base_and_extent to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref. */
        }
    }
  return true;
}

/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure. */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   between the beginning of the function and the invocation of CALL.

   Generally functions are not allowed to change the type of such instances,
   but they may call destructors.  We assume that methods cannot destroy the
   THIS pointer.  Also, as a special case, constructors and destructors may
   change the type of the THIS pointer. */
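
/* For example (an illustrative sketch, not from the sources): in

     void f (B *b) { b->virtual_fn (); }

   the dynamic type of *b cannot change between the entry of f and the call,
   so the function below returns false for the parameter b, whereas inside an
   inlined constructor or destructor it conservatively returns true. */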

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory. */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inlined cdtor is actually working on ARG, but we do not have an easy
     way to tie them together, so we punt on all non-pure cdtors).
     We may also record the types of cdtors and match them once we know
     the type of the instance.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization. */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument. */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
           || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
          /* THIS pointer of a method - here we want to watch constructors
             and destructors as those definitely may change the dynamic
             type. */
          || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
              && !DECL_CXX_CONSTRUCTOR_P (function)
              && !DECL_CXX_DESTRUCTOR_P (function)
              && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
        {
          /* Walk the inline stack and watch out for ctors/dtors. */
          for (tree block = gimple_block (call);
               block && TREE_CODE (block) == BLOCK;
               block = BLOCK_SUPERCONTEXT (block))
            if (inlined_polymorphic_ctor_dtor_block_p (block, false))
              return true;
          return false;
        }
    }
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the OFFSET.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary. */

static bool
detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
                                       tree base, tree comp_type, gcall *call,
                                       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter. */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic. */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  int walked
    = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                          &tci, NULL, NULL, fbi->aa_walk_budget + 1);

  if (walked >= 0 && !tci.type_maybe_changed)
    return false;

  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it has, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the OFFSET. */

static bool
detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
                    tree comp_type, gcall *call,
                    HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
                                   TREE_OPERAND (base, 0),
                                   call))
    return false;
  return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
                                                call, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero). */

static bool
detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
                        gcall *call)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
                                                call, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA. */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB. */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
                           int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
        return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
          && bi->param_aa_statuses[index].valid)
        return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   the structures and/or initialize the result with a dominating description as
   necessary. */
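
/* For example (an illustrative sketch): if the queried basic block has no
   valid status yet but is dominated by a block that does, the dominating
   status is simply copied, so the walk up the dominator tree is performed
   at most once per block and parameter. */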

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
                          int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count, true);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
                           && !paa->ref_modified
                           && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
        *paa = *dom_paa;
      else
        paa->valid = true;
    }

  return paa;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function which we have
   gathered so far but which does not survive the summary building stage. */
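
/* A note on cost control (describing the code below, not adding behavior):
   the alias walk is capped by FBI->aa_walk_budget.  Every visited virtual
   definition is subtracted from the remaining budget, and if
   walk_aliased_vdefs gives up (returns a negative value), the parameter is
   conservatively treated as modified and the budget is zeroed so that later
   queries do not start new expensive walks. */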

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
                              gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL, NULL,
                                   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      modified = true;
      if (fbi)
        fbi->aa_walk_budget = 0;
    }
  else if (fbi)
    fbi->aa_walk_budget -= walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   and that parameter is known not to have been modified, return its index in
   ipa_node_params.  Otherwise return -1. */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
                            vec<ipa_param_descriptor, va_gc> *descriptors,
                            gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT. */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
                           int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->ref_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL, NULL,
                                   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body. */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
                              gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers. */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
                                                              gimple_bb (call),
                                                              index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
                                   &modified, NULL, NULL,
                                   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      fbi->aa_walk_budget = 0;
      modified = true;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified before
   the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will return true
   even if it cannot prove the value has not been modified, in that case it
   will store false to *GUARANTEED_UNMODIFIED, otherwise it will store true
   there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If the function returns
   true, *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed by
   reference respectively. */
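
/* For example (an illustrative sketch, not from the sources): given

     void f (struct S s, struct S *p)

   a load from "s.f" matches with *BY_REF_P set to false, while a load from
   "p->f", i.e. "MEM[(struct S *)p_1(D)].f", matches with *BY_REF_P set to
   true, provided the respective memory has not been written to earlier. */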

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
                        vec<ipa_param_descriptor, va_gc> *descriptors,
                        gimple *stmt, tree op, int *index_p,
                        HOST_WIDE_INT *offset_p, poly_int64 *size_p,
                        bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  if (!base)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (fbi, index, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          if (size_p)
            *size_p = size;
          if (guaranteed_unmodified)
            *guaranteed_unmodified = true;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
           void (*<T2e4>) (struct S *) D.1867;
           struct S * p.1;

           <bb 2>:
           p.1_1 = p;
           D.1867_2 = p.1_1->f;
           D.1867_2 ();
           gdp = &p;
         */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
        return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
        *size_p = size;
      if (guaranteed_unmodified)
        *guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   or from an aggregate passed as the parameter either by value or reference,
   return the index of the parameter in ipa_node_params.  Otherwise return -1.

   FBI holds gathered information about the function.  INFO describes
   parameters of the function, STMT is the assignment statement.  If it is a
   memory load from an aggregate, *OFFSET_P is filled with offset within the
   aggregate, and *BY_REF_P specifies whether the aggregate is passed by
   reference. */

static int
load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
                                   class ipa_node_params *info,
                                   gimple *stmt,
                                   HOST_WIDE_INT *offset_p,
                                   bool *by_ref_p)
{
  int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
  poly_int64 size;

  /* Load value from a parameter declaration. */
  if (index >= 0)
    {
      *offset_p = -1;
      return index;
    }

  if (!gimple_assign_load_p (stmt))
    return -1;

  tree rhs = gimple_assign_rhs1 (stmt);

  /* Skip memory reference containing VIEW_CONVERT_EXPR. */
  for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
    if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
      return -1;

  /* Skip memory reference containing bit-field. */
  if (TREE_CODE (rhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (rhs))
    return -1;

  if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
                               offset_p, &size, by_ref_p))
    return -1;

  gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
                         size));
  if (!*by_ref_p)
    {
      tree param_type = ipa_get_type (info, index);

      if (!param_type || !AGGREGATE_TYPE_P (param_type))
        return -1;
    }
  else if (TREE_THIS_VOLATILE (rhs))
    return -1;

  return index;
}

/* Walk pointer adjustments from OP (such as POINTER_PLUS and ADDR_EXPR)
   to find the original pointer.  Initialize RET to the pointer which results
   from the walk.  If the offset is known, return true and initialize
   OFFSET_RET. */
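
/* For example (an illustrative sketch, not from the sources): for

     q_1 = p_2(D) + 16;
     op  = &MEM[(char *)q_1 + 4];

   the walk ends at p_2(D) with *OFFSET_RET set to 20 (in bytes). */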

bool
unadjusted_ptr_and_unit_offset (tree op, tree *ret, poly_int64 *offset_ret)
{
  poly_int64 offset = 0;
  bool offset_known = true;
  int i;

  for (i = 0; i < param_ipa_jump_function_lookups; i++)
    {
      if (TREE_CODE (op) == ADDR_EXPR)
        {
          poly_int64 extra_offset = 0;
          tree base = get_addr_base_and_unit_offset (TREE_OPERAND (op, 0),
                                                     &extra_offset);
          if (!base)
            {
              base = get_base_address (TREE_OPERAND (op, 0));
              if (TREE_CODE (base) != MEM_REF)
                break;
              offset_known = false;
            }
          else
            {
              if (TREE_CODE (base) != MEM_REF)
                break;
              offset += extra_offset;
            }
          op = TREE_OPERAND (base, 0);
          if (mem_ref_offset (base).to_shwi (&extra_offset))
            offset += extra_offset;
          else
            offset_known = false;
        }
      else if (TREE_CODE (op) == SSA_NAME
               && !SSA_NAME_IS_DEFAULT_DEF (op))
        {
          gimple *pstmt = SSA_NAME_DEF_STMT (op);

          if (gimple_assign_single_p (pstmt))
            op = gimple_assign_rhs1 (pstmt);
          else if (is_gimple_assign (pstmt)
                   && gimple_assign_rhs_code (pstmt) == POINTER_PLUS_EXPR)
            {
              poly_int64 extra_offset = 0;
              if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt),
                                  &extra_offset))
                offset += extra_offset;
              else
                offset_known = false;
              op = gimple_assign_rhs1 (pstmt);
            }
          else
            break;
        }
      else
        break;
    }
  *ret = op;
  *offset_ret = offset;
  return offset_known;
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

   foo (int a)
   {
     int a.0;

     a.0_2 = a;
     bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

   foo (int a)
   {
     int D.2064;

     D.2064_4 = a.1(D) + 4;
     bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

   foo (int a, int z)
   {
     int a.0;
     int D.2064;

     a.0_3 = a;
     D.2064_4 = a.0_3 + 4;
     foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

   B::foo() (struct B * const this)
   {
     struct A * D.1845;

     D.1845_2 = &this_1(D)->D.1748;
     A::bar (D.1845_2);

   INFO is the structure describing individual parameters, accessed at
   different stages of IPA optimizations.  PARMS_AINFO contains the information
   that is only needed for intraprocedural analysis. */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
                                  class ipa_node_params *info,
                                  struct ipa_jump_func *jfunc,
                                  gcall *call, gimple *stmt, tree name,
                                  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (fbi, info->descriptors,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
        {
        case GIMPLE_BINARY_RHS:
          {
            tree op2 = gimple_assign_rhs2 (stmt);
            if (!is_gimple_ip_invariant (op2)
                || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
                     != tcc_comparison)
                    && !useless_type_conversion_p (TREE_TYPE (name),
                                                   TREE_TYPE (op1))))
              return;

            ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                           gimple_assign_rhs_code (stmt));
            break;
          }
        case GIMPLE_SINGLE_RHS:
          {
            bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
                                                       tc_ssa);
            ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
            break;
          }
        case GIMPLE_UNARY_RHS:
          if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
            ipa_set_jf_unary_pass_through (jfunc, index,
                                           gimple_assign_rhs_code (stmt));
        default:;
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors. */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
                         parm_ref_data_pass_through_p (fbi, index, call, ssa));
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}


/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

   if (obj_2(D) != 0B)
     goto <bb 3>;
   else
     goto <bb 4>;

   <bb 3>:
   iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
   # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
   D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
   return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
                                    class ipa_node_params *info,
                                    struct ipa_jump_func *jfunc,
                                    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
                       parm_ref_data_pass_through_p (fbi, index, call, parm));
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there. */
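
/* For example (an illustrative sketch; the exact field names are ABI-specific
   and not assumed by the code below), the Itanium C++ ABI lowers a
   pointer-to-member-function roughly to

     struct
     {
       void (*__pfn) (struct S *);   // method pointer or vtable index
       long __delta;                 // this-pointer adjustment
     };

   i.e. a record whose first field is a pointer to a METHOD_TYPE and whose
   second field is integral. */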

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement, and this statement is stored in
   *RHS_STMT.  Otherwise return RHS as it is. */

static inline tree
get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
      *rhs_stmt = def_stmt;
    }
  return rhs;
}

/* Simple linked list, describing contents of an aggregate before call. */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate. */
  HOST_WIDE_INT offset, size;

  /* Type of the described part of the aggregate. */
  tree type;

  /* Known constant value or jump function data describing contents. */
  struct ipa_load_agg_data value;

  /* Pointer to the next structure in the list. */
  struct ipa_known_agg_contents_list *next;
};

/* Add an aggregate content item into a linked list of
   ipa_known_agg_contents_list structures, in which all elements
   are sorted by offset in ascending order. */

static inline void
add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
                          struct ipa_known_agg_contents_list *item)
{
  struct ipa_known_agg_contents_list *list = *plist;

  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
        break;

      plist = &list->next;
    }

  item->next = list;
  *plist = item;
}

/* Check whether a given aggregate content is clobbered by some element in
   a linked list of ipa_known_agg_contents_list. */
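
/* For example (an illustrative sketch): an existing list entry covering
   [0, 64) clobbers a new item starting at offset 32, because the two ranges
   overlap, while an entry covering [0, 32) does not. */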

static inline bool
clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
                                struct ipa_known_agg_contents_list *item)
{
  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
        return list->offset < item->offset + item->size;

      if (list->offset + list->size > item->offset)
        return true;
    }

  return false;
}

/* Build an aggregate jump function from LIST, assuming there are exactly
   VALUE_COUNT entries there and that the offset of the passed argument
   is ARG_OFFSET, and store it into JFUNC. */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
                               int value_count, HOST_WIDE_INT arg_offset,
                               struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, value_count);
  for (; list; list = list->next)
    {
      struct ipa_agg_jf_item item;
      tree operand = list->value.pass_through.operand;

      if (list->value.pass_through.formal_id >= 0)
        {
          /* Content value is derived from some formal parameter. */
          if (list->value.offset >= 0)
            item.jftype = IPA_JF_LOAD_AGG;
          else
            item.jftype = IPA_JF_PASS_THROUGH;

          item.value.load_agg = list->value;
          if (operand)
            item.value.pass_through.operand
              = unshare_expr_without_location (operand);
        }
      else if (operand)
        {
          /* Content value is known constant. */
          item.jftype = IPA_JF_CONST;
          item.value.constant = unshare_expr_without_location (operand);
        }
      else
        continue;

      item.type = list->type;
      gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);

      item.offset = list->offset - arg_offset;
      gcc_assert ((item.offset % BITS_PER_UNIT) == 0);

      jfunc->agg.items->quick_push (item);
    }
}

/* Given an assignment statement STMT, try to collect information into
   AGG_VALUE that will be used to construct a jump function for the RHS of the
   assignment, from which the content value of an aggregate part comes.

   Besides constant and simple pass-through jump functions, also try to
   identify whether it matches the following pattern that can be described by
   a load-value-from-aggregate jump function, which is a derivative of simple
   pass-through jump function.

     foo (int *p)
     {
       ...

       *(q_5 + 4) = *(p_3(D) + 28) op 1;
       bar (q_5);
     }

   Here the IPA_LOAD_AGG_DATA data structure is informative enough to describe
   constant, simple pass-through and load-value-from-aggregate.  If the value
   is constant, it will be kept in field OPERAND, and field FORMAL_ID is
   set to -1.  For simple pass-through and load-value-from-aggregate, field
   FORMAL_ID specifies the related formal parameter index, and field
   OFFSET can be used to distinguish them: -1 means simple pass-through,
   otherwise it means load-value-from-aggregate. */

static void
analyze_agg_content_value (struct ipa_func_body_info *fbi,
                           struct ipa_load_agg_data *agg_value,
                           gimple *stmt)
{
  tree lhs = gimple_assign_lhs (stmt);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  enum tree_code code;
  int index = -1;

  /* Initialize jump function data for the aggregate part. */
  memset (agg_value, 0, sizeof (*agg_value));
  agg_value->pass_through.operation = NOP_EXPR;
  agg_value->pass_through.formal_id = -1;
  agg_value->offset = -1;

  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs)) /* TODO: Support aggregate type. */
      || TREE_THIS_VOLATILE (lhs)
      || TREE_CODE (lhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (lhs))
    return;

  /* Skip SSA copies. */
  while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
    {
      if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
        break;

      stmt = SSA_NAME_DEF_STMT (rhs1);
      if (!is_gimple_assign (stmt))
        return;

      rhs1 = gimple_assign_rhs1 (stmt);
    }

  code = gimple_assign_rhs_code (stmt);
  switch (gimple_assign_rhs_class (stmt))
    {
    case GIMPLE_SINGLE_RHS:
      if (is_gimple_ip_invariant (rhs1))
        {
          agg_value->pass_through.operand = rhs1;
          return;
        }
      code = NOP_EXPR;
      break;

    case GIMPLE_UNARY_RHS:
      /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
         (TRUTH_NOT_EXPR is an example), and GIMPLE_BINARY_RHS does not imply
         tcc_binary; this subtlety is somewhat misleading.

         Since tcc_unary is widely used in IPA-CP code to check an operation
         with one operand, here we only allow tcc_unary operations to avoid
         possible problems.  Then we can use (opclass == tcc_unary) or not to
         distinguish unary and binary. */
      if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
        return;

      rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
      break;

    case GIMPLE_BINARY_RHS:
      {
        gimple *rhs1_stmt = stmt;
        gimple *rhs2_stmt = stmt;
        tree rhs2 = gimple_assign_rhs2 (stmt);

        rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
        rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);

        if (is_gimple_ip_invariant (rhs2))
          {
            agg_value->pass_through.operand = rhs2;
            stmt = rhs1_stmt;
          }
        else if (is_gimple_ip_invariant (rhs1))
          {
            if (TREE_CODE_CLASS (code) == tcc_comparison)
              code = swap_tree_comparison (code);
            else if (!commutative_tree_code (code))
              return;

            agg_value->pass_through.operand = rhs1;
            stmt = rhs2_stmt;
            rhs1 = rhs2;
          }
        else
          return;

        if (TREE_CODE_CLASS (code) != tcc_comparison
            && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs1)))
          return;
      }
      break;

    default:
      return;
    }

  if (TREE_CODE (rhs1) != SSA_NAME)
    index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
                                               &agg_value->offset,
                                               &agg_value->by_ref);
  else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
    index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));

  if (index >= 0)
    {
      if (agg_value->offset >= 0)
        agg_value->type = TREE_TYPE (rhs1);
      agg_value->pass_through.formal_id = index;
      agg_value->pass_through.operation = code;
    }
  else
    agg_value->pass_through.operand = NULL_TREE;
}

1866 /* If STMT is a memory store to the object whose address is BASE, extract
1867 information (offset, size, and value) into CONTENT, and return true,
1868 otherwise we conservatively assume the whole object is modified with
1869 unknown content, and return false. CHECK_REF means that access to the
1870 object is expected to be in the form of a MEM_REF expression. */
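
/* For example, for a hypothetical store with CHECK_REF true and BASE
   being p_4:

     *p_4 = x_3(D) + 1;

   this would set CONTENT->offset to 0, CONTENT->size to the size of the
   stored type, and leave describing the value "x_3(D) + 1" to
   analyze_agg_content_value above.  */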
1871
1872 static bool
1873 extract_mem_content (struct ipa_func_body_info *fbi,
1874 gimple *stmt, tree base, bool check_ref,
1875 struct ipa_known_agg_contents_list *content)
1876 {
1877 HOST_WIDE_INT lhs_offset, lhs_size;
1878 bool reverse;
1879
1880 if (!is_gimple_assign (stmt))
1881 return false;
1882
1883 tree lhs = gimple_assign_lhs (stmt);
1884 tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
1885 &reverse);
1886 if (!lhs_base)
1887 return false;
1888
1889 if (check_ref)
1890 {
1891 if (TREE_CODE (lhs_base) != MEM_REF
1892 || TREE_OPERAND (lhs_base, 0) != base
1893 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1894 return false;
1895 }
1896 else if (lhs_base != base)
1897 return false;
1898
1899 content->offset = lhs_offset;
1900 content->size = lhs_size;
1901 content->type = TREE_TYPE (lhs);
1902 content->next = NULL;
1903
1904 analyze_agg_content_value (fbi, &content->value, stmt);
1905 return true;
1906 }
1907
1908 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1909 in ARG is filled in with constants or with values derived from the
1910 caller's formal parameters as described by some kinds of jump functions. FBI
1911 is the context of the caller function for interprocedural analysis. ARG can
1912 either be an aggregate expression or a pointer to an aggregate. ARG_TYPE is
1913 the type of the aggregate, JFUNC is the jump function for the aggregate. */
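
/* A small hypothetical example of what the backward scan can recognize,
   where S is the aggregate behind ARG and PARAM is a formal parameter of
   the caller:

     struct S s;
     s.a = 1;      <- becomes a constant item at the offset of field a
     s.b = param;  <- becomes a pass-through of the caller's PARAM
     bar (&s);

   assuming no statement in between clobbers s before the call.  */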
1914
1915 static void
1916 determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
1917 gcall *call, tree arg,
1918 tree arg_type,
1919 struct ipa_jump_func *jfunc)
1920 {
1921 struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
1922 bitmap visited = NULL;
1923 int item_count = 0, value_count = 0;
1924 HOST_WIDE_INT arg_offset, arg_size;
1925 tree arg_base;
1926 bool check_ref, by_ref;
1927 ao_ref r;
1928 int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items);
1929
1930 if (max_agg_items == 0)
1931 return;
1932
1933 /* The function operates in three stages. First, we prepare check_ref, r,
1934 arg_base and arg_offset based on what is actually passed as an actual
1935 argument. */
1936
1937 if (POINTER_TYPE_P (arg_type))
1938 {
1939 by_ref = true;
1940 if (TREE_CODE (arg) == SSA_NAME)
1941 {
1942 tree type_size;
1943 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
1944 || !POINTER_TYPE_P (TREE_TYPE (arg)))
1945 return;
1946 check_ref = true;
1947 arg_base = arg;
1948 arg_offset = 0;
1949 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1950 arg_size = tree_to_uhwi (type_size);
1951 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1952 }
1953 else if (TREE_CODE (arg) == ADDR_EXPR)
1954 {
1955 bool reverse;
1956
1957 arg = TREE_OPERAND (arg, 0);
1958 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1959 &arg_size, &reverse);
1960 if (!arg_base)
1961 return;
1962 if (DECL_P (arg_base))
1963 {
1964 check_ref = false;
1965 ao_ref_init (&r, arg_base);
1966 }
1967 else
1968 return;
1969 }
1970 else
1971 return;
1972 }
1973 else
1974 {
1975 bool reverse;
1976
1977 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1978
1979 by_ref = false;
1980 check_ref = false;
1981 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1982 &arg_size, &reverse);
1983 if (!arg_base)
1984 return;
1985
1986 ao_ref_init (&r, arg);
1987 }
1988
1989 /* The second stage traverses the virtual SSA web backwards starting from the
1990 call statement, looking only at individual dominating virtual operands
1991 (those whose definitions dominate the call). As long as it is confident
1992 that the content of the aggregate is affected by the definition of a
1993 virtual operand, it builds a sorted linked list of ipa_agg_jf_list describing that. */
1994
1995 for (tree dom_vuse = gimple_vuse (call); dom_vuse;)
1996 {
1997 gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);
1998
1999 if (gimple_code (stmt) == GIMPLE_PHI)
2000 {
2001 dom_vuse = get_continuation_for_phi (stmt, &r, true,
2002 fbi->aa_walk_budget,
2003 &visited, false, NULL, NULL);
2004 continue;
2005 }
2006
2007 if (stmt_may_clobber_ref_p_1 (stmt, &r))
2008 {
2009 struct ipa_known_agg_contents_list *content
2010 = XALLOCA (struct ipa_known_agg_contents_list);
2011
2012 if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
2013 break;
2014
2015 /* Now that we have a dominating virtual operand, we need to check
2016 whether its value is clobbered by any other dominating one. */
2017 if ((content->value.pass_through.formal_id >= 0
2018 || content->value.pass_through.operand)
2019 && !clobber_by_agg_contents_list_p (all_list, content))
2020 {
2021 struct ipa_known_agg_contents_list *copy
2022 = XALLOCA (struct ipa_known_agg_contents_list);
2023
2024 /* Add to the list consisting only of dominating virtual
2025 operands, whose definitions can eventually reach the call. */
2026 add_to_agg_contents_list (&list, (*copy = *content, copy));
2027
2028 if (++value_count == max_agg_items)
2029 break;
2030 }
2031
2032 /* Add to the list consisting of all dominating virtual operands. */
2033 add_to_agg_contents_list (&all_list, content);
2034
2035 if (++item_count == 2 * max_agg_items)
2036 break;
2037 }
2038 dom_vuse = gimple_vuse (stmt);
2039 }
2040
2041 if (visited)
2042 BITMAP_FREE (visited);
2043
2044 /* Third stage just goes over the list and creates an appropriate vector of
2045 ipa_agg_jf_item structures out of it, of course only if there are
2046 any meaningful items to begin with. */
2047
2048 if (value_count)
2049 {
2050 jfunc->agg.by_ref = by_ref;
2051 build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
2052 }
2053 }
2054
2055
2056 /* Return the type of the Ith formal parameter of the callee associated
2057 with call graph edge E. */
2058
2059 tree
2060 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
2061 {
2062 int n;
2063 tree type = (e->callee
2064 ? TREE_TYPE (e->callee->decl)
2065 : gimple_call_fntype (e->call_stmt));
2066 tree t = TYPE_ARG_TYPES (type);
2067
2068 for (n = 0; n < i; n++)
2069 {
2070 if (!t)
2071 break;
2072 t = TREE_CHAIN (t);
2073 }
2074 if (t)
2075 return TREE_VALUE (t);
2076 if (!e->callee)
2077 return NULL;
2078 t = DECL_ARGUMENTS (e->callee->decl);
2079 for (n = 0; n < i; n++)
2080 {
2081 if (!t)
2082 return NULL;
2083 t = TREE_CHAIN (t);
2084 }
2085 if (t)
2086 return TREE_TYPE (t);
2087 return NULL;
2088 }
2089
2090 /* Return ipa_bits with VALUE and MASK values, which can be either a newly
2091 allocated structure or a previously existing one shared with other jump
2092 functions and/or transformation summaries. */
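
/* For example, two jump functions that both describe a pointer known to
   be 16-byte aligned (value 0 and a mask with the low four bits cleared
   within the pointer precision) will end up sharing a single ipa_bits
   record found through ipa_bits_hash_table.  */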
2093
2094 ipa_bits *
2095 ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
2096 {
2097 ipa_bits tmp;
2098 tmp.value = value;
2099 tmp.mask = mask;
2100
2101 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
2102 if (*slot)
2103 return *slot;
2104
2105 ipa_bits *res = ggc_alloc<ipa_bits> ();
2106 res->value = value;
2107 res->mask = mask;
2108 *slot = res;
2109
2110 return res;
2111 }
2112
2113 /* Assign to JF a pointer to an ipa_bits structure with VALUE and MASK. Use
2114 the hash table to avoid creating multiple identical ipa_bits structures. */
2115
2116 static void
2117 ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
2118 const widest_int &mask)
2119 {
2120 jf->bits = ipa_get_ipa_bits_for_value (value, mask);
2121 }
2122
2123 /* Return a pointer to a value_range just like *TMP, but either find it in
2124 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
2125
2126 static value_range *
2127 ipa_get_value_range (value_range *tmp)
2128 {
2129 value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
2130 if (*slot)
2131 return *slot;
2132
2133 value_range *vr = new (ggc_alloc<value_range> ()) value_range;
2134 *vr = *tmp;
2135 *slot = vr;
2136
2137 return vr;
2138 }
2139
2140 /* Return a pointer to a value range consisting of KIND, MIN, MAX and an empty
2141 equiv set. Use the hash table to avoid creating multiple identical copies of
2142 value_ranges. */
2143
2144 static value_range *
2145 ipa_get_value_range (enum value_range_kind kind, tree min, tree max)
2146 {
2147 value_range tmp (min, max, kind);
2148 return ipa_get_value_range (&tmp);
2149 }
2150
2151 /* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
2152 a NULL equiv bitmap. Use the hash table to avoid creating multiple
2153 identical value_range structures. */
2154
2155 static void
2156 ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
2157 tree min, tree max)
2158 {
2159 jf->m_vr = ipa_get_value_range (type, min, max);
2160 }
2161
2162 /* Assign to JF a pointer to a value_range just like TMP but either fetch a
2163 copy from ipa_vr_hash_table or allocate a new one in GC memory. */
2164
2165 static void
2166 ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
2167 {
2168 jf->m_vr = ipa_get_value_range (tmp);
2169 }
2170
2171 /* Compute jump function for all arguments of callsite CS and insert the
2172 information in the jump_functions array in the ipa_edge_args corresponding
2173 to this callsite. */
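
/* For instance, for a hypothetical call in the body of "caller (int x)":

     callee (x, 7, x + 1);

   the loop below would typically produce a simple pass-through jump
   function for argument 0, a constant jump function for argument 1 and an
   arithmetic pass-through (computed by compute_complex_assign_jump_func)
   for argument 2.  */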
2174
2175 static void
2176 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
2177 struct cgraph_edge *cs)
2178 {
2179 class ipa_node_params *info = IPA_NODE_REF (cs->caller);
2180 class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (cs);
2181 gcall *call = cs->call_stmt;
2182 int n, arg_num = gimple_call_num_args (call);
2183 bool useful_context = false;
2184
2185 if (arg_num == 0 || args->jump_functions)
2186 return;
2187 vec_safe_grow_cleared (args->jump_functions, arg_num, true);
2188 if (flag_devirtualize)
2189 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num, true);
2190
2191 if (gimple_call_internal_p (call))
2192 return;
2193 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
2194 return;
2195
2196 for (n = 0; n < arg_num; n++)
2197 {
2198 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
2199 tree arg = gimple_call_arg (call, n);
2200 tree param_type = ipa_get_callee_param_type (cs, n);
2201 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
2202 {
2203 tree instance;
2204 class ipa_polymorphic_call_context context (cs->caller->decl,
2205 arg, cs->call_stmt,
2206 &instance);
2207 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
2208 &fbi->aa_walk_budget);
2209 *ipa_get_ith_polymorhic_call_context (args, n) = context;
2210 if (!context.useless_p ())
2211 useful_context = true;
2212 }
2213
2214 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2215 {
2216 bool addr_nonzero = false;
2217 bool strict_overflow = false;
2218
2219 if (TREE_CODE (arg) == SSA_NAME
2220 && param_type
2221 && get_ptr_nonnull (arg))
2222 addr_nonzero = true;
2223 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
2224 addr_nonzero = true;
2225
2226 if (addr_nonzero)
2227 {
2228 tree z = build_int_cst (TREE_TYPE (arg), 0);
2229 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
2230 }
2231 else
2232 gcc_assert (!jfunc->m_vr);
2233 }
2234 else
2235 {
2236 wide_int min, max;
2237 value_range_kind kind;
2238 if (TREE_CODE (arg) == SSA_NAME
2239 && param_type
2240 && (kind = get_range_info (arg, &min, &max))
2241 && (kind == VR_RANGE || kind == VR_ANTI_RANGE))
2242 {
2243 value_range resvr;
2244 value_range tmpvr (wide_int_to_tree (TREE_TYPE (arg), min),
2245 wide_int_to_tree (TREE_TYPE (arg), max),
2246 kind);
2247 range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
2248 &tmpvr, TREE_TYPE (arg));
2249 if (!resvr.undefined_p () && !resvr.varying_p ())
2250 ipa_set_jfunc_vr (jfunc, &resvr);
2251 else
2252 gcc_assert (!jfunc->m_vr);
2253 }
2254 else
2255 gcc_assert (!jfunc->m_vr);
2256 }
2257
2258 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
2259 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
2260 {
2261 if (TREE_CODE (arg) == SSA_NAME)
2262 ipa_set_jfunc_bits (jfunc, 0,
2263 widest_int::from (get_nonzero_bits (arg),
2264 TYPE_SIGN (TREE_TYPE (arg))));
2265 else
2266 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
2267 }
2268 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
2269 {
2270 unsigned HOST_WIDE_INT bitpos;
2271 unsigned align;
2272
2273 get_pointer_alignment_1 (arg, &align, &bitpos);
2274 widest_int mask = wi::bit_and_not
2275 (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
2276 align / BITS_PER_UNIT - 1);
2277 widest_int value = bitpos / BITS_PER_UNIT;
2278 ipa_set_jfunc_bits (jfunc, value, mask);
2279 }
2280 else
2281 gcc_assert (!jfunc->bits);
2282
2283 if (is_gimple_ip_invariant (arg)
2284 || (VAR_P (arg)
2285 && is_global_var (arg)
2286 && TREE_READONLY (arg)))
2287 ipa_set_jf_constant (jfunc, arg, cs);
2288 else if (!is_gimple_reg_type (TREE_TYPE (arg))
2289 && TREE_CODE (arg) == PARM_DECL)
2290 {
2291 int index = ipa_get_param_decl_index (info, arg);
2292
2293 gcc_assert (index >= 0);
2294 /* Aggregate passed by value, check for pass-through, otherwise we
2295 will attempt to fill in aggregate contents later in this
2296 for loop. */
2297 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
2298 {
2299 ipa_set_jf_simple_pass_through (jfunc, index, false);
2300 continue;
2301 }
2302 }
2303 else if (TREE_CODE (arg) == SSA_NAME)
2304 {
2305 if (SSA_NAME_IS_DEFAULT_DEF (arg))
2306 {
2307 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
2308 if (index >= 0)
2309 {
2310 bool agg_p;
2311 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
2312 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
2313 }
2314 }
2315 else
2316 {
2317 gimple *stmt = SSA_NAME_DEF_STMT (arg);
2318 if (is_gimple_assign (stmt))
2319 compute_complex_assign_jump_func (fbi, info, jfunc,
2320 call, stmt, arg, param_type);
2321 else if (gimple_code (stmt) == GIMPLE_PHI)
2322 compute_complex_ancestor_jump_func (fbi, info, jfunc,
2323 call,
2324 as_a <gphi *> (stmt));
2325 }
2326 }
2327
2328 /* If ARG is a pointer, we cannot use its type to determine the type of the
2329 aggregate passed (because type conversions are ignored in gimple). Usually
2330 we can safely get the type from the function declaration, but in the case of
2331 K&R prototypes or variadic functions we can try our luck with the type of
2332 the pointer passed. TODO: Since we look for actual initialization of the
2333 memory object, we might do better to derive the type from the memory stores we find. */
2334 if (!param_type)
2335 param_type = TREE_TYPE (arg);
2336
2337 if ((jfunc->type != IPA_JF_PASS_THROUGH
2338 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
2339 && (jfunc->type != IPA_JF_ANCESTOR
2340 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2341 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
2342 || POINTER_TYPE_P (param_type)))
2343 determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
2344 }
2345 if (!useful_context)
2346 vec_free (args->polymorphic_call_contexts);
2347 }
2348
2349 /* Compute jump functions for all edges - both direct and indirect - outgoing
2350 from BB. */
2351
2352 static void
2353 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2354 {
2355 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2356 int i;
2357 struct cgraph_edge *cs;
2358
2359 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2360 {
2361 struct cgraph_node *callee = cs->callee;
2362
2363 if (callee)
2364 {
2365 callee = callee->ultimate_alias_target ();
2366 /* We do not need to bother analyzing calls to unknown functions
2367 unless they may become known during lto/whopr. */
2368 if (!callee->definition && !flag_lto
2369 && !gimple_call_fnspec (cs->call_stmt).known_p ())
2370 continue;
2371 }
2372 ipa_compute_jump_functions_for_edge (fbi, cs);
2373 }
2374 }
2375
2376 /* If STMT looks like a statement loading a value from a member pointer formal
2377 parameter, return that parameter and store the offset of the field to
2378 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2379 might be clobbered). If USE_DELTA, then we look for a use of the delta
2380 field rather than the pfn. */
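
/* The loads matched here look like those in the member pointer pattern
   described before ipa_analyze_indirect_call_uses below, e.g.:

     f$__delta_5 = f.__delta;
     f$__pfn_24 = MEM[(struct *)&f + 4B];

   where f is a PARM_DECL whose type looks like a member pointer.  */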
2381
2382 static tree
2383 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2384 HOST_WIDE_INT *offset_p)
2385 {
2386 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2387
2388 if (!gimple_assign_single_p (stmt))
2389 return NULL_TREE;
2390
2391 rhs = gimple_assign_rhs1 (stmt);
2392 if (TREE_CODE (rhs) == COMPONENT_REF)
2393 {
2394 ref_field = TREE_OPERAND (rhs, 1);
2395 rhs = TREE_OPERAND (rhs, 0);
2396 }
2397 else
2398 ref_field = NULL_TREE;
2399 if (TREE_CODE (rhs) != MEM_REF)
2400 return NULL_TREE;
2401 rec = TREE_OPERAND (rhs, 0);
2402 if (TREE_CODE (rec) != ADDR_EXPR)
2403 return NULL_TREE;
2404 rec = TREE_OPERAND (rec, 0);
2405 if (TREE_CODE (rec) != PARM_DECL
2406 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2407 return NULL_TREE;
2408 ref_offset = TREE_OPERAND (rhs, 1);
2409
2410 if (use_delta)
2411 fld = delta_field;
2412 else
2413 fld = ptr_field;
2414 if (offset_p)
2415 *offset_p = int_bit_position (fld);
2416
2417 if (ref_field)
2418 {
2419 if (integer_nonzerop (ref_offset))
2420 return NULL_TREE;
2421 return ref_field == fld ? rec : NULL_TREE;
2422 }
2423 else
2424 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2425 : NULL_TREE;
2426 }
2427
2428 /* Returns true iff T is an SSA_NAME defined by a statement. */
2429
2430 static bool
2431 ipa_is_ssa_with_stmt_def (tree t)
2432 {
2433 if (TREE_CODE (t) == SSA_NAME
2434 && !SSA_NAME_IS_DEFAULT_DEF (t))
2435 return true;
2436 else
2437 return false;
2438 }
2439
2440 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2441 call to parameter number PARAM_INDEX. NODE is the caller. Return the
2442 indirect call graph edge.
2443 If POLYMORPHIC is true, record it as the destination of a polymorphic call. */
2444
2445 static struct cgraph_edge *
2446 ipa_note_param_call (struct cgraph_node *node, int param_index,
2447 gcall *stmt, bool polymorphic)
2448 {
2449 struct cgraph_edge *cs;
2450
2451 cs = node->get_edge (stmt);
2452 cs->indirect_info->param_index = param_index;
2453 cs->indirect_info->agg_contents = 0;
2454 cs->indirect_info->member_ptr = 0;
2455 cs->indirect_info->guaranteed_unmodified = 0;
2456 ipa_set_param_used_by_indirect_call (IPA_NODE_REF (node),
2457 param_index, true);
2458 if (cs->indirect_info->polymorphic || polymorphic)
2459 ipa_set_param_used_by_polymorphic_call
2460 (IPA_NODE_REF (node), param_index, true);
2461 return cs;
2462 }
2463
2464 /* Analyze the CALL and examine uses of formal parameters of the caller
2465 FBI->node (described by FBI->info), which also holds intermediate
2466 information about each formal parameter. Currently it checks
2467 whether the call calls a pointer that is a formal parameter and if so, the
2468 parameter is marked with the called flag and an indirect call graph edge
2469 describing the call is created. This is very simple for ordinary pointers
2470 represented in SSA but not-so-nice when it comes to member pointers. The
2471 ugly part of this function does nothing more than trying to match the
2472 pattern of such a call. An example of such a pattern is the gimple dump
2473 below, the call is on the last line:
2474
2475 <bb 2>:
2476 f$__delta_5 = f.__delta;
2477 f$__pfn_24 = f.__pfn;
2478
2479 or
2480 <bb 2>:
2481 f$__delta_5 = MEM[(struct *)&f];
2482 f$__pfn_24 = MEM[(struct *)&f + 4B];
2483
2484 and a few lines below:
2485
2486 <bb 5>
2487 D.2496_3 = (int) f$__pfn_24;
2488 D.2497_4 = D.2496_3 & 1;
2489 if (D.2497_4 != 0)
2490 goto <bb 3>;
2491 else
2492 goto <bb 4>;
2493
2494 <bb 6>:
2495 D.2500_7 = (unsigned int) f$__delta_5;
2496 D.2501_8 = &S + D.2500_7;
2497 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2498 D.2503_10 = *D.2502_9;
2499 D.2504_12 = f$__pfn_24 + -1;
2500 D.2505_13 = (unsigned int) D.2504_12;
2501 D.2506_14 = D.2503_10 + D.2505_13;
2502 D.2507_15 = *D.2506_14;
2503 iftmp.11_16 = (String:: *) D.2507_15;
2504
2505 <bb 7>:
2506 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2507 D.2500_19 = (unsigned int) f$__delta_5;
2508 D.2508_20 = &S + D.2500_19;
2509 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2510
2511 Such patterns are results of simple calls to a member pointer:
2512
2513 int doprinting (int (MyString::* f)(int) const)
2514 {
2515 MyString S ("somestring");
2516
2517 return (S.*f)(4);
2518 }
2519
2520 Moreover, the function also looks for called pointers loaded from aggregates
2521 passed by value or reference. */
2522
2523 static void
2524 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2525 tree target)
2526 {
2527 class ipa_node_params *info = fbi->info;
2528 HOST_WIDE_INT offset;
2529 bool by_ref;
2530
2531 if (SSA_NAME_IS_DEFAULT_DEF (target))
2532 {
2533 tree var = SSA_NAME_VAR (target);
2534 int index = ipa_get_param_decl_index (info, var);
2535 if (index >= 0)
2536 ipa_note_param_call (fbi->node, index, call, false);
2537 return;
2538 }
2539
2540 int index;
2541 gimple *def = SSA_NAME_DEF_STMT (target);
2542 bool guaranteed_unmodified;
2543 if (gimple_assign_single_p (def)
2544 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2545 gimple_assign_rhs1 (def), &index, &offset,
2546 NULL, &by_ref, &guaranteed_unmodified))
2547 {
2548 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2549 call, false);
2550 cs->indirect_info->offset = offset;
2551 cs->indirect_info->agg_contents = 1;
2552 cs->indirect_info->by_ref = by_ref;
2553 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2554 return;
2555 }
2556
2557 /* Now we need to try to match the complex pattern of calling a member
2558 pointer. */
2559 if (gimple_code (def) != GIMPLE_PHI
2560 || gimple_phi_num_args (def) != 2
2561 || !POINTER_TYPE_P (TREE_TYPE (target))
2562 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2563 return;
2564
2565 /* First, we need to check whether one of these is a load from a member
2566 pointer that is a parameter to this function. */
2567 tree n1 = PHI_ARG_DEF (def, 0);
2568 tree n2 = PHI_ARG_DEF (def, 1);
2569 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2570 return;
2571 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2572 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2573
2574 tree rec;
2575 basic_block bb, virt_bb;
2576 basic_block join = gimple_bb (def);
2577 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2578 {
2579 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2580 return;
2581
2582 bb = EDGE_PRED (join, 0)->src;
2583 virt_bb = gimple_bb (d2);
2584 }
2585 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2586 {
2587 bb = EDGE_PRED (join, 1)->src;
2588 virt_bb = gimple_bb (d1);
2589 }
2590 else
2591 return;
2592
2593 /* Second, we need to check that the basic blocks are laid out in the way
2594 corresponding to the pattern. */
2595
2596 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2597 || single_pred (virt_bb) != bb
2598 || single_succ (virt_bb) != join)
2599 return;
2600
2601 /* Third, let's see that the branching is done depending on the least
2602 significant bit of the pfn. */
2603
2604 gimple *branch = last_stmt (bb);
2605 if (!branch || gimple_code (branch) != GIMPLE_COND)
2606 return;
2607
2608 if ((gimple_cond_code (branch) != NE_EXPR
2609 && gimple_cond_code (branch) != EQ_EXPR)
2610 || !integer_zerop (gimple_cond_rhs (branch)))
2611 return;
2612
2613 tree cond = gimple_cond_lhs (branch);
2614 if (!ipa_is_ssa_with_stmt_def (cond))
2615 return;
2616
2617 def = SSA_NAME_DEF_STMT (cond);
2618 if (!is_gimple_assign (def)
2619 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2620 || !integer_onep (gimple_assign_rhs2 (def)))
2621 return;
2622
2623 cond = gimple_assign_rhs1 (def);
2624 if (!ipa_is_ssa_with_stmt_def (cond))
2625 return;
2626
2627 def = SSA_NAME_DEF_STMT (cond);
2628
2629 if (is_gimple_assign (def)
2630 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2631 {
2632 cond = gimple_assign_rhs1 (def);
2633 if (!ipa_is_ssa_with_stmt_def (cond))
2634 return;
2635 def = SSA_NAME_DEF_STMT (cond);
2636 }
2637
2638 tree rec2;
2639 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2640 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2641 == ptrmemfunc_vbit_in_delta),
2642 NULL);
2643 if (rec != rec2)
2644 return;
2645
2646 index = ipa_get_param_decl_index (info, rec);
2647 if (index >= 0
2648 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2649 {
2650 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2651 call, false);
2652 cs->indirect_info->offset = offset;
2653 cs->indirect_info->agg_contents = 1;
2654 cs->indirect_info->member_ptr = 1;
2655 cs->indirect_info->guaranteed_unmodified = 1;
2656 }
2657
2658 return;
2659 }
2660
2661 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2662 object referenced in the expression is a formal parameter of the caller
2663 FBI->node (described by FBI->info), create a call note for the
2664 statement. */
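
/* A typical (hypothetical) shape of gimple this handles is:

     OBJ_TYPE_REF(_7;obj_2(D)->2) (obj_2(D));

   where obj_2(D) is the default definition of a formal parameter; the
   created call note then records the parameter index, a zero ancestor
   offset and OTR token 2.  */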
2665
2666 static void
2667 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2668 gcall *call, tree target)
2669 {
2670 tree obj = OBJ_TYPE_REF_OBJECT (target);
2671 int index;
2672 HOST_WIDE_INT anc_offset;
2673
2674 if (!flag_devirtualize)
2675 return;
2676
2677 if (TREE_CODE (obj) != SSA_NAME)
2678 return;
2679
2680 class ipa_node_params *info = fbi->info;
2681 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2682 {
2683 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2684 return;
2685
2686 anc_offset = 0;
2687 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2688 gcc_assert (index >= 0);
2689 if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
2690 call))
2691 return;
2692 }
2693 else
2694 {
2695 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2696 tree expr;
2697
2698 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2699 if (!expr)
2700 return;
2701 index = ipa_get_param_decl_index (info,
2702 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2703 gcc_assert (index >= 0);
2704 if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
2705 call, anc_offset))
2706 return;
2707 }
2708
2709 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2710 call, true);
2711 class cgraph_indirect_call_info *ii = cs->indirect_info;
2712 ii->offset = anc_offset;
2713 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2714 ii->otr_type = obj_type_ref_class (target);
2715 ii->polymorphic = 1;
2716 }
2717
2718 /* Analyze a call statement CALL to determine whether and how it utilizes
2719 formal parameters of the caller (described by FBI->info). FBI also holds
2720 intermediate information about each formal parameter. */
2721
2722 static void
2723 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2724 {
2725 tree target = gimple_call_fn (call);
2726
2727 if (!target
2728 || (TREE_CODE (target) != SSA_NAME
2729 && !virtual_method_call_p (target)))
2730 return;
2731
2732 struct cgraph_edge *cs = fbi->node->get_edge (call);
2733 /* If we previously turned the call into a direct call, there is
2734 no need to analyze it. */
2735 if (cs && !cs->indirect_unknown_callee)
2736 return;
2737
2738 if (cs->indirect_info->polymorphic && flag_devirtualize)
2739 {
2740 tree instance;
2741 tree target = gimple_call_fn (call);
2742 ipa_polymorphic_call_context context (current_function_decl,
2743 target, call, &instance);
2744
2745 gcc_checking_assert (cs->indirect_info->otr_type
2746 == obj_type_ref_class (target));
2747 gcc_checking_assert (cs->indirect_info->otr_token
2748 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2749
2750 cs->indirect_info->vptr_changed
2751 = !context.get_dynamic_type (instance,
2752 OBJ_TYPE_REF_OBJECT (target),
2753 obj_type_ref_class (target), call,
2754 &fbi->aa_walk_budget);
2755 cs->indirect_info->context = context;
2756 }
2757
2758 if (TREE_CODE (target) == SSA_NAME)
2759 ipa_analyze_indirect_call_uses (fbi, call, target);
2760 else if (virtual_method_call_p (target))
2761 ipa_analyze_virtual_call_uses (fbi, call, target);
2762 }
2763
2764
2765 /* Analyze the call statement STMT with respect to formal parameters (described
2766 in FBI->info) of the caller given by FBI->node. Currently it only checks
2767 whether formal parameters are called. */
2768
2769 static void
2770 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2771 {
2772 if (is_gimple_call (stmt))
2773 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2774 }
2775
2776 /* Callback of walk_stmt_load_store_addr_ops for the visit_load, visit_store
2777 and visit_addr operations. If OP is a parameter declaration, mark it as
2778 used in the info structure passed in DATA. */
2779
2780 static bool
2781 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2782 {
2783 class ipa_node_params *info = (class ipa_node_params *) data;
2784
2785 op = get_base_address (op);
2786 if (op
2787 && TREE_CODE (op) == PARM_DECL)
2788 {
2789 int index = ipa_get_param_decl_index (info, op);
2790 gcc_assert (index >= 0);
2791 ipa_set_param_used (info, index, true);
2792 }
2793
2794 return false;
2795 }
2796
2797 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2798 the findings in various structures of the associated ipa_node_params
2799 structure, such as parameter flags, notes etc. FBI holds various data about
2800 the function being analyzed. */
2801
2802 static void
2803 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2804 {
2805 gimple_stmt_iterator gsi;
2806 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2807 {
2808 gimple *stmt = gsi_stmt (gsi);
2809
2810 if (is_gimple_debug (stmt))
2811 continue;
2812
2813 ipa_analyze_stmt_uses (fbi, stmt);
2814 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2815 visit_ref_for_mod_analysis,
2816 visit_ref_for_mod_analysis,
2817 visit_ref_for_mod_analysis);
2818 }
2819 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2820 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2821 visit_ref_for_mod_analysis,
2822 visit_ref_for_mod_analysis,
2823 visit_ref_for_mod_analysis);
2824 }
2825
2826 /* Calculate controlled uses of parameters of NODE. */
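
/* A use is "controlled" if the parameter appears only as an actual
   argument of calls; e.g. in a hypothetical

     void wrapper (void *p) { use1 (p); use2 (p); }

   P has two controlled uses, whereas any other non-debug use (say,
   storing P into global memory) forces IPA_UNDESCRIBED_USE below.  */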
2827
2828 static void
2829 ipa_analyze_controlled_uses (struct cgraph_node *node)
2830 {
2831 class ipa_node_params *info = IPA_NODE_REF (node);
2832
2833 for (int i = 0; i < ipa_get_param_count (info); i++)
2834 {
2835 tree parm = ipa_get_param (info, i);
2836 int controlled_uses = 0;
2837
2838 /* For SSA regs see if the parameter is used. For non-SSA parameters
2839 we compute the flag during modification analysis. */
2840 if (is_gimple_reg (parm))
2841 {
2842 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2843 parm);
2844 if (ddef && !has_zero_uses (ddef))
2845 {
2846 imm_use_iterator imm_iter;
2847 use_operand_p use_p;
2848
2849 ipa_set_param_used (info, i, true);
2850 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2851 if (!is_gimple_call (USE_STMT (use_p)))
2852 {
2853 if (!is_gimple_debug (USE_STMT (use_p)))
2854 {
2855 controlled_uses = IPA_UNDESCRIBED_USE;
2856 break;
2857 }
2858 }
2859 else
2860 controlled_uses++;
2861 }
2862 else
2863 controlled_uses = 0;
2864 }
2865 else
2866 controlled_uses = IPA_UNDESCRIBED_USE;
2867 ipa_set_controlled_uses (info, i, controlled_uses);
2868 }
2869 }
2870
2871 /* Free stuff in BI. */
2872
2873 static void
2874 free_ipa_bb_info (struct ipa_bb_info *bi)
2875 {
2876 bi->cg_edges.release ();
2877 bi->param_aa_statuses.release ();
2878 }
2879
2880 /* Dominator walker driving the analysis. */
2881
2882 class analysis_dom_walker : public dom_walker
2883 {
2884 public:
2885 analysis_dom_walker (struct ipa_func_body_info *fbi)
2886 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2887
2888 virtual edge before_dom_children (basic_block);
2889
2890 private:
2891 struct ipa_func_body_info *m_fbi;
2892 };
2893
2894 edge
2895 analysis_dom_walker::before_dom_children (basic_block bb)
2896 {
2897 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2898 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2899 return NULL;
2900 }
2901
2902 /* Release body info FBI. */
2903
2904 void
2905 ipa_release_body_info (struct ipa_func_body_info *fbi)
2906 {
2907 int i;
2908 struct ipa_bb_info *bi;
2909
2910 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2911 free_ipa_bb_info (bi);
2912 fbi->bb_infos.release ();
2913 }
2914
2915 /* Initialize the array describing properties of formal parameters
2916 of NODE, analyze their uses and compute jump functions associated
2917 with actual arguments of calls from within NODE. */
2918
2919 void
2920 ipa_analyze_node (struct cgraph_node *node)
2921 {
2922 struct ipa_func_body_info fbi;
2923 class ipa_node_params *info;
2924
2925 ipa_check_create_node_params ();
2926 ipa_check_create_edge_args ();
2927 info = IPA_NODE_REF_GET_CREATE (node);
2928
2929 if (info->analysis_done)
2930 return;
2931 info->analysis_done = 1;
2932
2933 if (ipa_func_spec_opts_forbid_analysis_p (node))
2934 {
2935 for (int i = 0; i < ipa_get_param_count (info); i++)
2936 {
2937 ipa_set_param_used (info, i, true);
2938 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2939 }
2940 return;
2941 }
2942
2943 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2944 push_cfun (func);
2945 calculate_dominance_info (CDI_DOMINATORS);
2946 ipa_initialize_node_params (node);
2947 ipa_analyze_controlled_uses (node);
2948
2949 fbi.node = node;
2950 fbi.info = IPA_NODE_REF (node);
2951 fbi.bb_infos = vNULL;
2952 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
2953 fbi.param_count = ipa_get_param_count (info);
2954 fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
2955
2956 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2957 {
2958 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2959 bi->cg_edges.safe_push (cs);
2960 }
2961
2962 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2963 {
2964 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2965 bi->cg_edges.safe_push (cs);
2966 }
2967
2968 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2969
2970 ipa_release_body_info (&fbi);
2971 free_dominance_info (CDI_DOMINATORS);
2972 pop_cfun ();
2973 }
2974
2975 /* Update the jump functions associated with call graph edge E when the call
2976 graph edge CS is being inlined, assuming that E->caller is already (possibly
2977 indirectly) inlined into CS->callee and that E has not been inlined. */
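
/* A hypothetical example of the composition performed below: if E passes
   along parameter 0 of the function being inlined as a simple
   pass-through and CS itself passes the constant 7 in that position, the
   updated jump function of E becomes the constant 7.  Analogous rules
   combine ancestor, arithmetic and aggregate jump functions.  */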
2978
2979 static void
2980 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2981 struct cgraph_edge *e)
2982 {
2983 class ipa_edge_args *top = IPA_EDGE_REF (cs);
2984 class ipa_edge_args *args = IPA_EDGE_REF (e);
2985 if (!args)
2986 return;
2987 int count = ipa_get_cs_argument_count (args);
2988 int i;
2989
2990 for (i = 0; i < count; i++)
2991 {
2992 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2993 class ipa_polymorphic_call_context *dst_ctx
2994 = ipa_get_ith_polymorhic_call_context (args, i);
2995
2996 if (dst->agg.items)
2997 {
2998 struct ipa_agg_jf_item *item;
2999 int j;
3000
3001 FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
3002 {
3003 int dst_fid;
3004 struct ipa_jump_func *src;
3005
3006 if (item->jftype != IPA_JF_PASS_THROUGH
3007 && item->jftype != IPA_JF_LOAD_AGG)
3008 continue;
3009
3010 dst_fid = item->value.pass_through.formal_id;
3011 if (!top || dst_fid >= ipa_get_cs_argument_count (top))
3012 {
3013 item->jftype = IPA_JF_UNKNOWN;
3014 continue;
3015 }
3016
3017 item->value.pass_through.formal_id = -1;
3018 src = ipa_get_ith_jump_func (top, dst_fid);
3019 if (src->type == IPA_JF_CONST)
3020 {
3021 if (item->jftype == IPA_JF_PASS_THROUGH
3022 && item->value.pass_through.operation == NOP_EXPR)
3023 {
3024 item->jftype = IPA_JF_CONST;
3025 item->value.constant = src->value.constant.value;
3026 continue;
3027 }
3028 }
3029 else if (src->type == IPA_JF_PASS_THROUGH
3030 && src->value.pass_through.operation == NOP_EXPR)
3031 {
3032 if (item->jftype == IPA_JF_PASS_THROUGH
3033 || !item->value.load_agg.by_ref
3034 || src->value.pass_through.agg_preserved)
3035 item->value.pass_through.formal_id
3036 = src->value.pass_through.formal_id;
3037 }
3038 else if (src->type == IPA_JF_ANCESTOR)
3039 {
3040 if (item->jftype == IPA_JF_PASS_THROUGH)
3041 {
3042 if (!src->value.ancestor.offset)
3043 item->value.pass_through.formal_id
3044 = src->value.ancestor.formal_id;
3045 }
3046 else if (src->value.ancestor.agg_preserved)
3047 {
3048 gcc_checking_assert (item->value.load_agg.by_ref);
3049
3050 item->value.pass_through.formal_id
3051 = src->value.ancestor.formal_id;
3052 item->value.load_agg.offset
3053 += src->value.ancestor.offset;
3054 }
3055 }
3056
3057 if (item->value.pass_through.formal_id < 0)
3058 item->jftype = IPA_JF_UNKNOWN;
3059 }
3060 }
3061
3062 if (!top)
3063 {
3064 ipa_set_jf_unknown (dst);
3065 continue;
3066 }
3067
3068 if (dst->type == IPA_JF_ANCESTOR)
3069 {
3070 struct ipa_jump_func *src;
3071 int dst_fid = dst->value.ancestor.formal_id;
3072 class ipa_polymorphic_call_context *src_ctx
3073 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3074
3075 /* A variable number of arguments can cause havoc if we try to access
3076 an argument that does not exist on the inlined edge. So make sure we
3077 don't. */
3078 if (dst_fid >= ipa_get_cs_argument_count (top))
3079 {
3080 ipa_set_jf_unknown (dst);
3081 continue;
3082 }
3083
3084 src = ipa_get_ith_jump_func (top, dst_fid);
3085
3086 if (src_ctx && !src_ctx->useless_p ())
3087 {
3088 class ipa_polymorphic_call_context ctx = *src_ctx;
3089
3090 /* TODO: Make type preserved safe WRT contexts. */
3091 if (!ipa_get_jf_ancestor_type_preserved (dst))
3092 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3093 ctx.offset_by (dst->value.ancestor.offset);
3094 if (!ctx.useless_p ())
3095 {
3096 if (!dst_ctx)
3097 {
3098 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3099 count, true);
3100 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3101 }
3102
3103 dst_ctx->combine_with (ctx);
3104 }
3105 }
3106
3107 /* The parameter and argument in an ancestor jump function must be of
3108 pointer type, which means access to the aggregate must be by reference. */
3109 gcc_assert (!src->agg.items || src->agg.by_ref);
3110
3111 if (src->agg.items && dst->value.ancestor.agg_preserved)
3112 {
3113 struct ipa_agg_jf_item *item;
3114 int j;
3115
3116 /* Currently we do not produce clobber aggregate jump functions,
3117 replace with merging when we do. */
3118 gcc_assert (!dst->agg.items);
3119
3120 dst->agg.items = vec_safe_copy (src->agg.items);
3121 dst->agg.by_ref = src->agg.by_ref;
3122 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
3123 item->offset -= dst->value.ancestor.offset;
3124 }
3125
3126 if (src->type == IPA_JF_PASS_THROUGH
3127 && src->value.pass_through.operation == NOP_EXPR)
3128 {
3129 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
3130 dst->value.ancestor.agg_preserved &=
3131 src->value.pass_through.agg_preserved;
3132 }
3133 else if (src->type == IPA_JF_ANCESTOR)
3134 {
3135 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
3136 dst->value.ancestor.offset += src->value.ancestor.offset;
3137 dst->value.ancestor.agg_preserved &=
3138 src->value.ancestor.agg_preserved;
3139 }
3140 else
3141 ipa_set_jf_unknown (dst);
3142 }
3143 else if (dst->type == IPA_JF_PASS_THROUGH)
3144 {
3145 struct ipa_jump_func *src;
3146 /* We must check the range because of calls with a variable number of
3147 arguments, and we cannot combine jump functions with operations. */
3148 if (dst->value.pass_through.operation == NOP_EXPR
3149 && (top && dst->value.pass_through.formal_id
3150 < ipa_get_cs_argument_count (top)))
3151 {
3152 int dst_fid = dst->value.pass_through.formal_id;
3153 src = ipa_get_ith_jump_func (top, dst_fid);
3154 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
3155 class ipa_polymorphic_call_context *src_ctx
3156 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3157
3158 if (src_ctx && !src_ctx->useless_p ())
3159 {
3160 class ipa_polymorphic_call_context ctx = *src_ctx;
3161
3162 /* TODO: Make type preserved safe WRT contexts. */
3163 if (!ipa_get_jf_pass_through_type_preserved (dst))
3164 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3165 if (!ctx.useless_p ())
3166 {
3167 if (!dst_ctx)
3168 {
3169 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3170 count, true);
3171 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3172 }
3173 dst_ctx->combine_with (ctx);
3174 }
3175 }
3176 switch (src->type)
3177 {
3178 case IPA_JF_UNKNOWN:
3179 ipa_set_jf_unknown (dst);
3180 break;
3181 case IPA_JF_CONST:
3182 ipa_set_jf_cst_copy (dst, src);
3183 break;
3184
3185 case IPA_JF_PASS_THROUGH:
3186 {
3187 int formal_id = ipa_get_jf_pass_through_formal_id (src);
3188 enum tree_code operation;
3189 operation = ipa_get_jf_pass_through_operation (src);
3190
3191 if (operation == NOP_EXPR)
3192 {
3193 bool agg_p;
3194 agg_p = dst_agg_p
3195 && ipa_get_jf_pass_through_agg_preserved (src);
3196 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
3197 }
3198 else if (TREE_CODE_CLASS (operation) == tcc_unary)
3199 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
3200 else
3201 {
3202 tree operand = ipa_get_jf_pass_through_operand (src);
3203 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
3204 operation);
3205 }
3206 break;
3207 }
3208 case IPA_JF_ANCESTOR:
3209 {
3210 bool agg_p;
3211 agg_p = dst_agg_p
3212 && ipa_get_jf_ancestor_agg_preserved (src);
3213 ipa_set_ancestor_jf (dst,
3214 ipa_get_jf_ancestor_offset (src),
3215 ipa_get_jf_ancestor_formal_id (src),
3216 agg_p);
3217 break;
3218 }
3219 default:
3220 gcc_unreachable ();
3221 }
3222
3223 if (src->agg.items
3224 && (dst_agg_p || !src->agg.by_ref))
3225 {
3226 /* Currently we do not produce clobber aggregate jump
3227 functions, replace with merging when we do. */
3228 gcc_assert (!dst->agg.items);
3229
3230 dst->agg.by_ref = src->agg.by_ref;
3231 dst->agg.items = vec_safe_copy (src->agg.items);
3232 }
3233 }
3234 else
3235 ipa_set_jf_unknown (dst);
3236 }
3237 }
3238 }
3239
3240 /* If TARGET is an addr_expr of a function declaration, make it the
3241 (speculative, if SPECULATIVE is set) destination of an indirect edge IE
3242 and return the edge. Otherwise, return NULL. */
3243
3244 struct cgraph_edge *
3245 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
3246 bool speculative)
3247 {
3248 struct cgraph_node *callee;
3249 bool unreachable = false;
3250
3251 if (TREE_CODE (target) == ADDR_EXPR)
3252 target = TREE_OPERAND (target, 0);
3253 if (TREE_CODE (target) != FUNCTION_DECL)
3254 {
3255 target = canonicalize_constructor_val (target, NULL);
3256 if (!target || TREE_CODE (target) != FUNCTION_DECL)
3257 {
3258 /* Member pointer call that goes through a VMT lookup. */
3259 if (ie->indirect_info->member_ptr
3260 /* Or if target is not an invariant expression and we do not
3261 know if it will evaluate to a function at runtime.
3262 This can happen when folding through &VAR, where &VAR
3263 is IP invariant, but VAR itself is not.
3264
3265 TODO: Revisit this when GCC 5 is branched. It seems that
3266 member_ptr check is not needed and that we may try to fold
3267 the expression and see if VAR is readonly. */
3268 || !is_gimple_ip_invariant (target))
3269 {
3270 if (dump_enabled_p ())
3271 {
3272 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3273 "discovered direct call non-invariant %s\n",
3274 ie->caller->dump_name ());
3275 }
3276 return NULL;
3277 }
3278
3279
3280 if (dump_enabled_p ())
3281 {
3282 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3283 "discovered direct call to non-function in %s, "
3284 "making it __builtin_unreachable\n",
3285 ie->caller->dump_name ());
3286 }
3287
3288 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3289 callee = cgraph_node::get_create (target);
3290 unreachable = true;
3291 }
3292 else
3293 callee = cgraph_node::get (target);
3294 }
3295 else
3296 callee = cgraph_node::get (target);
3297
3298 /* Because may-edges are not explicitly represented and the vtable may be
3299 external, we may create the first reference to the object in the unit. */
3300 if (!callee || callee->inlined_to)
3301 {
3302
3303 /* We had better ensure we can refer to it.
3304 In the case of static functions we are out of luck, since we have already
3305 removed their bodies. In the case of public functions we may or may
3306 not introduce the reference. */
3307 if (!canonicalize_constructor_val (target, NULL)
3308 || !TREE_PUBLIC (target))
3309 {
3310 if (dump_file)
3311 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
3312 "(%s -> %s) but cannot refer to it. Giving up.\n",
3313 ie->caller->dump_name (),
3314 ie->callee->dump_name ());
3315 return NULL;
3316 }
3317 callee = cgraph_node::get_create (target);
3318 }
3319
3320 /* If the edge is already speculated, check whether it agrees with the new target. */
3321 if (speculative && ie->speculative)
3322 {
3323 if (dump_file)
3324 {
3325 cgraph_edge *e2 = ie->speculative_call_for_target (callee);
3326 if (!e2)
3327 {
3328 if (dump_file)
3329 fprintf (dump_file, "ipa-prop: Discovered call to a "
3330 "speculative target (%s -> %s) but the call is "
3331 "already speculated to different target. "
3332 "Giving up.\n",
3333 ie->caller->dump_name (), callee->dump_name ());
3334 }
3335 else
3336 {
3337 if (dump_file)
3338 fprintf (dump_file,
3339 "ipa-prop: Discovered call to a speculative target "
3340 "(%s -> %s) this agree with previous speculation.\n",
3341 ie->caller->dump_name (), callee->dump_name ());
3342 }
3343 }
3344 return NULL;
3345 }
3346
3347 if (!dbg_cnt (devirt))
3348 return NULL;
3349
3350 ipa_check_create_node_params ();
3351
3352 /* We cannot make edges to inline clones. It is a bug if someone removed
3353 the cgraph node too early. */
3354 gcc_assert (!callee->inlined_to);
3355
3356 if (dump_file && !unreachable)
3357 {
3358 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
3359 "(%s -> %s), for stmt ",
3360 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
3361 speculative ? "speculative" : "known",
3362 ie->caller->dump_name (),
3363 callee->dump_name ());
3364 if (ie->call_stmt)
3365 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
3366 else
3367 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
3368 }
3369 if (dump_enabled_p ())
3370 {
3371 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3372 "converting indirect call in %s to direct call to %s\n",
3373 ie->caller->dump_name (), callee->dump_name ());
3374 }
3375 if (!speculative)
3376 {
3377 struct cgraph_edge *orig = ie;
3378 ie = cgraph_edge::make_direct (ie, callee);
3379 /* If we resolved a speculative edge, the cost is already up to date
3380 for the direct call (adjusted by inline_edge_duplication_hook). */
3381 if (ie == orig)
3382 {
3383 ipa_call_summary *es = ipa_call_summaries->get (ie);
3384 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
3385 - eni_size_weights.call_cost);
3386 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
3387 - eni_time_weights.call_cost);
3388 }
3389 }
3390 else
3391 {
3392 if (!callee->can_be_discarded_p ())
3393 {
3394 cgraph_node *alias;
3395 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
3396 if (alias)
3397 callee = alias;
3398 }
3399 /* make_speculative will update ie's cost to direct call cost. */
3400 ie = ie->make_speculative
3401 (callee, ie->count.apply_scale (8, 10));
3402 }
3403
3404 return ie;
3405 }
3406
3407 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3408 CONSTRUCTOR and return it. Return NULL if the search fails for some
3409 reason. */
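
/* For example, for a hypothetical

     static const struct { int a; int b; } cst = { 1, 2 };

   searching the CONSTRUCTOR of cst with REQ_OFFSET equal to the bit
   position of field b (32 on common targets) would return the
   INTEGER_CST 2.  */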
3410
3411 static tree
3412 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3413 {
3414 tree type = TREE_TYPE (constructor);
3415 if (TREE_CODE (type) != ARRAY_TYPE
3416 && TREE_CODE (type) != RECORD_TYPE)
3417 return NULL;
3418
3419 unsigned ix;
3420 tree index, val;
3421 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3422 {
3423 HOST_WIDE_INT elt_offset;
3424 if (TREE_CODE (type) == ARRAY_TYPE)
3425 {
3426 offset_int off;
3427 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3428 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3429
3430 if (index)
3431 {
3432 if (TREE_CODE (index) == RANGE_EXPR)
3433 off = wi::to_offset (TREE_OPERAND (index, 0));
3434 else
3435 off = wi::to_offset (index);
3436 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3437 {
3438 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3439 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3440 off = wi::sext (off - wi::to_offset (low_bound),
3441 TYPE_PRECISION (TREE_TYPE (index)));
3442 }
3443 off *= wi::to_offset (unit_size);
3444 /* ??? Handle more than just the first index of a
3445 RANGE_EXPR. */
3446 }
3447 else
3448 off = wi::to_offset (unit_size) * ix;
3449
3450 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3451 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3452 continue;
3453 elt_offset = off.to_shwi ();
3454 }
3455 else if (TREE_CODE (type) == RECORD_TYPE)
3456 {
3457 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3458 if (DECL_BIT_FIELD (index))
3459 continue;
3460 elt_offset = int_bit_position (index);
3461 }
3462 else
3463 gcc_unreachable ();
3464
3465 if (elt_offset > req_offset)
3466 return NULL;
3467
3468 if (TREE_CODE (val) == CONSTRUCTOR)
3469 return find_constructor_constant_at_offset (val,
3470 req_offset - elt_offset);
3471
3472 if (elt_offset == req_offset
3473 && is_gimple_reg_type (TREE_TYPE (val))
3474 && is_gimple_ip_invariant (val))
3475 return val;
3476 }
3477 return NULL;
3478 }
3479
3480 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3481 invariant from a static constructor and if so, return it. Otherwise return
3482 NULL. */
3483
3484 static tree
3485 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3486 {
3487 if (by_ref)
3488 {
3489 if (TREE_CODE (scalar) != ADDR_EXPR)
3490 return NULL;
3491 scalar = TREE_OPERAND (scalar, 0);
3492 }
3493
3494 if (!VAR_P (scalar)
3495 || !is_global_var (scalar)
3496 || !TREE_READONLY (scalar)
3497 || !DECL_INITIAL (scalar)
3498 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3499 return NULL;
3500
3501 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3502 }
3503
3504 /* Retrieve a value from AGG, a set of known offset/value pairs for an
3505 aggregate, or from the static initializer of SCALAR (which can be NULL),
3506 for the given OFFSET; return NULL if there is none. BY_REF specifies whether the value has to be
3507 passed by reference or by value. If FROM_GLOBAL_CONSTANT is non-NULL, then
3508 the boolean it points to is set to true if the value comes from an
3509 initializer of a constant. */
3510
3511 tree
3512 ipa_find_agg_cst_for_param (struct ipa_agg_value_set *agg, tree scalar,
3513 HOST_WIDE_INT offset, bool by_ref,
3514 bool *from_global_constant)
3515 {
3516 struct ipa_agg_value *item;
3517 int i;
3518
3519 if (scalar)
3520 {
3521 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3522 if (res)
3523 {
3524 if (from_global_constant)
3525 *from_global_constant = true;
3526 return res;
3527 }
3528 }
3529
3530 if (!agg
3531 || by_ref != agg->by_ref)
3532 return NULL;
3533
3534 FOR_EACH_VEC_ELT (agg->items, i, item)
3535 if (item->offset == offset)
3536 {
3537 /* Currently we do not have clobber values, return NULL for them once
3538 we do. */
3539 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3540 if (from_global_constant)
3541 *from_global_constant = false;
3542 return item->value;
3543 }
3544 return NULL;
3545 }
3546
3547 /* Remove a reference to SYMBOL from the list of references of a node given by
3548 reference description RDESC. Return true if the reference has been
3549 successfully found and removed. */
3550
3551 static bool
3552 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3553 {
3554 struct ipa_ref *to_del;
3555 struct cgraph_edge *origin;
3556
3557 origin = rdesc->cs;
3558 if (!origin)
3559 return false;
3560 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3561 origin->lto_stmt_uid);
3562 if (!to_del)
3563 return false;
3564
3565 to_del->remove_reference ();
3566 if (dump_file)
3567 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3568 origin->caller->dump_name (), symbol->dump_name ());
3569 return true;
3570 }
3571
3572 /* If JFUNC has a reference description with refcount different from
3573 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3574 NULL. JFUNC must be a constant jump function. */
3575
3576 static struct ipa_cst_ref_desc *
3577 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3578 {
3579 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3580 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3581 return rdesc;
3582 else
3583 return NULL;
3584 }
3585
3586 /* If the value of constant jump function JFUNC is an address of a function
3587 declaration, return the associated call graph node. Otherwise return
3588 NULL. */
3589
3590 static cgraph_node *
3591 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3592 {
3593 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3594 tree cst = ipa_get_jf_constant (jfunc);
3595 if (TREE_CODE (cst) != ADDR_EXPR
3596 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3597 return NULL;
3598
3599 return cgraph_node::get (TREE_OPERAND (cst, 0));
3600 }
3601
3602
3603 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3604 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3605 the edge specified in the rdesc. Return false if either the symbol or the
3606 reference could not be found, otherwise return true. */
3607
3608 static bool
3609 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3610 {
3611 struct ipa_cst_ref_desc *rdesc;
3612 if (jfunc->type == IPA_JF_CONST
3613 && (rdesc = jfunc_rdesc_usable (jfunc))
3614 && --rdesc->refcount == 0)
3615 {
3616 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3617 if (!symbol)
3618 return false;
3619
3620 return remove_described_reference (symbol, rdesc);
3621 }
3622 return true;
3623 }
3624
3625 /* Try to find a destination for indirect edge IE that corresponds to a simple
3626 call or a call of a member function pointer and where the destination is a
3627 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3628 the type of the parameter to which the result of JFUNC is passed. If it can
3629 be determined, return the newly direct edge, otherwise return NULL.
3630 NEW_ROOT and NEW_ROOT_INFO are the node and its info that JFUNC lattices are
3631 relative to. */
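
/* For instance (hypothetically), if the caller passes &foo for the
   function-pointer parameter that IE calls through, JFUNC evaluates to an
   ADDR_EXPR of foo and ipa_make_edge_direct_to_target turns the indirect
   edge into a direct call to foo.  */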
3632
3633 static struct cgraph_edge *
3634 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3635 struct ipa_jump_func *jfunc, tree target_type,
3636 struct cgraph_node *new_root,
3637 class ipa_node_params *new_root_info)
3638 {
3639 struct cgraph_edge *cs;
3640 tree target;
3641 bool agg_contents = ie->indirect_info->agg_contents;
3642 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3643 if (agg_contents)
3644 {
3645 bool from_global_constant;
3646 ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3647 new_root,
3648 &jfunc->agg);
3649 target = ipa_find_agg_cst_for_param (&agg, scalar,
3650 ie->indirect_info->offset,
3651 ie->indirect_info->by_ref,
3652 &from_global_constant);
3653 agg.release ();
3654 if (target
3655 && !from_global_constant
3656 && !ie->indirect_info->guaranteed_unmodified)
3657 return NULL;
3658 }
3659 else
3660 target = scalar;
3661 if (!target)
3662 return NULL;
3663 cs = ipa_make_edge_direct_to_target (ie, target);
3664
3665 if (cs && !agg_contents)
3666 {
3667 bool ok;
3668 gcc_checking_assert (cs->callee
3669 && (cs != ie
3670 || jfunc->type != IPA_JF_CONST
3671 || !cgraph_node_for_jfunc (jfunc)
3672 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3673 ok = try_decrement_rdesc_refcount (jfunc);
3674 gcc_checking_assert (ok);
3675 }
3676
3677 return cs;
3678 }
3679
3680 /* Return the target to be used in cases of impossible devirtualization. IE
3681 and target (the latter can be NULL) are dumped when dumping is enabled. */
3682
3683 tree
3684 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3685 {
3686 if (dump_file)
3687 {
3688 if (target)
3689 fprintf (dump_file,
3690 "Type inconsistent devirtualization: %s->%s\n",
3691 ie->caller->dump_name (),
3692 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3693 else
3694 fprintf (dump_file,
3695 "No devirtualization target in %s\n",
3696 ie->caller->dump_name ());
3697 }
3698 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3699 cgraph_node::get_create (new_target);
3700 return new_target;
3701 }
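
/* Example (purely illustrative): if type information proves that a virtual
call has no valid target, redirecting it to __builtin_unreachable () lets
the optimizers treat the call site as unreachable and eliminate the code
path leading to it. */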
3702
3703 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3704 call based on a formal parameter which is described by jump function JFUNC
3705 and if it can be determined, make it direct and return the direct edge.
3706 Otherwise, return NULL. CTX describes the polymorphic context that the
3707 parameter the call is based on brings along with it. NEW_ROOT and
NEW_ROOT_INFO are the node and its info that JFUNC lattices are relative
3709 to. */
3710
3711 static struct cgraph_edge *
3712 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3713 struct ipa_jump_func *jfunc,
3714 class ipa_polymorphic_call_context ctx,
3715 struct cgraph_node *new_root,
3716 class ipa_node_params *new_root_info)
3717 {
3718 tree target = NULL;
3719 bool speculative = false;
3720
3721 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3722 return NULL;
3723
3724 gcc_assert (!ie->indirect_info->by_ref);
3725
3726 /* Try to do lookup via known virtual table pointer value. */
3727 if (!ie->indirect_info->vptr_changed
3728 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3729 {
3730 tree vtable;
3731 unsigned HOST_WIDE_INT offset;
3732 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3733 : NULL;
3734 ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3735 new_root,
3736 &jfunc->agg);
3737 tree t = ipa_find_agg_cst_for_param (&agg, scalar,
3738 ie->indirect_info->offset,
3739 true);
3740 agg.release ();
3741 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3742 {
3743 bool can_refer;
3744 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3745 vtable, offset, &can_refer);
3746 if (can_refer)
3747 {
3748 if (!t
3749 || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
3750 || !possible_polymorphic_call_target_p
3751 (ie, cgraph_node::get (t)))
3752 {
3753 /* Do not speculate builtin_unreachable, it is stupid! */
3754 if (!ie->indirect_info->vptr_changed)
3755 target = ipa_impossible_devirt_target (ie, target);
3756 else
3757 target = NULL;
3758 }
3759 else
3760 {
3761 target = t;
3762 speculative = ie->indirect_info->vptr_changed;
3763 }
3764 }
3765 }
3766 }
3767
3768 ipa_polymorphic_call_context ie_context (ie);
3769 vec <cgraph_node *>targets;
3770 bool final;
3771
3772 ctx.offset_by (ie->indirect_info->offset);
3773 if (ie->indirect_info->vptr_changed)
3774 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3775 ie->indirect_info->otr_type);
3776 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3777 targets = possible_polymorphic_call_targets
3778 (ie->indirect_info->otr_type,
3779 ie->indirect_info->otr_token,
3780 ctx, &final);
3781 if (final && targets.length () <= 1)
3782 {
3783 speculative = false;
3784 if (targets.length () == 1)
3785 target = targets[0]->decl;
3786 else
3787 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3788 }
3789 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3790 && !ie->speculative && ie->maybe_hot_p ())
3791 {
3792 cgraph_node *n;
3793 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3794 ie->indirect_info->otr_token,
3795 ie->indirect_info->context);
3796 if (n)
3797 {
3798 target = n->decl;
3799 speculative = true;
3800 }
3801 }
3802
3803 if (target)
3804 {
3805 if (!possible_polymorphic_call_target_p
3806 (ie, cgraph_node::get_create (target)))
3807 {
3808 if (speculative)
3809 return NULL;
3810 target = ipa_impossible_devirt_target (ie, target);
3811 }
3812 return ipa_make_edge_direct_to_target (ie, target, speculative);
3813 }
3814 else
3815 return NULL;
3816 }
3817
/* Update the indirect call information associated with NODE when CS is being
inlined, assuming NODE is (potentially indirectly) inlined into CS->callee.
Moreover, if the callee is discovered to be constant, create a new cgraph
edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
unless NEW_EDGES is NULL. Return true iff new edges were created. */
3823
3824 static bool
3825 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3826 struct cgraph_node *node,
3827 vec<cgraph_edge *> *new_edges)
3828 {
3829 class ipa_edge_args *top;
3830 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3831 struct cgraph_node *new_root;
3832 class ipa_node_params *new_root_info, *inlined_node_info;
3833 bool res = false;
3834
3835 ipa_check_create_edge_args ();
3836 top = IPA_EDGE_REF (cs);
3837 new_root = cs->caller->inlined_to
3838 ? cs->caller->inlined_to : cs->caller;
3839 new_root_info = IPA_NODE_REF (new_root);
3840 inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());
3841
3842 for (ie = node->indirect_calls; ie; ie = next_ie)
3843 {
3844 class cgraph_indirect_call_info *ici = ie->indirect_info;
3845 struct ipa_jump_func *jfunc;
3846 int param_index;
3847
3848 next_ie = ie->next_callee;
3849
3850 if (ici->param_index == -1)
3851 continue;
3852
/* We must check the range because of calls with a variable number of
arguments: */
3854 if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
3855 {
3856 ici->param_index = -1;
3857 continue;
3858 }
3859
3860 param_index = ici->param_index;
3861 jfunc = ipa_get_ith_jump_func (top, param_index);
3862
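/* Remember the current speculative targets of IE so that we can later tell
whether resolving the call merely confirmed one of the existing
speculations. */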
3863 auto_vec<cgraph_node *, 4> spec_targets;
3864 if (ie->speculative)
3865 for (cgraph_edge *direct = ie->first_speculative_call_target ();
3866 direct;
3867 direct = direct->next_speculative_call_target ())
3868 spec_targets.safe_push (direct->callee);
3869
3870 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3871 new_direct_edge = NULL;
3872 else if (ici->polymorphic)
3873 {
3874 ipa_polymorphic_call_context ctx;
3875 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3876 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
3877 new_root,
3878 new_root_info);
3879 }
3880 else
3881 {
3882 tree target_type = ipa_get_type (inlined_node_info, param_index);
3883 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3884 target_type,
3885 new_root,
3886 new_root_info);
3887 }
3888
/* If the discovered target was already among the speculative targets of
the edge, the speculation has merely been resolved; unless the edge is
still speculative, we are done with it. */
3890 if (new_direct_edge && new_direct_edge != ie
3891 && spec_targets.contains (new_direct_edge->callee))
3892 {
3893 new_direct_edge->indirect_inlining_edge = 1;
3894 top = IPA_EDGE_REF (cs);
3895 res = true;
3896 if (!new_direct_edge->speculative)
3897 continue;
3898 }
3899 else if (new_direct_edge)
3900 {
3901 new_direct_edge->indirect_inlining_edge = 1;
3902 if (new_edges)
3903 {
3904 new_edges->safe_push (new_direct_edge);
3905 res = true;
3906 }
3907 top = IPA_EDGE_REF (cs);
/* If a speculative edge was introduced, we still need to update the
call info of the indirect edge. */
3910 if (!new_direct_edge->speculative)
3911 continue;
3912 }
3913 if (jfunc->type == IPA_JF_PASS_THROUGH
3914 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3915 {
3916 if (ici->agg_contents
3917 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3918 && !ici->polymorphic)
3919 ici->param_index = -1;
3920 else
3921 {
3922 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3923 if (ici->polymorphic
3924 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3925 ici->vptr_changed = true;
3926 ipa_set_param_used_by_indirect_call (new_root_info,
3927 ici->param_index, true);
3928 if (ici->polymorphic)
3929 ipa_set_param_used_by_polymorphic_call (new_root_info,
3930 ici->param_index, true);
3931 }
3932 }
3933 else if (jfunc->type == IPA_JF_ANCESTOR)
3934 {
3935 if (ici->agg_contents
3936 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3937 && !ici->polymorphic)
3938 ici->param_index = -1;
3939 else
3940 {
3941 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3942 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3943 if (ici->polymorphic
3944 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3945 ici->vptr_changed = true;
3946 ipa_set_param_used_by_indirect_call (new_root_info,
3947 ici->param_index, true);
3948 if (ici->polymorphic)
3949 ipa_set_param_used_by_polymorphic_call (new_root_info,
3950 ici->param_index, true);
3951 }
3952 }
3953 else
3954 /* Either we can find a destination for this edge now or never. */
3955 ici->param_index = -1;
3956 }
3957
3958 return res;
3959 }
3960
/* Recursively traverse the subtree of NODE (including NODE itself) made up
of inlined cgraph_edges when CS has been inlined and invoke
update_indirect_edges_after_inlining on all nodes and
update_jump_functions_after_inlining on all non-inlined edges that lead out
of this subtree. Newly discovered indirect edges will be added to
*NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
created. */
3968
3969 static bool
3970 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3971 struct cgraph_node *node,
3972 vec<cgraph_edge *> *new_edges)
3973 {
3974 struct cgraph_edge *e;
3975 bool res;
3976
3977 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3978
3979 for (e = node->callees; e; e = e->next_callee)
3980 if (!e->inline_failed)
3981 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3982 else
3983 update_jump_functions_after_inlining (cs, e);
3984 for (e = node->indirect_calls; e; e = e->next_callee)
3985 update_jump_functions_after_inlining (cs, e);
3986
3987 return res;
3988 }
3989
/* Combine two controlled-use counts as done during inlining. */
3991
3992 static int
3993 combine_controlled_uses_counters (int c, int d)
3994 {
3995 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3996 return IPA_UNDESCRIBED_USE;
3997 else
3998 return c + d - 1;
3999 }
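
/* A worked example with hypothetical values: if the caller has 2 controlled
uses of a value, one of them being the examined call argument itself, and
the callee has 3 controlled uses of the corresponding parameter, then after
inlining the argument use is replaced by the callee's uses and the combined
count is 2 + 3 - 1 = 4. IPA_UNDESCRIBED_USE on either side poisons the
result, since an unknown use cannot be accounted for. */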
4000
/* Propagate the number of controlled uses from CS->callee to the new root
of the tree of inlined nodes. */
4003
4004 static void
4005 propagate_controlled_uses (struct cgraph_edge *cs)
4006 {
4007 class ipa_edge_args *args = IPA_EDGE_REF (cs);
4008 if (!args)
4009 return;
4010 struct cgraph_node *new_root = cs->caller->inlined_to
4011 ? cs->caller->inlined_to : cs->caller;
4012 class ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
4013 class ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
4014 int count, i;
4015
4016 if (!old_root_info)
4017 return;
4018
4019 count = MIN (ipa_get_cs_argument_count (args),
4020 ipa_get_param_count (old_root_info));
4021 for (i = 0; i < count; i++)
4022 {
4023 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4024 struct ipa_cst_ref_desc *rdesc;
4025
4026 if (jf->type == IPA_JF_PASS_THROUGH)
4027 {
4028 int src_idx, c, d;
4029 src_idx = ipa_get_jf_pass_through_formal_id (jf);
4030 c = ipa_get_controlled_uses (new_root_info, src_idx);
4031 d = ipa_get_controlled_uses (old_root_info, i);
4032
4033 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
4034 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
4035 c = combine_controlled_uses_counters (c, d);
4036 ipa_set_controlled_uses (new_root_info, src_idx, c);
4037 if (c == 0 && new_root_info->ipcp_orig_node)
4038 {
4039 struct cgraph_node *n;
4040 struct ipa_ref *ref;
4041 tree t = new_root_info->known_csts[src_idx];
4042
4043 if (t && TREE_CODE (t) == ADDR_EXPR
4044 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
4045 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
4046 && (ref = new_root->find_reference (n, NULL, 0)))
4047 {
4048 if (dump_file)
4049 fprintf (dump_file, "ipa-prop: Removing cloning-created "
4050 "reference from %s to %s.\n",
4051 new_root->dump_name (),
4052 n->dump_name ());
4053 ref->remove_reference ();
4054 }
4055 }
4056 }
4057 else if (jf->type == IPA_JF_CONST
4058 && (rdesc = jfunc_rdesc_usable (jf)))
4059 {
4060 int d = ipa_get_controlled_uses (old_root_info, i);
4061 int c = rdesc->refcount;
4062 rdesc->refcount = combine_controlled_uses_counters (c, d);
4063 if (rdesc->refcount == 0)
4064 {
4065 tree cst = ipa_get_jf_constant (jf);
4066 struct cgraph_node *n;
4067 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
4068 && TREE_CODE (TREE_OPERAND (cst, 0))
4069 == FUNCTION_DECL);
4070 n = cgraph_node::get (TREE_OPERAND (cst, 0));
4071 if (n)
4072 {
4073 struct cgraph_node *clone;
4074 bool ok;
4075 ok = remove_described_reference (n, rdesc);
4076 gcc_checking_assert (ok);
4077
4078 clone = cs->caller;
4079 while (clone->inlined_to
4080 && clone->ipcp_clone
4081 && clone != rdesc->cs->caller)
4082 {
4083 struct ipa_ref *ref;
4084 ref = clone->find_reference (n, NULL, 0);
4085 if (ref)
4086 {
4087 if (dump_file)
4088 fprintf (dump_file, "ipa-prop: Removing "
4089 "cloning-created reference "
4090 "from %s to %s.\n",
4091 clone->dump_name (),
4092 n->dump_name ());
4093 ref->remove_reference ();
4094 }
4095 clone = clone->callers->caller;
4096 }
4097 }
4098 }
4099 }
4100 }
4101
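/* Arguments for which the callee has no corresponding formal parameter
(e.g. a call passing more arguments than the callee declares) cannot have
their uses described, so make sure they are not tracked. */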
4102 for (i = ipa_get_param_count (old_root_info);
4103 i < ipa_get_cs_argument_count (args);
4104 i++)
4105 {
4106 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4107
4108 if (jf->type == IPA_JF_CONST)
4109 {
4110 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
4111 if (rdesc)
4112 rdesc->refcount = IPA_UNDESCRIBED_USE;
4113 }
4114 else if (jf->type == IPA_JF_PASS_THROUGH)
4115 ipa_set_controlled_uses (new_root_info,
4116 jf->value.pass_through.formal_id,
4117 IPA_UNDESCRIBED_USE);
4118 }
4119 }
4120
/* Update jump functions and indirect call information on inlining the call
site CS. CS is expected to lead to a node already cloned by
cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
*NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
created. */
4126
4127 bool
4128 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
4129 vec<cgraph_edge *> *new_edges)
4130 {
4131 bool changed;
4132 /* Do nothing if the preparation phase has not been carried out yet
4133 (i.e. during early inlining). */
4134 if (!ipa_node_params_sum)
4135 return false;
4136 gcc_assert (ipa_edge_args_sum);
4137
4138 propagate_controlled_uses (cs);
4139 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
4140 ipa_node_params_sum->remove (cs->callee);
4141
4142 class ipa_edge_args *args = IPA_EDGE_REF (cs);
4143 if (args)
4144 {
4145 bool ok = true;
4146 if (args->jump_functions)
4147 {
4148 struct ipa_jump_func *jf;
4149 int i;
4150 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4151 if (jf->type == IPA_JF_CONST
4152 && ipa_get_jf_constant_rdesc (jf))
4153 {
4154 ok = false;
4155 break;
4156 }
4157 }
4158 if (ok)
4159 ipa_edge_args_sum->remove (cs);
4160 }
4161 if (ipcp_transformation_sum)
4162 ipcp_transformation_sum->remove (cs->callee);
4163
4164 return changed;
4165 }
4166
/* Ensure that the array of edge argument information is big enough to
accommodate a structure for all edges, reallocating it if necessary.
Also allocate the associated hash tables if they do not already exist. */
4170
4171 void
4172 ipa_check_create_edge_args (void)
4173 {
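/* The summary is GGC-allocated without a registered destructor, hence the
placement new into storage from ggc_alloc_no_dtor below. */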
4174 if (!ipa_edge_args_sum)
4175 ipa_edge_args_sum
4176 = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
4177 ipa_edge_args_sum_t (symtab, true));
4178 if (!ipa_bits_hash_table)
4179 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4180 if (!ipa_vr_hash_table)
4181 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4182 }
4183
/* Free all ipa_edge_args structures. */
4185
4186 void
4187 ipa_free_all_edge_args (void)
4188 {
4189 if (!ipa_edge_args_sum)
4190 return;
4191
4192 ggc_delete (ipa_edge_args_sum);
4193 ipa_edge_args_sum = NULL;
4194 }
4195
4196 /* Free all ipa_node_params structures. */
4197
4198 void
4199 ipa_free_all_node_params (void)
4200 {
4201 if (ipa_node_params_sum)
4202 ggc_delete (ipa_node_params_sum);
4203 ipa_node_params_sum = NULL;
4204 }
4205
4206 /* Initialize IPA CP transformation summary and also allocate any necessary hash
4207 tables if they do not already exist. */
4208
4209 void
4210 ipcp_transformation_initialize (void)
4211 {
4212 if (!ipa_bits_hash_table)
4213 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4214 if (!ipa_vr_hash_table)
4215 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4216 if (ipcp_transformation_sum == NULL)
4217 {
4218 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
4219 ipcp_transformation_sum->disable_insertion_hook ();
4220 }
4221 }
4222
4223 /* Release the IPA CP transformation summary. */
4224
4225 void
4226 ipcp_free_transformation_sum (void)
4227 {
4228 if (!ipcp_transformation_sum)
4229 return;
4230
4231 ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
4232 ggc_free (ipcp_transformation_sum);
4233 ipcp_transformation_sum = NULL;
4234 }
4235
4236 /* Set the aggregate replacements of NODE to be AGGVALS. */
4237
4238 void
4239 ipa_set_node_agg_value_chain (struct cgraph_node *node,
4240 struct ipa_agg_replacement_value *aggvals)
4241 {
4242 ipcp_transformation_initialize ();
4243 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
4244 s->agg_values = aggvals;
4245 }
4246
4247 /* Hook that is called by cgraph.c when an edge is removed. Adjust reference
4248 count data structures accordingly. */
4249
4250 void
4251 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
4252 {
4253 if (args->jump_functions)
4254 {
4255 struct ipa_jump_func *jf;
4256 int i;
4257 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4258 {
4259 struct ipa_cst_ref_desc *rdesc;
4260 try_decrement_rdesc_refcount (jf);
4261 if (jf->type == IPA_JF_CONST
4262 && (rdesc = ipa_get_jf_constant_rdesc (jf))
4263 && rdesc->cs == cs)
4264 rdesc->cs = NULL;
4265 }
4266 }
4267 }
4268
/* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
reference count data structures accordingly. */
4271
4272 void
4273 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
4274 ipa_edge_args *old_args, ipa_edge_args *new_args)
4275 {
4276 unsigned int i;
4277
4278 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
4279 if (old_args->polymorphic_call_contexts)
4280 new_args->polymorphic_call_contexts
4281 = vec_safe_copy (old_args->polymorphic_call_contexts);
4282
4283 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4284 {
4285 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
4286 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
4287
4288 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
4289
4290 if (src_jf->type == IPA_JF_CONST)
4291 {
4292 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
4293
4294 if (!src_rdesc)
4295 dst_jf->value.constant.rdesc = NULL;
4296 else if (src->caller == dst->caller)
4297 {
4298 struct ipa_ref *ref;
4299 symtab_node *n = cgraph_node_for_jfunc (src_jf);
4300 gcc_checking_assert (n);
4301 ref = src->caller->find_reference (n, src->call_stmt,
4302 src->lto_stmt_uid);
4303 gcc_checking_assert (ref);
4304 dst->caller->clone_reference (ref, ref->stmt);
4305
4306 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4307 dst_rdesc->cs = dst;
4308 dst_rdesc->refcount = src_rdesc->refcount;
4309 dst_rdesc->next_duplicate = NULL;
4310 dst_jf->value.constant.rdesc = dst_rdesc;
4311 }
4312 else if (src_rdesc->cs == src)
4313 {
4314 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4315 dst_rdesc->cs = dst;
4316 dst_rdesc->refcount = src_rdesc->refcount;
4317 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
4318 src_rdesc->next_duplicate = dst_rdesc;
4319 dst_jf->value.constant.rdesc = dst_rdesc;
4320 }
4321 else
4322 {
4323 struct ipa_cst_ref_desc *dst_rdesc;
4324 /* This can happen during inlining, when a JFUNC can refer to a
4325 reference taken in a function up in the tree of inline clones.
4326 We need to find the duplicate that refers to our tree of
4327 inline clones. */
4328
4329 gcc_assert (dst->caller->inlined_to);
4330 for (dst_rdesc = src_rdesc->next_duplicate;
4331 dst_rdesc;
4332 dst_rdesc = dst_rdesc->next_duplicate)
4333 {
4334 struct cgraph_node *top;
4335 top = dst_rdesc->cs->caller->inlined_to
4336 ? dst_rdesc->cs->caller->inlined_to
4337 : dst_rdesc->cs->caller;
4338 if (dst->caller->inlined_to == top)
4339 break;
4340 }
4341 gcc_assert (dst_rdesc);
4342 dst_jf->value.constant.rdesc = dst_rdesc;
4343 }
4344 }
4345 else if (dst_jf->type == IPA_JF_PASS_THROUGH
4346 && src->caller == dst->caller)
4347 {
4348 struct cgraph_node *inline_root = dst->caller->inlined_to
4349 ? dst->caller->inlined_to : dst->caller;
4350 class ipa_node_params *root_info = IPA_NODE_REF (inline_root);
4351 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
4352
4353 int c = ipa_get_controlled_uses (root_info, idx);
4354 if (c != IPA_UNDESCRIBED_USE)
4355 {
4356 c++;
4357 ipa_set_controlled_uses (root_info, idx, c);
4358 }
4359 }
4360 }
4361 }
4362
/* Analyze a function newly added to the callgraph. */
4364
4365 static void
4366 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
4367 {
4368 if (node->has_gimple_body_p ())
4369 ipa_analyze_node (node);
4370 }
4371
4372 /* Hook that is called by summary when a node is duplicated. */
4373
4374 void
4375 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
4376 ipa_node_params *old_info,
4377 ipa_node_params *new_info)
4378 {
4379 ipa_agg_replacement_value *old_av, *new_av;
4380
4381 new_info->descriptors = vec_safe_copy (old_info->descriptors);
4382 new_info->lattices = NULL;
4383 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
4384 new_info->known_csts = old_info->known_csts.copy ();
4385 new_info->known_contexts = old_info->known_contexts.copy ();
4386
4387 new_info->analysis_done = old_info->analysis_done;
4388 new_info->node_enqueued = old_info->node_enqueued;
4389 new_info->versionable = old_info->versionable;
4390
4391 old_av = ipa_get_agg_replacements_for_node (src);
4392 if (old_av)
4393 {
4394 new_av = NULL;
4395 while (old_av)
4396 {
4397 struct ipa_agg_replacement_value *v;
4398
4399 v = ggc_alloc<ipa_agg_replacement_value> ();
4400 memcpy (v, old_av, sizeof (*v));
4401 v->next = new_av;
4402 new_av = v;
4403 old_av = old_av->next;
4404 }
4405 ipa_set_node_agg_value_chain (dst, new_av);
4406 }
4407 }
4408
4409 /* Duplication of ipcp transformation summaries. */
4410
4411 void
4412 ipcp_transformation_t::duplicate(cgraph_node *, cgraph_node *dst,
4413 ipcp_transformation *src_trans,
4414 ipcp_transformation *dst_trans)
4415 {
4416 /* Avoid redundant work of duplicating vectors we will never use. */
4417 if (dst->inlined_to)
4418 return;
4419 dst_trans->bits = vec_safe_copy (src_trans->bits);
4420 dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
4421 ipa_agg_replacement_value *agg = src_trans->agg_values,
4422 **aggptr = &dst_trans->agg_values;
4423 while (agg)
4424 {
4425 *aggptr = ggc_alloc<ipa_agg_replacement_value> ();
4426 **aggptr = *agg;
4427 agg = agg->next;
4428 aggptr = &(*aggptr)->next;
4429 }
4430 }
4431
4432 /* Register our cgraph hooks if they are not already there. */
4433
4434 void
4435 ipa_register_cgraph_hooks (void)
4436 {
4437 ipa_check_create_node_params ();
4438 ipa_check_create_edge_args ();
4439
4440 function_insertion_hook_holder =
4441 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
4442 }
4443
/* Unregister our cgraph hooks if they are registered. */
4445
4446 static void
4447 ipa_unregister_cgraph_hooks (void)
4448 {
4449 if (function_insertion_hook_holder)
4450 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
4451 function_insertion_hook_holder = NULL;
4452 }
4453
4454 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4455 longer needed after ipa-cp. */
4456
4457 void
4458 ipa_free_all_structures_after_ipa_cp (void)
4459 {
4460 if (!optimize && !in_lto_p)
4461 {
4462 ipa_free_all_edge_args ();
4463 ipa_free_all_node_params ();
4464 ipcp_sources_pool.release ();
4465 ipcp_cst_values_pool.release ();
4466 ipcp_poly_ctx_values_pool.release ();
4467 ipcp_agg_lattice_pool.release ();
4468 ipa_unregister_cgraph_hooks ();
4469 ipa_refdesc_pool.release ();
4470 }
4471 }
4472
4473 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4474 longer needed after indirect inlining. */
4475
4476 void
4477 ipa_free_all_structures_after_iinln (void)
4478 {
4479 ipa_free_all_edge_args ();
4480 ipa_free_all_node_params ();
4481 ipa_unregister_cgraph_hooks ();
4482 ipcp_sources_pool.release ();
4483 ipcp_cst_values_pool.release ();
4484 ipcp_poly_ctx_values_pool.release ();
4485 ipcp_agg_lattice_pool.release ();
4486 ipa_refdesc_pool.release ();
4487 }
4488
/* Print the parameter descriptors of function NODE to file F. */
4491
4492 void
4493 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4494 {
4495 int i, count;
4496 class ipa_node_params *info;
4497
4498 if (!node->definition)
4499 return;
4500 info = IPA_NODE_REF (node);
4501 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
4502 if (!info)
4503 {
4504 fprintf (f, " no params return\n");
4505 return;
4506 }
4507 count = ipa_get_param_count (info);
4508 for (i = 0; i < count; i++)
4509 {
4510 int c;
4511
4512 fprintf (f, " ");
4513 ipa_dump_param (f, info, i);
4514 if (ipa_is_param_used (info, i))
4515 fprintf (f, " used");
4516 if (ipa_is_param_used_by_ipa_predicates (info, i))
4517 fprintf (f, " used_by_ipa_predicates");
4518 if (ipa_is_param_used_by_indirect_call (info, i))
4519 fprintf (f, " used_by_indirect_call");
4520 if (ipa_is_param_used_by_polymorphic_call (info, i))
4521 fprintf (f, " used_by_polymorphic_call");
4522 c = ipa_get_controlled_uses (info, i);
4523 if (c == IPA_UNDESCRIBED_USE)
4524 fprintf (f, " undescribed_use");
4525 else
4526 fprintf (f, " controlled_uses=%i", c);
4527 fprintf (f, "\n");
4528 }
4529 }
4530
4531 /* Print ipa_tree_map data structures of all functions in the
4532 callgraph to F. */
4533
4534 void
4535 ipa_print_all_params (FILE * f)
4536 {
4537 struct cgraph_node *node;
4538
4539 fprintf (f, "\nFunction parameters:\n");
4540 FOR_EACH_FUNCTION (node)
4541 ipa_print_node_params (f, node);
4542 }
4543
4544 /* Dump the AV linked list. */
4545
4546 void
4547 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4548 {
4549 bool comma = false;
4550 fprintf (f, " Aggregate replacements:");
4551 for (; av; av = av->next)
4552 {
4553 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4554 av->index, av->offset);
4555 print_generic_expr (f, av->value);
4556 comma = true;
4557 }
4558 fprintf (f, "\n");
4559 }
4560
4561 /* Stream out jump function JUMP_FUNC to OB. */
4562
4563 static void
4564 ipa_write_jump_function (struct output_block *ob,
4565 struct ipa_jump_func *jump_func)
4566 {
4567 struct ipa_agg_jf_item *item;
4568 struct bitpack_d bp;
4569 int i, count;
4570 int flag = 0;
4571
/* ADDR_EXPRs are very common IP invariants; save some streamer data
4573 as well as WPA memory by handling them specially. */
4574 if (jump_func->type == IPA_JF_CONST
4575 && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
4576 flag = 1;
4577
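/* The jump function type and the ADDR_EXPR shorthand share a single uhwi:
the low bit is FLAG and the remaining bits are the type. The reader splits
them again with val & 1 and val / 2. */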
4578 streamer_write_uhwi (ob, jump_func->type * 2 + flag);
4579 switch (jump_func->type)
4580 {
4581 case IPA_JF_UNKNOWN:
4582 break;
4583 case IPA_JF_CONST:
4584 gcc_assert (
4585 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4586 stream_write_tree (ob,
4587 flag
4588 ? TREE_OPERAND (jump_func->value.constant.value, 0)
4589 : jump_func->value.constant.value, true);
4590 break;
4591 case IPA_JF_PASS_THROUGH:
4592 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4593 if (jump_func->value.pass_through.operation == NOP_EXPR)
4594 {
4595 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4596 bp = bitpack_create (ob->main_stream);
4597 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4598 streamer_write_bitpack (&bp);
4599 }
4600 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4601 == tcc_unary)
4602 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4603 else
4604 {
4605 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4606 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4607 }
4608 break;
4609 case IPA_JF_ANCESTOR:
4610 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4611 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4612 bp = bitpack_create (ob->main_stream);
4613 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4614 streamer_write_bitpack (&bp);
4615 break;
4616 default:
4617 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4618 }
4619
4620 count = vec_safe_length (jump_func->agg.items);
4621 streamer_write_uhwi (ob, count);
4622 if (count)
4623 {
4624 bp = bitpack_create (ob->main_stream);
4625 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4626 streamer_write_bitpack (&bp);
4627 }
4628
4629 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4630 {
4631 stream_write_tree (ob, item->type, true);
4632 streamer_write_uhwi (ob, item->offset);
4633 streamer_write_uhwi (ob, item->jftype);
4634 switch (item->jftype)
4635 {
4636 case IPA_JF_UNKNOWN:
4637 break;
4638 case IPA_JF_CONST:
4639 stream_write_tree (ob, item->value.constant, true);
4640 break;
4641 case IPA_JF_PASS_THROUGH:
4642 case IPA_JF_LOAD_AGG:
4643 streamer_write_uhwi (ob, item->value.pass_through.operation);
4644 streamer_write_uhwi (ob, item->value.pass_through.formal_id);
4645 if (TREE_CODE_CLASS (item->value.pass_through.operation)
4646 != tcc_unary)
4647 stream_write_tree (ob, item->value.pass_through.operand, true);
4648 if (item->jftype == IPA_JF_LOAD_AGG)
4649 {
4650 stream_write_tree (ob, item->value.load_agg.type, true);
4651 streamer_write_uhwi (ob, item->value.load_agg.offset);
4652 bp = bitpack_create (ob->main_stream);
4653 bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
4654 streamer_write_bitpack (&bp);
4655 }
4656 break;
4657 default:
4658 fatal_error (UNKNOWN_LOCATION,
4659 "invalid jump function in LTO stream");
4660 }
4661 }
4662
4663 bp = bitpack_create (ob->main_stream);
4664 bp_pack_value (&bp, !!jump_func->bits, 1);
4665 streamer_write_bitpack (&bp);
4666 if (jump_func->bits)
4667 {
4668 streamer_write_widest_int (ob, jump_func->bits->value);
4669 streamer_write_widest_int (ob, jump_func->bits->mask);
4670 }
4671 bp_pack_value (&bp, !!jump_func->m_vr, 1);
4672 streamer_write_bitpack (&bp);
4673 if (jump_func->m_vr)
4674 {
streamer_write_enum (ob->main_stream, value_range_kind,
4676 VR_LAST, jump_func->m_vr->kind ());
4677 stream_write_tree (ob, jump_func->m_vr->min (), true);
4678 stream_write_tree (ob, jump_func->m_vr->max (), true);
4679 }
4680 }
4681
4682 /* Read in jump function JUMP_FUNC from IB. */
4683
4684 static void
4685 ipa_read_jump_function (class lto_input_block *ib,
4686 struct ipa_jump_func *jump_func,
4687 struct cgraph_edge *cs,
4688 class data_in *data_in,
4689 bool prevails)
4690 {
4691 enum jump_func_type jftype;
4692 enum tree_code operation;
4693 int i, count;
4694 int val = streamer_read_uhwi (ib);
4695 bool flag = val & 1;
4696
4697 jftype = (enum jump_func_type) (val / 2);
4698 switch (jftype)
4699 {
4700 case IPA_JF_UNKNOWN:
4701 ipa_set_jf_unknown (jump_func);
4702 break;
4703 case IPA_JF_CONST:
4704 {
4705 tree t = stream_read_tree (ib, data_in);
4706 if (flag && prevails)
4707 t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
4708 ipa_set_jf_constant (jump_func, t, cs);
4709 }
4710 break;
4711 case IPA_JF_PASS_THROUGH:
4712 operation = (enum tree_code) streamer_read_uhwi (ib);
4713 if (operation == NOP_EXPR)
4714 {
4715 int formal_id = streamer_read_uhwi (ib);
4716 struct bitpack_d bp = streamer_read_bitpack (ib);
4717 bool agg_preserved = bp_unpack_value (&bp, 1);
4718 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4719 }
4720 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4721 {
4722 int formal_id = streamer_read_uhwi (ib);
4723 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4724 }
4725 else
4726 {
4727 tree operand = stream_read_tree (ib, data_in);
4728 int formal_id = streamer_read_uhwi (ib);
4729 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4730 operation);
4731 }
4732 break;
4733 case IPA_JF_ANCESTOR:
4734 {
4735 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4736 int formal_id = streamer_read_uhwi (ib);
4737 struct bitpack_d bp = streamer_read_bitpack (ib);
4738 bool agg_preserved = bp_unpack_value (&bp, 1);
4739 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4740 break;
4741 }
4742 default:
4743 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4744 }
4745
4746 count = streamer_read_uhwi (ib);
4747 if (prevails)
4748 vec_alloc (jump_func->agg.items, count);
4749 if (count)
4750 {
4751 struct bitpack_d bp = streamer_read_bitpack (ib);
4752 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4753 }
4754 for (i = 0; i < count; i++)
4755 {
4756 struct ipa_agg_jf_item item;
4757 item.type = stream_read_tree (ib, data_in);
4758 item.offset = streamer_read_uhwi (ib);
4759 item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4760
4761 switch (item.jftype)
4762 {
4763 case IPA_JF_UNKNOWN:
4764 break;
4765 case IPA_JF_CONST:
4766 item.value.constant = stream_read_tree (ib, data_in);
4767 break;
4768 case IPA_JF_PASS_THROUGH:
4769 case IPA_JF_LOAD_AGG:
4770 operation = (enum tree_code) streamer_read_uhwi (ib);
4771 item.value.pass_through.operation = operation;
4772 item.value.pass_through.formal_id = streamer_read_uhwi (ib);
4773 if (TREE_CODE_CLASS (operation) == tcc_unary)
4774 item.value.pass_through.operand = NULL_TREE;
4775 else
4776 item.value.pass_through.operand = stream_read_tree (ib, data_in);
4777 if (item.jftype == IPA_JF_LOAD_AGG)
4778 {
4779 struct bitpack_d bp;
4780 item.value.load_agg.type = stream_read_tree (ib, data_in);
4781 item.value.load_agg.offset = streamer_read_uhwi (ib);
4782 bp = streamer_read_bitpack (ib);
4783 item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
4784 }
4785 break;
4786 default:
4787 fatal_error (UNKNOWN_LOCATION,
4788 "invalid jump function in LTO stream");
4789 }
4790 if (prevails)
4791 jump_func->agg.items->quick_push (item);
4792 }
4793
4794 struct bitpack_d bp = streamer_read_bitpack (ib);
4795 bool bits_known = bp_unpack_value (&bp, 1);
4796 if (bits_known)
4797 {
4798 widest_int value = streamer_read_widest_int (ib);
4799 widest_int mask = streamer_read_widest_int (ib);
4800 if (prevails)
4801 ipa_set_jfunc_bits (jump_func, value, mask);
4802 }
4803 else
4804 jump_func->bits = NULL;
4805
4806 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4807 bool vr_known = bp_unpack_value (&vr_bp, 1);
4808 if (vr_known)
4809 {
4810 enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
4811 VR_LAST);
4812 tree min = stream_read_tree (ib, data_in);
4813 tree max = stream_read_tree (ib, data_in);
4814 if (prevails)
4815 ipa_set_jfunc_vr (jump_func, type, min, max);
4816 }
4817 else
4818 jump_func->m_vr = NULL;
4819 }
4820
/* Stream out to OB the parts of cgraph_indirect_call_info corresponding to
CS that are relevant to indirect inlining. */
4823
4824 static void
4825 ipa_write_indirect_edge_info (struct output_block *ob,
4826 struct cgraph_edge *cs)
4827 {
4828 class cgraph_indirect_call_info *ii = cs->indirect_info;
4829 struct bitpack_d bp;
4830
4831 streamer_write_hwi (ob, ii->param_index);
4832 bp = bitpack_create (ob->main_stream);
4833 bp_pack_value (&bp, ii->polymorphic, 1);
4834 bp_pack_value (&bp, ii->agg_contents, 1);
4835 bp_pack_value (&bp, ii->member_ptr, 1);
4836 bp_pack_value (&bp, ii->by_ref, 1);
4837 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4838 bp_pack_value (&bp, ii->vptr_changed, 1);
4839 streamer_write_bitpack (&bp);
4840 if (ii->agg_contents || ii->polymorphic)
4841 streamer_write_hwi (ob, ii->offset);
4842 else
4843 gcc_assert (ii->offset == 0);
4844
4845 if (ii->polymorphic)
4846 {
4847 streamer_write_hwi (ob, ii->otr_token);
4848 stream_write_tree (ob, ii->otr_type, true);
4849 ii->context.stream_out (ob);
4850 }
4851 }
4852
/* Read in from IB the parts of cgraph_indirect_call_info corresponding to
CS that are relevant to indirect inlining. */
4855
4856 static void
4857 ipa_read_indirect_edge_info (class lto_input_block *ib,
4858 class data_in *data_in,
4859 struct cgraph_edge *cs,
4860 class ipa_node_params *info)
4861 {
4862 class cgraph_indirect_call_info *ii = cs->indirect_info;
4863 struct bitpack_d bp;
4864
4865 ii->param_index = (int) streamer_read_hwi (ib);
4866 bp = streamer_read_bitpack (ib);
4867 ii->polymorphic = bp_unpack_value (&bp, 1);
4868 ii->agg_contents = bp_unpack_value (&bp, 1);
4869 ii->member_ptr = bp_unpack_value (&bp, 1);
4870 ii->by_ref = bp_unpack_value (&bp, 1);
4871 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4872 ii->vptr_changed = bp_unpack_value (&bp, 1);
4873 if (ii->agg_contents || ii->polymorphic)
4874 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4875 else
4876 ii->offset = 0;
4877 if (ii->polymorphic)
4878 {
4879 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4880 ii->otr_type = stream_read_tree (ib, data_in);
4881 ii->context.stream_in (ib, data_in);
4882 }
4883 if (info && ii->param_index >= 0)
4884 {
4885 if (ii->polymorphic)
4886 ipa_set_param_used_by_polymorphic_call (info,
ii->param_index, true);
4888 ipa_set_param_used_by_indirect_call (info,
4889 ii->param_index, true);
4890 }
4891 }
4892
4893 /* Stream out NODE info to OB. */
4894
4895 static void
4896 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4897 {
4898 int node_ref;
4899 lto_symtab_encoder_t encoder;
4900 class ipa_node_params *info = IPA_NODE_REF (node);
4901 int j;
4902 struct cgraph_edge *e;
4903 struct bitpack_d bp;
4904
4905 encoder = ob->decl_state->symtab_node_encoder;
4906 node_ref = lto_symtab_encoder_encode (encoder, node);
4907 streamer_write_uhwi (ob, node_ref);
4908
4909 streamer_write_uhwi (ob, ipa_get_param_count (info));
4910 for (j = 0; j < ipa_get_param_count (info); j++)
4911 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4912 bp = bitpack_create (ob->main_stream);
4913 gcc_assert (info->analysis_done
4914 || ipa_get_param_count (info) == 0);
4915 gcc_assert (!info->node_enqueued);
4916 gcc_assert (!info->ipcp_orig_node);
4917 for (j = 0; j < ipa_get_param_count (info); j++)
4918 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4919 streamer_write_bitpack (&bp);
4920 for (j = 0; j < ipa_get_param_count (info); j++)
4921 {
4922 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4923 stream_write_tree (ob, ipa_get_type (info, j), true);
4924 }
4925 for (e = node->callees; e; e = e->next_callee)
4926 {
4927 class ipa_edge_args *args = IPA_EDGE_REF (e);
4928
4929 if (!args)
4930 {
4931 streamer_write_uhwi (ob, 0);
4932 continue;
4933 }
4934
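/* The argument count and the presence of polymorphic call contexts are
encoded in one number: count * 2 + (contexts present). ipa_read_edge_info
decodes them again as count & 1 and count / 2. */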
4935 streamer_write_uhwi (ob,
4936 ipa_get_cs_argument_count (args) * 2
4937 + (args->polymorphic_call_contexts != NULL));
4938 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4939 {
4940 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4941 if (args->polymorphic_call_contexts != NULL)
4942 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4943 }
4944 }
4945 for (e = node->indirect_calls; e; e = e->next_callee)
4946 {
4947 class ipa_edge_args *args = IPA_EDGE_REF (e);
4948 if (!args)
4949 streamer_write_uhwi (ob, 0);
4950 else
4951 {
4952 streamer_write_uhwi (ob,
4953 ipa_get_cs_argument_count (args) * 2
4954 + (args->polymorphic_call_contexts != NULL));
4955 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4956 {
4957 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4958 if (args->polymorphic_call_contexts != NULL)
4959 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4960 }
4961 }
4962 ipa_write_indirect_edge_info (ob, e);
4963 }
4964 }
4965
4966 /* Stream in edge E from IB. */
4967
4968 static void
4969 ipa_read_edge_info (class lto_input_block *ib,
4970 class data_in *data_in,
4971 struct cgraph_edge *e, bool prevails)
4972 {
4973 int count = streamer_read_uhwi (ib);
4974 bool contexts_computed = count & 1;
4975
4976 count /= 2;
4977 if (!count)
4978 return;
4979 if (prevails
4980 && (e->possibly_call_in_translation_unit_p ()
4981 /* Also stream in jump functions to builtins in hope that they
4982 will get fnspecs. */
4983 || fndecl_built_in_p (e->callee->decl, BUILT_IN_NORMAL)))
4984 {
4985 class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (e);
4986 vec_safe_grow_cleared (args->jump_functions, count, true);
4987 if (contexts_computed)
4988 vec_safe_grow_cleared (args->polymorphic_call_contexts, count, true);
4989 for (int k = 0; k < count; k++)
4990 {
4991 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4992 data_in, prevails);
4993 if (contexts_computed)
4994 ipa_get_ith_polymorhic_call_context (args, k)->stream_in
4995 (ib, data_in);
4996 }
4997 }
4998 else
4999 {
5000 for (int k = 0; k < count; k++)
5001 {
5002 struct ipa_jump_func dummy;
5003 ipa_read_jump_function (ib, &dummy, e,
5004 data_in, prevails);
5005 if (contexts_computed)
5006 {
5007 class ipa_polymorphic_call_context ctx;
5008 ctx.stream_in (ib, data_in);
5009 }
5010 }
5011 }
5012 }
5013
5014 /* Stream in NODE info from IB. */
5015
5016 static void
5017 ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
5018 class data_in *data_in)
5019 {
5020 int k;
5021 struct cgraph_edge *e;
5022 struct bitpack_d bp;
5023 bool prevails = node->prevailing_p ();
5024 class ipa_node_params *info = prevails
5025 ? IPA_NODE_REF_GET_CREATE (node) : NULL;
5026
5027 int param_count = streamer_read_uhwi (ib);
5028 if (prevails)
5029 {
5030 ipa_alloc_node_params (node, param_count);
5031 for (k = 0; k < param_count; k++)
5032 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
5033 if (ipa_get_param_count (info) != 0)
5034 info->analysis_done = true;
5035 info->node_enqueued = false;
5036 }
5037 else
5038 for (k = 0; k < param_count; k++)
5039 streamer_read_uhwi (ib);
5040
5041 bp = streamer_read_bitpack (ib);
5042 for (k = 0; k < param_count; k++)
5043 {
5044 bool used = bp_unpack_value (&bp, 1);
5045
5046 if (prevails)
5047 ipa_set_param_used (info, k, used);
5048 }
5049 for (k = 0; k < param_count; k++)
5050 {
5051 int nuses = streamer_read_hwi (ib);
5052 tree type = stream_read_tree (ib, data_in);
5053
5054 if (prevails)
5055 {
5056 ipa_set_controlled_uses (info, k, nuses);
5057 (*info->descriptors)[k].decl_or_type = type;
5058 }
5059 }
5060 for (e = node->callees; e; e = e->next_callee)
5061 ipa_read_edge_info (ib, data_in, e, prevails);
5062 for (e = node->indirect_calls; e; e = e->next_callee)
5063 {
5064 ipa_read_edge_info (ib, data_in, e, prevails);
5065 ipa_read_indirect_edge_info (ib, data_in, e, info);
5066 }
5067 }
5068
/* Write jump functions of all functions in the current LTO partition. */
5070
5071 void
5072 ipa_prop_write_jump_functions (void)
5073 {
5074 struct cgraph_node *node;
5075 struct output_block *ob;
5076 unsigned int count = 0;
5077 lto_symtab_encoder_iterator lsei;
5078 lto_symtab_encoder_t encoder;
5079
5080 if (!ipa_node_params_sum || !ipa_edge_args_sum)
5081 return;
5082
5083 ob = create_output_block (LTO_section_jump_functions);
5084 encoder = ob->decl_state->symtab_node_encoder;
5085 ob->symbol = NULL;
5086 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5087 lsei_next_function_in_partition (&lsei))
5088 {
5089 node = lsei_cgraph_node (lsei);
5090 if (node->has_gimple_body_p ()
5091 && IPA_NODE_REF (node) != NULL)
5092 count++;
5093 }
5094
5095 streamer_write_uhwi (ob, count);
5096
5097 /* Process all of the functions. */
5098 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5099 lsei_next_function_in_partition (&lsei))
5100 {
5101 node = lsei_cgraph_node (lsei);
5102 if (node->has_gimple_body_p ()
5103 && IPA_NODE_REF (node) != NULL)
5104 ipa_write_node_info (ob, node);
5105 }
5106 streamer_write_char_stream (ob->main_stream, 0);
5107 produce_asm (ob, NULL);
5108 destroy_output_block (ob);
5109 }
5110
/* Read the jump functions section in file FILE_DATA of length LEN with data
DATA. */
5112
5113 static void
5114 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5115 size_t len)
5116 {
5117 const struct lto_function_header *header =
5118 (const struct lto_function_header *) data;
5119 const int cfg_offset = sizeof (struct lto_function_header);
5120 const int main_offset = cfg_offset + header->cfg_size;
5121 const int string_offset = main_offset + header->main_size;
5122 class data_in *data_in;
5123 unsigned int i;
5124 unsigned int count;
5125
5126 lto_input_block ib_main ((const char *) data + main_offset,
5127 header->main_size, file_data->mode_table);
5128
5129 data_in =
5130 lto_data_in_create (file_data, (const char *) data + string_offset,
5131 header->string_size, vNULL);
5132 count = streamer_read_uhwi (&ib_main);
5133
5134 for (i = 0; i < count; i++)
5135 {
5136 unsigned int index;
5137 struct cgraph_node *node;
5138 lto_symtab_encoder_t encoder;
5139
5140 index = streamer_read_uhwi (&ib_main);
5141 encoder = file_data->symtab_node_encoder;
5142 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5143 index));
5144 gcc_assert (node->definition);
5145 ipa_read_node_info (&ib_main, node, data_in);
5146 }
5147 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5148 len);
5149 lto_data_in_delete (data_in);
5150 }
5151
/* Read IPA-PROP jump functions from all LTO summary sections. */
5153
5154 void
5155 ipa_prop_read_jump_functions (void)
5156 {
5157 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5158 struct lto_file_decl_data *file_data;
5159 unsigned int j = 0;
5160
5161 ipa_check_create_node_params ();
5162 ipa_check_create_edge_args ();
5163 ipa_register_cgraph_hooks ();
5164
5165 while ((file_data = file_data_vec[j++]))
5166 {
5167 size_t len;
5168 const char *data
5169 = lto_get_summary_section_data (file_data, LTO_section_jump_functions,
5170 &len);
5171 if (data)
5172 ipa_prop_read_section (file_data, data, len);
5173 }
5174 }
5175
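/* Stream out the IPA-CP transformation summary of NODE, i.e. its aggregate
value replacements, known value ranges and known bits of parameters, to
OB. */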
5176 void
5177 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5178 {
5179 int node_ref;
5180 unsigned int count = 0;
5181 lto_symtab_encoder_t encoder;
5182 struct ipa_agg_replacement_value *aggvals, *av;
5183
5184 aggvals = ipa_get_agg_replacements_for_node (node);
5185 encoder = ob->decl_state->symtab_node_encoder;
5186 node_ref = lto_symtab_encoder_encode (encoder, node);
5187 streamer_write_uhwi (ob, node_ref);
5188
5189 for (av = aggvals; av; av = av->next)
5190 count++;
5191 streamer_write_uhwi (ob, count);
5192
5193 for (av = aggvals; av; av = av->next)
5194 {
5195 struct bitpack_d bp;
5196
5197 streamer_write_uhwi (ob, av->offset);
5198 streamer_write_uhwi (ob, av->index);
5199 stream_write_tree (ob, av->value, true);
5200
5201 bp = bitpack_create (ob->main_stream);
5202 bp_pack_value (&bp, av->by_ref, 1);
5203 streamer_write_bitpack (&bp);
5204 }
5205
5206 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5207 if (ts && vec_safe_length (ts->m_vr) > 0)
5208 {
5209 count = ts->m_vr->length ();
5210 streamer_write_uhwi (ob, count);
5211 for (unsigned i = 0; i < count; ++i)
5212 {
5213 struct bitpack_d bp;
5214 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5215 bp = bitpack_create (ob->main_stream);
5216 bp_pack_value (&bp, parm_vr->known, 1);
5217 streamer_write_bitpack (&bp);
5218 if (parm_vr->known)
5219 {
streamer_write_enum (ob->main_stream, value_range_kind,
5221 VR_LAST, parm_vr->type);
5222 streamer_write_wide_int (ob, parm_vr->min);
5223 streamer_write_wide_int (ob, parm_vr->max);
5224 }
5225 }
5226 }
5227 else
5228 streamer_write_uhwi (ob, 0);
5229
5230 if (ts && vec_safe_length (ts->bits) > 0)
5231 {
5232 count = ts->bits->length ();
5233 streamer_write_uhwi (ob, count);
5234
5235 for (unsigned i = 0; i < count; ++i)
5236 {
5237 const ipa_bits *bits_jfunc = (*ts->bits)[i];
5238 struct bitpack_d bp = bitpack_create (ob->main_stream);
5239 bp_pack_value (&bp, !!bits_jfunc, 1);
5240 streamer_write_bitpack (&bp);
5241 if (bits_jfunc)
5242 {
5243 streamer_write_widest_int (ob, bits_jfunc->value);
5244 streamer_write_widest_int (ob, bits_jfunc->mask);
5245 }
5246 }
5247 }
5248 else
5249 streamer_write_uhwi (ob, 0);
5250 }
5251
/* Stream in the IPA-CP transformation info for NODE from IB, i.e. its
aggregate value replacement chain, known value ranges and known bits. */
5253
5254 static void
5255 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5256 data_in *data_in)
5257 {
5258 struct ipa_agg_replacement_value *aggvals = NULL;
5259 unsigned int count, i;
5260
5261 count = streamer_read_uhwi (ib);
for (i = 0; i < count; i++)
5263 {
5264 struct ipa_agg_replacement_value *av;
5265 struct bitpack_d bp;
5266
5267 av = ggc_alloc<ipa_agg_replacement_value> ();
5268 av->offset = streamer_read_uhwi (ib);
5269 av->index = streamer_read_uhwi (ib);
5270 av->value = stream_read_tree (ib, data_in);
5271 bp = streamer_read_bitpack (ib);
5272 av->by_ref = bp_unpack_value (&bp, 1);
5273 av->next = aggvals;
5274 aggvals = av;
5275 }
5276 ipa_set_node_agg_value_chain (node, aggvals);
5277
5278 count = streamer_read_uhwi (ib);
5279 if (count > 0)
5280 {
5281 ipcp_transformation_initialize ();
5282 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5283 vec_safe_grow_cleared (ts->m_vr, count, true);
5284 for (i = 0; i < count; i++)
5285 {
5286 ipa_vr *parm_vr;
5287 parm_vr = &(*ts->m_vr)[i];
5288 struct bitpack_d bp;
5289 bp = streamer_read_bitpack (ib);
5290 parm_vr->known = bp_unpack_value (&bp, 1);
5291 if (parm_vr->known)
5292 {
5293 parm_vr->type = streamer_read_enum (ib, value_range_kind,
5294 VR_LAST);
5295 parm_vr->min = streamer_read_wide_int (ib);
5296 parm_vr->max = streamer_read_wide_int (ib);
5297 }
5298 }
5299 }
5300 count = streamer_read_uhwi (ib);
5301 if (count > 0)
5302 {
5303 ipcp_transformation_initialize ();
5304 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5305 vec_safe_grow_cleared (ts->bits, count, true);
5306
5307 for (i = 0; i < count; i++)
5308 {
5309 struct bitpack_d bp = streamer_read_bitpack (ib);
5310 bool known = bp_unpack_value (&bp, 1);
5311 if (known)
5312 {
5313 const widest_int value = streamer_read_widest_int (ib);
5314 const widest_int mask = streamer_read_widest_int (ib);
5315 ipa_bits *bits
5316 = ipa_get_ipa_bits_for_value (value, mask);
5317 (*ts->bits)[i] = bits;
5318 }
5319 }
5320 }
5321 }
5322
/* Write the IPCP transformation summaries of all nodes in the current
LTO partition. */
5324
5325 void
5326 ipcp_write_transformation_summaries (void)
5327 {
5328 struct cgraph_node *node;
5329 struct output_block *ob;
5330 unsigned int count = 0;
5331 lto_symtab_encoder_iterator lsei;
5332 lto_symtab_encoder_t encoder;
5333
5334 ob = create_output_block (LTO_section_ipcp_transform);
5335 encoder = ob->decl_state->symtab_node_encoder;
5336 ob->symbol = NULL;
5337 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5338 lsei_next_function_in_partition (&lsei))
5339 {
5340 node = lsei_cgraph_node (lsei);
5341 if (node->has_gimple_body_p ())
5342 count++;
5343 }
5344
5345 streamer_write_uhwi (ob, count);
5346
5347 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5348 lsei_next_function_in_partition (&lsei))
5349 {
5350 node = lsei_cgraph_node (lsei);
5351 if (node->has_gimple_body_p ())
5352 write_ipcp_transformation_info (ob, node);
5353 }
5354 streamer_write_char_stream (ob->main_stream, 0);
5355 produce_asm (ob, NULL);
5356 destroy_output_block (ob);
5357 }
5358
5359 /* Read replacements section in file FILE_DATA of length LEN with data
5360 DATA. */
5361
5362 static void
5363 read_replacements_section (struct lto_file_decl_data *file_data,
5364 const char *data,
5365 size_t len)
5366 {
5367 const struct lto_function_header *header =
5368 (const struct lto_function_header *) data;
5369 const int cfg_offset = sizeof (struct lto_function_header);
5370 const int main_offset = cfg_offset + header->cfg_size;
5371 const int string_offset = main_offset + header->main_size;
5372 class data_in *data_in;
5373 unsigned int i;
5374 unsigned int count;
5375
5376 lto_input_block ib_main ((const char *) data + main_offset,
5377 header->main_size, file_data->mode_table);
5378
5379 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5380 header->string_size, vNULL);
5381 count = streamer_read_uhwi (&ib_main);
5382
5383 for (i = 0; i < count; i++)
5384 {
5385 unsigned int index;
5386 struct cgraph_node *node;
5387 lto_symtab_encoder_t encoder;
5388
5389 index = streamer_read_uhwi (&ib_main);
5390 encoder = file_data->symtab_node_encoder;
5391 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5392 index));
5393 gcc_assert (node->definition);
5394 read_ipcp_transformation_info (&ib_main, node, data_in);
5395 }
lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
len);
5398 lto_data_in_delete (data_in);
5399 }
5400
/* Read IPA-CP transformation summaries. */
5402
5403 void
5404 ipcp_read_transformation_summaries (void)
5405 {
5406 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5407 struct lto_file_decl_data *file_data;
5408 unsigned int j = 0;
5409
5410 while ((file_data = file_data_vec[j++]))
5411 {
5412 size_t len;
5413 const char *data
5414 = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
5415 &len);
5416 if (data)
5417 read_replacements_section (file_data, data, len);
5418 }
5419 }
5420
5421 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5422 NODE. */
5423
5424 static void
5425 adjust_agg_replacement_values (struct cgraph_node *node,
5426 struct ipa_agg_replacement_value *aggval)
5427 {
5428 struct ipa_agg_replacement_value *v;
5429 clone_info *cinfo = clone_info::get (node);
5430
5431 if (!cinfo || !cinfo->param_adjustments)
5432 return;
5433
5434 auto_vec<int, 16> new_indices;
5435 cinfo->param_adjustments->get_updated_indices (&new_indices);
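/* NEW_INDICES now maps original parameter indices to the indices used in
the clone; e.g. if the clone dropped the first parameter, a replacement
recorded for parameter 1 of the original function is remapped to index 0
(hypothetical values, for illustration only). */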
5436 for (v = aggval; v; v = v->next)
5437 {
5438 gcc_checking_assert (v->index >= 0);
5439
5440 if ((unsigned) v->index < new_indices.length ())
5441 v->index = new_indices[v->index];
5442 else
5443 /* This can happen if we know about a constant passed by reference by
5444 an argument which is never actually used for anything, let alone
5445 loading that constant. */
5446 v->index = -1;
5447 }
5448 }
5449
5450 /* Dominator walker driving the ipcp modification phase. */
5451
5452 class ipcp_modif_dom_walker : public dom_walker
5453 {
5454 public:
5455 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5456 vec<ipa_param_descriptor, va_gc> *descs,
5457 struct ipa_agg_replacement_value *av,
5458 bool *sc, bool *cc)
5459 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5460 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5461
5462 virtual edge before_dom_children (basic_block);
5463
5464 private:
5465 struct ipa_func_body_info *m_fbi;
5466 vec<ipa_param_descriptor, va_gc> *m_descriptors;
5467 struct ipa_agg_replacement_value *m_aggval;
5468 bool *m_something_changed, *m_cfg_changed;
5469 };
5470
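/* Scan the statements of BB and replace loads from tracked aggregate
locations of parameters with the constants recorded for this clone in
m_aggval. For instance, given a hypothetical aggregate replacement
0[16]=7 for a by-reference parameter p, a load

_1 = MEM[(int *)p_2(D) + 16B];

would be rewritten to

_1 = 7;

Return NULL so that all dominated blocks are walked normally. */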
5471 edge
5472 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5473 {
5474 gimple_stmt_iterator gsi;
5475 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5476 {
5477 struct ipa_agg_replacement_value *v;
5478 gimple *stmt = gsi_stmt (gsi);
5479 tree rhs, val, t;
5480 HOST_WIDE_INT offset;
5481 poly_int64 size;
5482 int index;
5483 bool by_ref, vce;
5484
5485 if (!gimple_assign_load_p (stmt))
5486 continue;
5487 rhs = gimple_assign_rhs1 (stmt);
5488 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5489 continue;
5490
5491 vce = false;
5492 t = rhs;
5493 while (handled_component_p (t))
5494 {
5495 /* V_C_E can do things like convert an array of integers into one
5496 bigger integer, and similar things that the code below does not handle. */
5497 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5498 {
5499 vce = true;
5500 break;
5501 }
5502 t = TREE_OPERAND (t, 0);
5503 }
5504 if (vce)
5505 continue;
5506
5507 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5508 &offset, &size, &by_ref))
5509 continue;
5510 for (v = m_aggval; v; v = v->next)
5511 if (v->index == index
5512 && v->offset == offset)
5513 break;
5514 if (!v
5515 || v->by_ref != by_ref
5516 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
5517 size))
5518 continue;
5519
5520 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5521 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5522 {
5523 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5524 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5525 else if (TYPE_SIZE (TREE_TYPE (rhs))
5526 == TYPE_SIZE (TREE_TYPE (v->value)))
5527 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5528 else
5529 {
5530 if (dump_file)
5531 {
5532 fprintf (dump_file, " const ");
5533 print_generic_expr (dump_file, v->value);
5534 fprintf (dump_file, " can't be converted to type of ");
5535 print_generic_expr (dump_file, rhs);
5536 fprintf (dump_file, "\n");
5537 }
5538 continue;
5539 }
5540 }
5541 else
5542 val = v->value;
5543
5544 if (dump_file && (dump_flags & TDF_DETAILS))
5545 {
5546 fprintf (dump_file, "Modifying stmt:\n ");
5547 print_gimple_stmt (dump_file, stmt, 0);
5548 }
5549 gimple_assign_set_rhs_from_tree (&gsi, val);
5550 update_stmt (stmt);
5551
5552 if (dump_file && (dump_flags & TDF_DETAILS))
5553 {
5554 fprintf (dump_file, "into:\n ");
5555 print_gimple_stmt (dump_file, stmt, 0);
5556 fprintf (dump_file, "\n");
5557 }
5558
5559 *m_something_changed = true;
5560 if (maybe_clean_eh_stmt (stmt)
5561 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5562 *m_cfg_changed = true;
5563 }
5564 return NULL;
5565 }
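
/* A minimal sketch of the rewrite performed by the walker above, on
   hypothetical GIMPLE, assuming IPA-CP recorded that the aggregate pointed
   to by parameter p always holds 42 at the offset of "field":

     before:  _1 = p_2(D)->field;
     after:   _1 = 42;

   The statement is updated in place; if it could previously throw, the
   now-dead EH edges are purged and the CFG change is recorded.  */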
5566
5567 /* Return true if we have recorded known-bits information about PARM; if so,
5568 set VALUE and MASK accordingly. */
5569
5570 bool
5571 ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
5572 {
5573 cgraph_node *cnode = cgraph_node::get (current_function_decl);
5574 ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
5575 if (!ts || vec_safe_length (ts->bits) == 0)
5576 return false;
5577
5578 int i = 0;
5579 for (tree p = DECL_ARGUMENTS (current_function_decl);
5580 p != parm; p = DECL_CHAIN (p))
5581 {
5582 i++;
5583 /* Bail out if PARM is the static chain, which is not in DECL_ARGUMENTS. */
5584 if (!p)
5585 return false;
5586 }
5587
5588 clone_info *cinfo = clone_info::get (cnode);
5589 if (cinfo && cinfo->param_adjustments)
5590 {
5591 i = cinfo->param_adjustments->get_original_index (i);
5592 if (i < 0)
5593 return false;
5594 }
5595
5596 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5597 if (!bits[i])
5598 return false;
5599 *mask = bits[i]->mask;
5600 *value = wide_int_to_tree (TREE_TYPE (parm), bits[i]->value);
5601 return true;
5602 }
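
/* A hypothetical usage sketch (not actual GCC code): a local pass that has a
   PARM_DECL in hand can query the IPA-CP bit information like this:

     tree value;
     widest_int mask;
     if (ipcp_get_parm_bits (parm, &value, &mask))
       {
         ... bits set in MASK are unknown; all other bits of the parameter
             are known to equal the corresponding bits of VALUE ...
       }
*/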
5603
5604
5605 /* Update the known-bits information of the formal parameters of NODE as
5606 described in its ipcp_transformation summary. */
5607
5608 static void
5609 ipcp_update_bits (struct cgraph_node *node)
5610 {
5611 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5612
5613 if (!ts || vec_safe_length (ts->bits) == 0)
5614 return;
5615 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5616 unsigned count = bits.length ();
5617 if (!count)
5618 return;
5619
5620 auto_vec<int, 16> new_indices;
5621 bool need_remapping = false;
5622 clone_info *cinfo = clone_info::get (node);
5623 if (cinfo && cinfo->param_adjustments)
5624 {
5625 cinfo->param_adjustments->get_updated_indices (&new_indices);
5626 need_remapping = true;
5627 }
5628 auto_vec <tree, 16> parm_decls;
5629 push_function_arg_decls (&parm_decls, node->decl);
5630
5631 for (unsigned i = 0; i < count; ++i)
5632 {
5633 tree parm;
5634 if (need_remapping)
5635 {
5636 if (i >= new_indices.length ())
5637 continue;
5638 int idx = new_indices[i];
5639 if (idx < 0)
5640 continue;
5641 parm = parm_decls[idx];
5642 }
5643 else
5644 parm = parm_decls[i];
5645 gcc_checking_assert (parm);
5646
5647
5648 if (!bits[i]
5649 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
5650 || POINTER_TYPE_P (TREE_TYPE (parm)))
5651 || !is_gimple_reg (parm))
5652 continue;
5653
5654 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5655 if (!ddef)
5656 continue;
5657
5658 if (dump_file)
5659 {
5660 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5661 print_hex (bits[i]->mask, dump_file);
5662 fprintf (dump_file, "\n");
5663 }
5664
5665 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5666 {
5667 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5668 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5669
5670 wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
5671 | wide_int::from (bits[i]->value, prec, sgn);
5672 set_nonzero_bits (ddef, nonzero_bits);
5673 }
5674 else
5675 {
5676 unsigned tem = bits[i]->mask.to_uhwi ();
5677 unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
5678 unsigned align = tem & -tem;
5679 unsigned misalign = bitpos & (align - 1);
5680
5681 if (align > 1)
5682 {
5683 if (dump_file)
5684 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5685
5686 unsigned old_align, old_misalign;
5687 struct ptr_info_def *pi = get_ptr_info (ddef);
5688 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5689
5690 if (old_known
5691 && old_align > align)
5692 {
5693 if (dump_file)
5694 {
5695 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5696 if ((old_misalign & (align - 1)) != misalign)
5697 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5698 old_misalign, misalign);
5699 }
5700 continue;
5701 }
5702
5703 if (old_known
5704 && ((misalign & (old_align - 1)) != old_misalign)
5705 && dump_file)
5706 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5707 old_misalign, misalign);
5708
5709 set_ptr_info_alignment (pi, align, misalign);
5710 }
5711 }
5712 }
5713 }
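
/* A worked example for the pointer branch above (illustrative numbers only):
   a mask of 0xfffffff0 means the low four bits of the pointer are known, so
   align = mask & -mask = 16.  If the recorded value has 0x4 in those bits,
   the pointer is known to equal 4 modulo 16, i.e. align 16 with misalign 4,
   which is exactly what set_ptr_info_alignment receives.  */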
5714
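/* Return true if the recorded range cannot contain zero, interpreting the
   stored bounds in EXPR_TYPE.  That is the case for the anti-range ~[0, 0]
   and, for unsigned types, for the range [1, TYPE_MAX].  */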
5715 bool
5716 ipa_vr::nonzero_p (tree expr_type) const
5717 {
5718 if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0))
5719 return true;
5720
5721 unsigned prec = TYPE_PRECISION (expr_type);
5722 return (type == VR_RANGE
5723 && TYPE_UNSIGNED (expr_type)
5724 && wi::eq_p (min, wi::one (prec))
5725 && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type))));
5726 }
5727
5728 /* Update the value ranges of the formal parameters of NODE as described in
5729 its ipcp_transformation summary. */
5730
5731 static void
5732 ipcp_update_vr (struct cgraph_node *node)
5733 {
5734 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5735 if (!ts || vec_safe_length (ts->m_vr) == 0)
5736 return;
5737 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5738 unsigned count = vr.length ();
5739 if (!count)
5740 return;
5741
5742 auto_vec<int, 16> new_indices;
5743 bool need_remapping = false;
5744 clone_info *cinfo = clone_info::get (node);
5745 if (cinfo && cinfo->param_adjustments)
5746 {
5747 cinfo->param_adjustments->get_updated_indices (&new_indices);
5748 need_remapping = true;
5749 }
5750 auto_vec <tree, 16> parm_decls;
5751 push_function_arg_decls (&parm_decls, node->decl);
5752
5753 for (unsigned i = 0; i < count; ++i)
5754 {
5755 tree parm;
5756 int remapped_idx;
5757 if (need_remapping)
5758 {
5759 if (i >= new_indices.length ())
5760 continue;
5761 remapped_idx = new_indices[i];
5762 if (remapped_idx < 0)
5763 continue;
5764 }
5765 else
5766 remapped_idx = i;
5767
5768 parm = parm_decls[remapped_idx];
5769
5770 gcc_checking_assert (parm);
5771 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5772
5773 if (!ddef || !is_gimple_reg (parm))
5774 continue;
5775
5776 if (vr[i].known
5777 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5778 {
5779 tree type = TREE_TYPE (ddef);
5780 unsigned prec = TYPE_PRECISION (type);
5781 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5782 {
5783 if (dump_file)
5784 {
5785 fprintf (dump_file, "Setting value range of param %u "
5786 "(now %i) ", i, remapped_idx);
5787 fprintf (dump_file, "%s[",
5788 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5789 print_decs (vr[i].min, dump_file);
5790 fprintf (dump_file, ", ");
5791 print_decs (vr[i].max, dump_file);
5792 fprintf (dump_file, "]\n");
5793 }
5794 set_range_info (ddef, vr[i].type,
5795 wide_int_storage::from (vr[i].min, prec,
5796 TYPE_SIGN (type)),
5797 wide_int_storage::from (vr[i].max, prec,
5798 TYPE_SIGN (type)));
5799 }
5800 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5801 && vr[i].nonzero_p (TREE_TYPE (ddef)))
5802 {
5803 if (dump_file)
5804 fprintf (dump_file, "Setting nonnull for %u\n", i);
5805 set_ptr_nonnull (ddef);
5806 }
5807 }
5808 }
5809 }
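
/* A hypothetical example of the effect of ipcp_update_vr: if IPA-CP proved
   that every caller passes a value in [0, 15] for the first parameter, the
   dump would contain

     Setting value range of param 0 (now 0) [0, 15]

   and the range is attached to the parameter's default-def SSA name, where
   later intraprocedural passes such as VRP can exploit it.  */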
5810
5811 /* The IPA-CP transformation phase: apply the recorded known bits, value ranges and aggregate replacements to the body of NODE. */
5812
5813 unsigned int
5814 ipcp_transform_function (struct cgraph_node *node)
5815 {
5816 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
5817 struct ipa_func_body_info fbi;
5818 struct ipa_agg_replacement_value *aggval;
5819 int param_count;
5820 bool cfg_changed = false, something_changed = false;
5821
5822 gcc_checking_assert (cfun);
5823 gcc_checking_assert (current_function_decl);
5824
5825 if (dump_file)
5826 fprintf (dump_file, "Modification phase of node %s\n",
5827 node->dump_name ());
5828
5829 ipcp_update_bits (node);
5830 ipcp_update_vr (node);
5831 aggval = ipa_get_agg_replacements_for_node (node);
5832 if (!aggval)
5833 return 0;
5834 param_count = count_formal_params (node->decl);
5835 if (param_count == 0)
5836 return 0;
5837 adjust_agg_replacement_values (node, aggval);
5838 if (dump_file)
5839 ipa_dump_agg_replacement_values (dump_file, aggval);
5840
5841 fbi.node = node;
5842 fbi.info = NULL;
5843 fbi.bb_infos = vNULL;
5844 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
5845 fbi.param_count = param_count;
5846 fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
5847
5848 vec_safe_grow_cleared (descriptors, param_count, true);
5849 ipa_populate_param_decls (node, *descriptors);
5850 calculate_dominance_info (CDI_DOMINATORS);
5851 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5852 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5853
5854 int i;
5855 struct ipa_bb_info *bi;
5856 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5857 free_ipa_bb_info (bi);
5858 fbi.bb_infos.release ();
5859 free_dominance_info (CDI_DOMINATORS);
5860
5861 ipcp_transformation *s = ipcp_transformation_sum->get (node);
5862 s->agg_values = NULL;
5863 s->bits = NULL;
5864 s->m_vr = NULL;
5865
5866 vec_free (descriptors);
5867
5868 if (!something_changed)
5869 return 0;
5870
5871 if (cfg_changed)
5872 delete_unreachable_blocks_update_callgraph (node, false);
5873
5874 return TODO_update_ssa_only_virtuals;
5875 }
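
/* A hypothetical end-to-end sketch of this transformation phase: given

     static int f (struct S *p) { return p->x + 1; }

   called only as f (&g) where g.x is known to be 7, IPA-CP records an
   aggregate replacement (index 0, offset of x, value 7), and the dominator
   walk above rewrites the load of p->x into the constant 7, after which
   ordinary folding can reduce the function body to return 8.  */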
5876
5877
5878 /* Return true if OTHER describes the same aggregate value. */
5879 bool
5880 ipa_agg_value::equal_to (const ipa_agg_value &other)
5881 {
5882 return offset == other.offset
5883 && operand_equal_p (value, other.value, 0);
5884 }
5885
5886 /* Destructor which also releases the individual aggregate values. */
5887
5888 ipa_auto_call_arg_values::~ipa_auto_call_arg_values ()
5889 {
5890 ipa_release_agg_values (m_known_aggs, false);
5891 }
5892
5893
5894
5895 #include "gt-ipa-prop.h"