1 /* Interprocedural analyses.
2 Copyright (C) 2005-2018 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "tree-streamer.h"
31 #include "cgraph.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-fold.h"
35 #include "tree-eh.h"
36 #include "calls.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
44 #include "ipa-prop.h"
45 #include "tree-cfg.h"
46 #include "tree-dfa.h"
47 #include "tree-inline.h"
48 #include "ipa-fnsummary.h"
49 #include "gimple-pretty-print.h"
50 #include "params.h"
51 #include "ipa-utils.h"
52 #include "dbgcnt.h"
53 #include "domwalk.h"
54 #include "builtins.h"
55
56 /* Function summary where the parameter infos are actually stored. */
57 ipa_node_params_t *ipa_node_params_sum = NULL;
58
59 function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;
60
61 /* Edge summary for IPA-CP edge information. */
62 ipa_edge_args_sum_t *ipa_edge_args_sum;
63
64 /* Traits for a hash table for reusing already existing ipa_bits. */
65
66 struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
67 {
68 typedef ipa_bits *value_type;
69 typedef ipa_bits *compare_type;
70 static hashval_t
71 hash (const ipa_bits *p)
72 {
73 hashval_t t = (hashval_t) p->value.to_shwi ();
74 return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
75 }
76 static bool
77 equal (const ipa_bits *a, const ipa_bits *b)
78 {
79 return a->value == b->value && a->mask == b->mask;
80 }
81 static void
82 mark_empty (ipa_bits *&p)
83 {
84 p = NULL;
85 }
86 static bool
87 is_empty (const ipa_bits *p)
88 {
89 return p == NULL;
90 }
91 static bool
92 is_deleted (const ipa_bits *p)
93 {
94 return p == reinterpret_cast<const ipa_bits *> (1);
95 }
96 static void
97 mark_deleted (ipa_bits *&p)
98 {
99 p = reinterpret_cast<ipa_bits *> (1);
100 }
101 };
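
/* A minimal usage sketch (an editorial illustration, not original code): with
   traits like the above, NULL and the address 1 never point to a live
   ipa_bits, so hash_table can use them as its "empty" and "deleted" slot
   markers.  ipa_get_ipa_bits_for_value below follows exactly this pattern:

     ipa_bits key;
     key.value = value;
     key.mask = mask;
     ipa_bits **slot = ipa_bits_hash_table->find_slot (&key, INSERT);
     if (!*slot)				// slot was "empty", i.e. NULL
       *slot = ggc_alloc<ipa_bits> ();		// then fill in value and mask
     return *slot;  */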
102
103 /* Hash table to avoid repeated allocations of equal ipa_bits. */
104 static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
105
106 /* Traits for a hash table for reusing value_ranges used for IPA. Note that
107 the equiv bitmap is not hashed and is expected to be NULL. */
108
109 struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
110 {
111 typedef value_range *value_type;
112 typedef value_range *compare_type;
113 static hashval_t
114 hash (const value_range *p)
115 {
116 gcc_checking_assert (!p->equiv);
117 inchash::hash hstate (p->type);
118 hstate.add_ptr (p->min);
119 hstate.add_ptr (p->max);
120 return hstate.end ();
121 }
122 static bool
123 equal (const value_range *a, const value_range *b)
124 {
125 return a->type == b->type && a->min == b->min && a->max == b->max;
126 }
127 static void
128 mark_empty (value_range *&p)
129 {
130 p = NULL;
131 }
132 static bool
133 is_empty (const value_range *p)
134 {
135 return p == NULL;
136 }
137 static bool
138 is_deleted (const value_range *p)
139 {
140 return p == reinterpret_cast<const value_range *> (1);
141 }
142 static void
143 mark_deleted (value_range *&p)
144 {
145 p = reinterpret_cast<value_range *> (1);
146 }
147 };
148
149 /* Hash table to avoid repeated allocations of equal value_ranges. */
150 static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
151
152 /* Holders of ipa cgraph hooks: */
153 static struct cgraph_node_hook_list *function_insertion_hook_holder;
154
155 /* Description of a reference to an IPA constant. */
156 struct ipa_cst_ref_desc
157 {
158 /* Edge that corresponds to the statement which took the reference. */
159 struct cgraph_edge *cs;
160 /* Linked list of duplicates created when call graph edges are cloned. */
161 struct ipa_cst_ref_desc *next_duplicate;
162 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
163 is out of control. */
164 int refcount;
165 };
166
167 /* Allocation pool for reference descriptions. */
168
169 static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
170 ("IPA-PROP ref descriptions");
171
172 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
173 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
174
175 static bool
176 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
177 {
178 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
179
180 if (!fs_opts)
181 return false;
182 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
183 }
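
/* For example (an editorial illustration), a function compiled as

     __attribute__ ((optimize ("O0")))
     int f (int x) { return x; }

   carries DECL_FUNCTION_SPECIFIC_OPTIMIZATION in which both optimize and
   flag_ipa_cp are clear, so the predicate above returns true and IPA-CP
   leaves the function alone.  */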
184
185 /* Return the index of the formal parameter whose tree is PTREE, looked up in
186 the vector of parameter DESCRIPTORS, or -1 if it is not found. */
187
188 static int
189 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
190 tree ptree)
191 {
192 int i, count;
193
194 count = vec_safe_length (descriptors);
195 for (i = 0; i < count; i++)
196 if ((*descriptors)[i].decl_or_type == ptree)
197 return i;
198
199 return -1;
200 }
201
202 /* Return the index of the formal parameter whose tree is PTREE in the
203 function which corresponds to INFO. */
204
205 int
206 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
207 {
208 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
209 }
210
211 /* Populate the decl_or_type fields in the parameter DESCRIPTORS that
212 correspond to NODE. */
213
214 static void
215 ipa_populate_param_decls (struct cgraph_node *node,
216 vec<ipa_param_descriptor, va_gc> &descriptors)
217 {
218 tree fndecl;
219 tree fnargs;
220 tree parm;
221 int param_num;
222
223 fndecl = node->decl;
224 gcc_assert (gimple_has_body_p (fndecl));
225 fnargs = DECL_ARGUMENTS (fndecl);
226 param_num = 0;
227 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
228 {
229 descriptors[param_num].decl_or_type = parm;
230 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
231 true);
232 param_num++;
233 }
234 }
235
236 /* Return how many formal parameters FNDECL has. */
237
238 int
239 count_formal_params (tree fndecl)
240 {
241 tree parm;
242 int count = 0;
243 gcc_assert (gimple_has_body_p (fndecl));
244
245 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
246 count++;
247
248 return count;
249 }
250
251 /* Dump the Ith formal parameter of the function corresponding to INFO into
252 FILE. The descriptors array read here is built just once using
253 ipa_initialize_node_params. */
254
255 void
256 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
257 {
258 fprintf (file, "param #%i", i);
259 if ((*info->descriptors)[i].decl_or_type)
260 {
261 fprintf (file, " ");
262 print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
263 }
264 }
265
266 /* If necessary, allocate vector of parameter descriptors in info of NODE.
267 Return true if they were allocated, false if not. */
268
269 static bool
270 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
271 {
272 struct ipa_node_params *info = IPA_NODE_REF (node);
273
274 if (!info->descriptors && param_count)
275 {
276 vec_safe_grow_cleared (info->descriptors, param_count);
277 return true;
278 }
279 else
280 return false;
281 }
282
283 /* Initialize the ipa_node_params structure associated with NODE by counting
284 the function parameters, creating the descriptors and populating their
285 param_decls. */
286
287 void
288 ipa_initialize_node_params (struct cgraph_node *node)
289 {
290 struct ipa_node_params *info = IPA_NODE_REF (node);
291
292 if (!info->descriptors
293 && ipa_alloc_node_params (node, count_formal_params (node->decl)))
294 ipa_populate_param_decls (node, *info->descriptors);
295 }
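
/* As an editorial illustration, after ipa_initialize_node_params runs on the
   node of

     int foo (int a, struct S s);

   info->descriptors holds two entries whose decl_or_type fields are the
   PARM_DECLs of A and S and whose move_cost fields come from
   estimate_move_cost on the respective parameter types.  */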
296
297 /* Print the jump functions associated with call graph edge CS to file F. */
298
299 static void
300 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
301 {
302 int i, count;
303
304 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
305 for (i = 0; i < count; i++)
306 {
307 struct ipa_jump_func *jump_func;
308 enum jump_func_type type;
309
310 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
311 type = jump_func->type;
312
313 fprintf (f, " param %d: ", i);
314 if (type == IPA_JF_UNKNOWN)
315 fprintf (f, "UNKNOWN\n");
316 else if (type == IPA_JF_CONST)
317 {
318 tree val = jump_func->value.constant.value;
319 fprintf (f, "CONST: ");
320 print_generic_expr (f, val);
321 if (TREE_CODE (val) == ADDR_EXPR
322 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
323 {
324 fprintf (f, " -> ");
325 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
326 }
327 fprintf (f, "\n");
328 }
329 else if (type == IPA_JF_PASS_THROUGH)
330 {
331 fprintf (f, "PASS THROUGH: ");
332 fprintf (f, "%d, op %s",
333 jump_func->value.pass_through.formal_id,
334 get_tree_code_name(jump_func->value.pass_through.operation));
335 if (jump_func->value.pass_through.operation != NOP_EXPR)
336 {
337 fprintf (f, " ");
338 print_generic_expr (f, jump_func->value.pass_through.operand);
339 }
340 if (jump_func->value.pass_through.agg_preserved)
341 fprintf (f, ", agg_preserved");
342 fprintf (f, "\n");
343 }
344 else if (type == IPA_JF_ANCESTOR)
345 {
346 fprintf (f, "ANCESTOR: ");
347 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
348 jump_func->value.ancestor.formal_id,
349 jump_func->value.ancestor.offset);
350 if (jump_func->value.ancestor.agg_preserved)
351 fprintf (f, ", agg_preserved");
352 fprintf (f, "\n");
353 }
354
355 if (jump_func->agg.items)
356 {
357 struct ipa_agg_jf_item *item;
358 int j;
359
360 fprintf (f, " Aggregate passed by %s:\n",
361 jump_func->agg.by_ref ? "reference" : "value");
362 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
363 {
364 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
365 item->offset);
366 if (TYPE_P (item->value))
367 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
368 tree_to_uhwi (TYPE_SIZE (item->value)));
369 else
370 {
371 fprintf (f, "cst: ");
372 print_generic_expr (f, item->value);
373 }
374 fprintf (f, "\n");
375 }
376 }
377
378 struct ipa_polymorphic_call_context *ctx
379 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
380 if (ctx && !ctx->useless_p ())
381 {
382 fprintf (f, " Context: ");
383 ctx->dump (dump_file);
384 }
385
386 if (jump_func->bits)
387 {
388 fprintf (f, " value: ");
389 print_hex (jump_func->bits->value, f);
390 fprintf (f, ", mask: ");
391 print_hex (jump_func->bits->mask, f);
392 fprintf (f, "\n");
393 }
394 else
395 fprintf (f, " Unknown bits\n");
396
397 if (jump_func->m_vr)
398 {
399 fprintf (f, " VR ");
400 fprintf (f, "%s[",
401 (jump_func->m_vr->type == VR_ANTI_RANGE) ? "~" : "");
402 print_decs (wi::to_wide (jump_func->m_vr->min), f);
403 fprintf (f, ", ");
404 print_decs (wi::to_wide (jump_func->m_vr->max), f);
405 fprintf (f, "]\n");
406 }
407 else
408 fprintf (f, " Unknown VR\n");
409 }
410 }
411
412
413 /* Print the jump functions of all arguments on all call graph edges going from
414 NODE to file F. */
415
416 void
417 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
418 {
419 struct cgraph_edge *cs;
420
421 fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
422 for (cs = node->callees; cs; cs = cs->next_callee)
423 {
424 if (!ipa_edge_args_info_available_for_edge_p (cs))
425 continue;
426
427 fprintf (f, " callsite %s -> %s : \n",
428 node->dump_name (),
429 cs->callee->dump_name ());
430 ipa_print_node_jump_functions_for_edge (f, cs);
431 }
432
433 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
434 {
435 struct cgraph_indirect_call_info *ii;
436 if (!ipa_edge_args_info_available_for_edge_p (cs))
437 continue;
438
439 ii = cs->indirect_info;
440 if (ii->agg_contents)
441 fprintf (f, " indirect %s callsite, calling param %i, "
442 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
443 ii->member_ptr ? "member ptr" : "aggregate",
444 ii->param_index, ii->offset,
445 ii->by_ref ? "by reference" : "by value");
446 else
447 fprintf (f, " indirect %s callsite, calling param %i, "
448 "offset " HOST_WIDE_INT_PRINT_DEC,
449 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
450 ii->offset);
451
452 if (cs->call_stmt)
453 {
454 fprintf (f, ", for stmt ");
455 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
456 }
457 else
458 fprintf (f, "\n");
459 if (ii->polymorphic)
460 ii->context.dump (f);
461 ipa_print_node_jump_functions_for_edge (f, cs);
462 }
463 }
464
465 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
466
467 void
468 ipa_print_all_jump_functions (FILE *f)
469 {
470 struct cgraph_node *node;
471
472 fprintf (f, "\nJump functions:\n");
473 FOR_EACH_FUNCTION (node)
474 {
475 ipa_print_node_jump_functions (f, node);
476 }
477 }
478
479 /* Set JFUNC to be a jump function which knows nothing about the argument. */
480
481 static void
482 ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
483 {
484 jfunc->type = IPA_JF_UNKNOWN;
485 jfunc->bits = NULL;
486 jfunc->m_vr = NULL;
487 }
488
489 /* Set DST to be a copy of another jump function SRC (to be used by jump
490 function combination code). The two functions will share their rdesc. */
491
492 static void
493 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
494 struct ipa_jump_func *src)
495
496 {
497 gcc_checking_assert (src->type == IPA_JF_CONST);
498 dst->type = IPA_JF_CONST;
499 dst->value.constant = src->value.constant;
500 }
501
502 /* Set JFUNC to be a constant jump function. */
503
504 static void
505 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
506 struct cgraph_edge *cs)
507 {
508 jfunc->type = IPA_JF_CONST;
509 jfunc->value.constant.value = unshare_expr_without_location (constant);
510
511 if (TREE_CODE (constant) == ADDR_EXPR
512 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
513 {
514 struct ipa_cst_ref_desc *rdesc;
515
516 rdesc = ipa_refdesc_pool.allocate ();
517 rdesc->cs = cs;
518 rdesc->next_duplicate = NULL;
519 rdesc->refcount = 1;
520 jfunc->value.constant.rdesc = rdesc;
521 }
522 else
523 jfunc->value.constant.rdesc = NULL;
524 }
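
/* Editorial note: for a call bar (&foo) where FOO is a function, the jump
   function above stores &foo together with a freshly allocated
   ipa_cst_ref_desc, so that the caller's reference to FOO can later be
   removed if constant propagation makes the use disappear; a plain scalar
   constant such as bar (42) gets a NULL rdesc.  */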
525
526 /* Set JFUNC to be a simple pass-through jump function. */
527 static void
528 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
529 bool agg_preserved)
530 {
531 jfunc->type = IPA_JF_PASS_THROUGH;
532 jfunc->value.pass_through.operand = NULL_TREE;
533 jfunc->value.pass_through.formal_id = formal_id;
534 jfunc->value.pass_through.operation = NOP_EXPR;
535 jfunc->value.pass_through.agg_preserved = agg_preserved;
536 }
537
538 /* Set JFUNC to be a unary pass-through jump function. */
539
540 static void
541 ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
542 enum tree_code operation)
543 {
544 jfunc->type = IPA_JF_PASS_THROUGH;
545 jfunc->value.pass_through.operand = NULL_TREE;
546 jfunc->value.pass_through.formal_id = formal_id;
547 jfunc->value.pass_through.operation = operation;
548 jfunc->value.pass_through.agg_preserved = false;
549 }
550 /* Set JFUNC to be an arithmetic pass-through jump function. */
551
552 static void
553 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
554 tree operand, enum tree_code operation)
555 {
556 jfunc->type = IPA_JF_PASS_THROUGH;
557 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
558 jfunc->value.pass_through.formal_id = formal_id;
559 jfunc->value.pass_through.operation = operation;
560 jfunc->value.pass_through.agg_preserved = false;
561 }
562
563 /* Set JFUNC to be an ancestor jump function. */
564
565 static void
566 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
567 int formal_id, bool agg_preserved)
568 {
569 jfunc->type = IPA_JF_ANCESTOR;
570 jfunc->value.ancestor.formal_id = formal_id;
571 jfunc->value.ancestor.offset = offset;
572 jfunc->value.ancestor.agg_preserved = agg_preserved;
573 }
574
575 /* Get IPA BB information about the given BB. FBI is the context of analysis
576 of this function body. */
577
578 static struct ipa_bb_info *
579 ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
580 {
581 gcc_checking_assert (fbi);
582 return &fbi->bb_infos[bb->index];
583 }
584
585 /* Structure to be passed in between detect_type_change and
586 check_stmt_for_type_change. */
587
588 struct prop_type_change_info
589 {
590 /* Offset into the object where there is the virtual method pointer we are
591 looking for. */
592 HOST_WIDE_INT offset;
593 /* The declaration or SSA_NAME pointer of the base that we are checking for
594 type change. */
595 tree object;
596 /* Set to true if dynamic type change has been detected. */
597 bool type_maybe_changed;
598 };
599
600 /* Return true if STMT can modify a virtual method table pointer.
601
602 This function makes special assumptions about both constructors and
603 destructors which are all the functions that are allowed to alter the VMT
604 pointers. It assumes that destructors begin with assignment into all VMT
605 pointers and that constructors essentially look as follows:
606
607 1) The very first thing they do is call the constructors of ancestor
608 sub-objects that have them.
609
610 2) Then the VMT pointers of this type and all its ancestors are set to new
611 values corresponding to the type of the constructor.
612
613 3) Only afterwards is other code run, such as constructors of member
614 sub-objects and the code written by the user. Only this code may include
615 calling virtual functions, directly or indirectly.
616
617 There is no other way to call a constructor of an ancestor
618 sub-object.
619
620 This means that we do not have to care whether constructors get the correct
621 type information because they will always change it (in fact, if we define
622 the type to be given by the VMT pointer, it is undefined).
623
624 The most important fact to derive from the above is that if, for some
625 statement in section 3, we try to detect whether the dynamic type has
626 changed, we can safely ignore all calls as we examine the function body
627 backwards until we reach statements in section 2 because these calls cannot
628 be ancestor constructors or destructors (if the input is not bogus) and so
629 do not change the dynamic type (this holds true only for automatically
630 allocated objects but at the moment we devirtualize only these). We then
631 must detect that statements in section 2 change the dynamic type and can try
632 to derive the new type. That is enough and we can stop; we will never see
633 the calls into constructors of sub-objects in this code. Therefore we can
634 safely ignore all call statements that we traverse.
635 */
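
/* To make the above concrete (an editorial sketch under the usual Itanium
   C++ ABI assumptions), consider:

     struct A { virtual void f (); };
     struct B : A { B (); virtual void f (); };
     B::B () {}

   The compiled B::B first calls A::A, which stores the address of A's vtable
   into this->_vptr (section 1), then overwrites the same slot with the
   address of B's vtable (section 2), and only then would any user-written
   code run (section 3).  Walking backwards from a statement in section 3,
   the first VMT store encountered therefore determines the dynamic type.  */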
636
637 static bool
638 stmt_may_be_vtbl_ptr_store (gimple *stmt)
639 {
640 if (is_gimple_call (stmt))
641 return false;
642 if (gimple_clobber_p (stmt))
643 return false;
644 else if (is_gimple_assign (stmt))
645 {
646 tree lhs = gimple_assign_lhs (stmt);
647
648 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
649 {
650 if (flag_strict_aliasing
651 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
652 return false;
653
654 if (TREE_CODE (lhs) == COMPONENT_REF
655 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
656 return false;
657 /* In the future we might want to use get_ref_base_and_extent to find
658 if there is a field corresponding to the offset and if so, proceed
659 almost as if it were a component ref. */
660 }
661 }
662 return true;
663 }
664
665 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
666 to check whether a particular statement may modify the virtual table
667 pointer. It stores its result into DATA, which points to a
668 prop_type_change_info structure. */
669
670 static bool
671 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
672 {
673 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
674 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
675
676 if (stmt_may_be_vtbl_ptr_store (stmt))
677 {
678 tci->type_maybe_changed = true;
679 return true;
680 }
681 else
682 return false;
683 }
684
685 /* See if ARG is a PARM_DECL describing an instance passed by pointer
686 or reference in FUNCTION. Return true if the dynamic type may change
687 between the beginning of the function and the point where CALL is invoked.
688
689 Generally functions are not allowed to change the type of such instances,
690 but they do call destructors. We assume that methods cannot destroy the
691 THIS pointer. Also, as a special case, constructors and destructors may
692 change the type of the THIS pointer. */
693
694 static bool
695 param_type_may_change_p (tree function, tree arg, gimple *call)
696 {
697 /* Pure functions cannot change the dynamic type;
698 that would require writing to memory. */
699 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
700 return false;
701 /* We need to check whether we are within an inlined constructor
702 or destructor (ideally we would have a way to check that the
703 inlined cdtor is actually working on ARG, but we don't have an
704 easy way to tie this down, so punt on all non-pure cdtors).
705 We may also record the types of cdtors and, once we know the type
706 of the instance, match them.
707
708 Also code unification optimizations may merge calls from
709 different blocks making return values unreliable. So
710 do nothing during late optimization. */
711 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
712 return true;
713 if (TREE_CODE (arg) == SSA_NAME
714 && SSA_NAME_IS_DEFAULT_DEF (arg)
715 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
716 {
717 /* Normal (non-THIS) argument. */
718 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
719 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
720 /* THIS pointer of a method - here we want to watch constructors
721 and destructors as those definitely may change the dynamic
722 type. */
723 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
724 && !DECL_CXX_CONSTRUCTOR_P (function)
725 && !DECL_CXX_DESTRUCTOR_P (function)
726 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
727 {
728 /* Walk the inline stack and watch out for ctors/dtors. */
729 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
730 block = BLOCK_SUPERCONTEXT (block))
731 if (inlined_polymorphic_ctor_dtor_block_p (block, false))
732 return true;
733 return false;
734 }
735 }
736 return true;
737 }
738
739 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
740 callsite CALL) by looking for assignments to its virtual table pointer. If
741 it has, return true and fill in the jump function JFUNC with relevant type
742 information or set it to unknown. ARG is the object itself (not a pointer
743 to it, unless dereferenced). BASE is the base of the memory access as
744 returned by get_ref_base_and_extent, as is the offset.
745
746 This is a helper function for detect_type_change and detect_type_change_ssa
747 that does the heavy work, which is usually unnecessary. */
748
749 static bool
750 detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
751 gcall *call, struct ipa_jump_func *jfunc,
752 HOST_WIDE_INT offset)
753 {
754 struct prop_type_change_info tci;
755 ao_ref ao;
756 bool entry_reached = false;
757
758 gcc_checking_assert (DECL_P (arg)
759 || TREE_CODE (arg) == MEM_REF
760 || handled_component_p (arg));
761
762 comp_type = TYPE_MAIN_VARIANT (comp_type);
763
764 /* Const calls cannot call virtual methods through VMT and so type changes do
765 not matter. */
766 if (!flag_devirtualize || !gimple_vuse (call)
767 /* Be sure expected_type is polymorphic. */
768 || !comp_type
769 || TREE_CODE (comp_type) != RECORD_TYPE
770 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
771 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
772 return true;
773
774 ao_ref_init (&ao, arg);
775 ao.base = base;
776 ao.offset = offset;
777 ao.size = POINTER_SIZE;
778 ao.max_size = ao.size;
779
780 tci.offset = offset;
781 tci.object = get_base_address (arg);
782 tci.type_maybe_changed = false;
783
784 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
785 &tci, NULL, &entry_reached);
786 if (!tci.type_maybe_changed)
787 return false;
788
789 ipa_set_jf_unknown (jfunc);
790 return true;
791 }
792
793 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
794 If so, return true and fill in the jump function JFUNC with relevant type
795 information or set it to unknown. ARG is the object itself (not a pointer
796 to it, unless dereferenced). BASE is the base of the memory access as
797 returned by get_ref_base_and_extent, as is the offset. */
798
799 static bool
800 detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
801 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
802 {
803 if (!flag_devirtualize)
804 return false;
805
806 if (TREE_CODE (base) == MEM_REF
807 && !param_type_may_change_p (current_function_decl,
808 TREE_OPERAND (base, 0),
809 call))
810 return false;
811 return detect_type_change_from_memory_writes (arg, base, comp_type,
812 call, jfunc, offset);
813 }
814
815 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
816 SSA name (its dereference will become the base and the offset is assumed to
817 be zero). */
818
819 static bool
820 detect_type_change_ssa (tree arg, tree comp_type,
821 gcall *call, struct ipa_jump_func *jfunc)
822 {
823 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
824 if (!flag_devirtualize
825 || !POINTER_TYPE_P (TREE_TYPE (arg)))
826 return false;
827
828 if (!param_type_may_change_p (current_function_decl, arg, call))
829 return false;
830
831 arg = build2 (MEM_REF, ptr_type_node, arg,
832 build_int_cst (ptr_type_node, 0));
833
834 return detect_type_change_from_memory_writes (arg, arg, comp_type,
835 call, jfunc, 0);
836 }
837
838 /* Callback of walk_aliased_vdefs. Records the fact that it has been invoked
839 in the boolean variable pointed to by DATA. */
840
841 static bool
842 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
843 void *data)
844 {
845 bool *b = (bool *) data;
846 *b = true;
847 return true;
848 }
849
850 /* Return true if we have already walked so many statements in AA that we
851 should really just start giving up. */
852
853 static bool
854 aa_overwalked (struct ipa_func_body_info *fbi)
855 {
856 gcc_checking_assert (fbi);
857 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
858 }
859
860 /* Find the nearest valid aa status for the parameter specified by INDEX in a
861 block that dominates BB. */
862
863 static struct ipa_param_aa_status *
864 find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
865 int index)
866 {
867 while (true)
868 {
869 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
870 if (!bb)
871 return NULL;
872 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
873 if (!bi->param_aa_statuses.is_empty ()
874 && bi->param_aa_statuses[index].valid)
875 return &bi->param_aa_statuses[index];
876 }
877 }
878
879 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
880 structures and/or initialize the result with a dominating description as
881 necessary. */
882
883 static struct ipa_param_aa_status *
884 parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
885 int index)
886 {
887 gcc_checking_assert (fbi);
888 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
889 if (bi->param_aa_statuses.is_empty ())
890 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
891 struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
892 if (!paa->valid)
893 {
894 gcc_checking_assert (!paa->parm_modified
895 && !paa->ref_modified
896 && !paa->pt_modified);
897 struct ipa_param_aa_status *dom_paa;
898 dom_paa = find_dominating_aa_status (fbi, bb, index);
899 if (dom_paa)
900 *paa = *dom_paa;
901 else
902 paa->valid = true;
903 }
904
905 return paa;
906 }
907
908 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
909 a value known not to be modified in this function before reaching the
910 statement STMT. FBI holds information about the function that we have
911 gathered so far but which does not survive the summary building stage. */
912
913 static bool
914 parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
915 gimple *stmt, tree parm_load)
916 {
917 struct ipa_param_aa_status *paa;
918 bool modified = false;
919 ao_ref refd;
920
921 tree base = get_base_address (parm_load);
922 gcc_assert (TREE_CODE (base) == PARM_DECL);
923 if (TREE_READONLY (base))
924 return true;
925
926 /* FIXME: FBI can be NULL if we are being called from outside
927 ipa_node_analysis or ipcp_transform_function, which currently happens
928 during inlining analysis. It would be great to extend fbi's lifetime and
929 always have it. Currently, we are just not afraid of too much walking in
930 that case. */
931 if (fbi)
932 {
933 if (aa_overwalked (fbi))
934 return false;
935 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
936 if (paa->parm_modified)
937 return false;
938 }
939 else
940 paa = NULL;
941
942 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
943 ao_ref_init (&refd, parm_load);
944 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
945 &modified, NULL);
946 if (fbi)
947 fbi->aa_walked += walked;
948 if (paa && modified)
949 paa->parm_modified = true;
950 return !modified;
951 }
952
953 /* If STMT is an assignment that loads a value from a parameter declaration
954 that has not been modified, return the index of the parameter in
955 ipa_node_params. Otherwise return -1. */
956
957 static int
958 load_from_unmodified_param (struct ipa_func_body_info *fbi,
959 vec<ipa_param_descriptor, va_gc> *descriptors,
960 gimple *stmt)
961 {
962 int index;
963 tree op1;
964
965 if (!gimple_assign_single_p (stmt))
966 return -1;
967
968 op1 = gimple_assign_rhs1 (stmt);
969 if (TREE_CODE (op1) != PARM_DECL)
970 return -1;
971
972 index = ipa_get_param_decl_index_1 (descriptors, op1);
973 if (index < 0
974 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
975 return -1;
976
977 return index;
978 }
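
/* An editorial example of the pattern matched above:

     foo (int a)
     {
       int a.0;

       a.0_2 = a;		<-- STMT loads directly from PARM_DECL a
       bar (a.0_2);

   Here the function returns the index of A, provided the walk in
   parm_preserved_before_stmt_p proves that A is not written to between the
   start of FOO and STMT.  */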
979
980 /* Return true if memory reference REF (which must be a load through the
981 parameter with INDEX) loads data known to be unmodified in this function
982 before reaching statement STMT. */
983
984 static bool
985 parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
986 int index, gimple *stmt, tree ref)
987 {
988 struct ipa_param_aa_status *paa;
989 bool modified = false;
990 ao_ref refd;
991
992 /* FIXME: FBI can be NULL if we are being called from outside
993 ipa_node_analysis or ipcp_transform_function, which currently happens
994 during inlining analysis. It would be great to extend fbi's lifetime and
995 always have it. Currently, we are just not afraid of too much walking in
996 that case. */
997 if (fbi)
998 {
999 if (aa_overwalked (fbi))
1000 return false;
1001 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
1002 if (paa->ref_modified)
1003 return false;
1004 }
1005 else
1006 paa = NULL;
1007
1008 gcc_checking_assert (gimple_vuse (stmt));
1009 ao_ref_init (&refd, ref);
1010 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
1011 &modified, NULL);
1012 if (fbi)
1013 fbi->aa_walked += walked;
1014 if (paa && modified)
1015 paa->ref_modified = true;
1016 return !modified;
1017 }
1018
1019 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1020 is known to be unmodified in this function before reaching call statement
1021 CALL into which it is passed. FBI describes the function body. */
1022
1023 static bool
1024 parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
1025 gimple *call, tree parm)
1026 {
1027 bool modified = false;
1028 ao_ref refd;
1029
1030 /* It's unnecessary to calculate anything about memory contents for a const
1031 function because it is not going to use it. But do not cache the result
1032 either. Also, no such calculations for non-pointers. */
1033 if (!gimple_vuse (call)
1034 || !POINTER_TYPE_P (TREE_TYPE (parm))
1035 || aa_overwalked (fbi))
1036 return false;
1037
1038 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
1039 gimple_bb (call),
1040 index);
1041 if (paa->pt_modified)
1042 return false;
1043
1044 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1045 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1046 &modified, NULL);
1047 fbi->aa_walked += walked;
1048 if (modified)
1049 paa->pt_modified = true;
1050 return !modified;
1051 }
1052
1053 /* Return true if we can prove that OP is a memory reference loading
1054 data from an aggregate passed as a parameter.
1055
1056 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it returns
1057 false if it cannot prove that the value has not been modified before the
1058 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1059 if it cannot prove the value has not been modified, in that case it will
1060 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1061
1062 FBI and DESCRIPTORS describe parameters of the current function (but FBI
1063 can be NULL), STMT is the load statement. If the function returns true,
1064 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1065 within the aggregate and whether it is a load from a value passed by
1066 reference respectively. */
1067
1068 bool
1069 ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
1070 vec<ipa_param_descriptor, va_gc> *descriptors,
1071 gimple *stmt, tree op, int *index_p,
1072 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
1073 bool *by_ref_p, bool *guaranteed_unmodified)
1074 {
1075 int index;
1076 HOST_WIDE_INT size;
1077 bool reverse;
1078 tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);
1079
1080 if (!base)
1081 return false;
1082
1083 if (DECL_P (base))
1084 {
1085 int index = ipa_get_param_decl_index_1 (descriptors, base);
1086 if (index >= 0
1087 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1088 {
1089 *index_p = index;
1090 *by_ref_p = false;
1091 if (size_p)
1092 *size_p = size;
1093 if (guaranteed_unmodified)
1094 *guaranteed_unmodified = true;
1095 return true;
1096 }
1097 return false;
1098 }
1099
1100 if (TREE_CODE (base) != MEM_REF
1101 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1102 || !integer_zerop (TREE_OPERAND (base, 1)))
1103 return false;
1104
1105 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1106 {
1107 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1108 index = ipa_get_param_decl_index_1 (descriptors, parm);
1109 }
1110 else
1111 {
1112 /* This branch catches situations where a pointer parameter is not a
1113 gimple register, for example:
1114
1115 void hip7(S*) (struct S * p)
1116 {
1117 void (*<T2e4>) (struct S *) D.1867;
1118 struct S * p.1;
1119
1120 <bb 2>:
1121 p.1_1 = p;
1122 D.1867_2 = p.1_1->f;
1123 D.1867_2 ();
1124 gdp = &p;
1125 */
1126
1127 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1128 index = load_from_unmodified_param (fbi, descriptors, def);
1129 }
1130
1131 if (index >= 0)
1132 {
1133 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1134 if (!data_preserved && !guaranteed_unmodified)
1135 return false;
1136
1137 *index_p = index;
1138 *by_ref_p = true;
1139 if (size_p)
1140 *size_p = size;
1141 if (guaranteed_unmodified)
1142 *guaranteed_unmodified = data_preserved;
1143 return true;
1144 }
1145 return false;
1146 }
1147
1148 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1149 of an assignment statement STMT, try to determine whether we are actually
1150 handling any of the following cases and construct an appropriate jump
1151 function into JFUNC if so:
1152
1153 1) The passed value is loaded from a formal parameter which is not a gimple
1154 register (most probably because it is addressable, the value has to be
1155 scalar) and we can guarantee the value has not changed. This case can
1156 therefore be described by a simple pass-through jump function. For example:
1157
1158 foo (int a)
1159 {
1160 int a.0;
1161
1162 a.0_2 = a;
1163 bar (a.0_2);
1164
1165 2) The passed value can be described by a simple arithmetic pass-through
1166 jump function. E.g.
1167
1168 foo (int a)
1169 {
1170 int D.2064;
1171
1172 D.2064_4 = a.1(D) + 4;
1173 bar (D.2064_4);
1174
1175 This case can also occur in combination with the previous one, e.g.:
1176
1177 foo (int a, int z)
1178 {
1179 int a.0;
1180 int D.2064;
1181
1182 a.0_3 = a;
1183 D.2064_4 = a.0_3 + 4;
1184 foo (D.2064_4);
1185
1186 3) The passed value is an address of an object within another one (which
1187 is also passed by reference). Such situations are described by an ancestor
1188 jump function and describe situations such as:
1189
1190 B::foo() (struct B * const this)
1191 {
1192 struct A * D.1845;
1193
1194 D.1845_2 = &this_1(D)->D.1748;
1195 A::bar (D.1845_2);
1196
1197 INFO is the structure describing the individual parameters, accessible at
1198 different stages of IPA optimizations. PARMS_AINFO contains the information
1199 that is only needed for intraprocedural analysis. */
1200
1201 static void
1202 compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
1203 struct ipa_node_params *info,
1204 struct ipa_jump_func *jfunc,
1205 gcall *call, gimple *stmt, tree name,
1206 tree param_type)
1207 {
1208 HOST_WIDE_INT offset, size;
1209 tree op1, tc_ssa, base, ssa;
1210 bool reverse;
1211 int index;
1212
1213 op1 = gimple_assign_rhs1 (stmt);
1214
1215 if (TREE_CODE (op1) == SSA_NAME)
1216 {
1217 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1218 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1219 else
1220 index = load_from_unmodified_param (fbi, info->descriptors,
1221 SSA_NAME_DEF_STMT (op1));
1222 tc_ssa = op1;
1223 }
1224 else
1225 {
1226 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1227 tc_ssa = gimple_assign_lhs (stmt);
1228 }
1229
1230 if (index >= 0)
1231 {
1232 switch (gimple_assign_rhs_class (stmt))
1233 {
1234 case GIMPLE_BINARY_RHS:
1235 {
1236 tree op2 = gimple_assign_rhs2 (stmt);
1237 if (!is_gimple_ip_invariant (op2)
1238 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1239 != tcc_comparison)
1240 && !useless_type_conversion_p (TREE_TYPE (name),
1241 TREE_TYPE (op1))))
1242 return;
1243
1244 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1245 gimple_assign_rhs_code (stmt));
1246 break;
1247 }
1248 case GIMPLE_SINGLE_RHS:
1249 {
1250 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
1251 tc_ssa);
1252 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1253 break;
1254 }
1255 case GIMPLE_UNARY_RHS:
1256 if (is_gimple_assign (stmt)
1257 && gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
1258 && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
1259 ipa_set_jf_unary_pass_through (jfunc, index,
1260 gimple_assign_rhs_code (stmt));
1261 default:;
1262 }
1263 return;
1264 }
1265
1266 if (TREE_CODE (op1) != ADDR_EXPR)
1267 return;
1268 op1 = TREE_OPERAND (op1, 0);
1269 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1270 return;
1271 base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
1272 offset_int mem_offset;
1273 if (!base
1274 || TREE_CODE (base) != MEM_REF
1275 || !mem_ref_offset (base).is_constant (&mem_offset))
1276 return;
1277 offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1278 ssa = TREE_OPERAND (base, 0);
1279 if (TREE_CODE (ssa) != SSA_NAME
1280 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1281 || offset < 0)
1282 return;
1283
1284 /* Dynamic types are changed in constructors and destructors. */
1285 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1286 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1287 ipa_set_ancestor_jf (jfunc, offset, index,
1288 parm_ref_data_pass_through_p (fbi, index, call, ssa));
1289 }
1290
1291 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1292 it looks like:
1293
1294 iftmp.1_3 = &obj_2(D)->D.1762;
1295
1296 The base of the MEM_REF must be a default definition SSA NAME of a
1297 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1298 whole MEM_REF expression is returned and the offset calculated from any
1299 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1300 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1301
1302 static tree
1303 get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
1304 {
1305 HOST_WIDE_INT size;
1306 tree expr, parm, obj;
1307 bool reverse;
1308
1309 if (!gimple_assign_single_p (assign))
1310 return NULL_TREE;
1311 expr = gimple_assign_rhs1 (assign);
1312
1313 if (TREE_CODE (expr) != ADDR_EXPR)
1314 return NULL_TREE;
1315 expr = TREE_OPERAND (expr, 0);
1316 obj = expr;
1317 expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);
1318
1319 offset_int mem_offset;
1320 if (!expr
1321 || TREE_CODE (expr) != MEM_REF
1322 || !mem_ref_offset (expr).is_constant (&mem_offset))
1323 return NULL_TREE;
1324 parm = TREE_OPERAND (expr, 0);
1325 if (TREE_CODE (parm) != SSA_NAME
1326 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1327 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1328 return NULL_TREE;
1329
1330 *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1331 *obj_p = obj;
1332 return expr;
1333 }
1334
1335
1336 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1337 statement PHI, try to find out whether NAME is in fact a
1338 multiple-inheritance typecast from a descendant into an ancestor of a formal
1339 parameter and thus can be described by an ancestor jump function and if so,
1340 write the appropriate function into JFUNC.
1341
1342 Essentially we want to match the following pattern:
1343
1344 if (obj_2(D) != 0B)
1345 goto <bb 3>;
1346 else
1347 goto <bb 4>;
1348
1349 <bb 3>:
1350 iftmp.1_3 = &obj_2(D)->D.1762;
1351
1352 <bb 4>:
1353 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1354 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1355 return D.1879_6; */
1356
1357 static void
1358 compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
1359 struct ipa_node_params *info,
1360 struct ipa_jump_func *jfunc,
1361 gcall *call, gphi *phi)
1362 {
1363 HOST_WIDE_INT offset;
1364 gimple *assign, *cond;
1365 basic_block phi_bb, assign_bb, cond_bb;
1366 tree tmp, parm, expr, obj;
1367 int index, i;
1368
1369 if (gimple_phi_num_args (phi) != 2)
1370 return;
1371
1372 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1373 tmp = PHI_ARG_DEF (phi, 0);
1374 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1375 tmp = PHI_ARG_DEF (phi, 1);
1376 else
1377 return;
1378 if (TREE_CODE (tmp) != SSA_NAME
1379 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1380 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1381 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1382 return;
1383
1384 assign = SSA_NAME_DEF_STMT (tmp);
1385 assign_bb = gimple_bb (assign);
1386 if (!single_pred_p (assign_bb))
1387 return;
1388 expr = get_ancestor_addr_info (assign, &obj, &offset);
1389 if (!expr)
1390 return;
1391 parm = TREE_OPERAND (expr, 0);
1392 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1393 if (index < 0)
1394 return;
1395
1396 cond_bb = single_pred (assign_bb);
1397 cond = last_stmt (cond_bb);
1398 if (!cond
1399 || gimple_code (cond) != GIMPLE_COND
1400 || gimple_cond_code (cond) != NE_EXPR
1401 || gimple_cond_lhs (cond) != parm
1402 || !integer_zerop (gimple_cond_rhs (cond)))
1403 return;
1404
1405 phi_bb = gimple_bb (phi);
1406 for (i = 0; i < 2; i++)
1407 {
1408 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1409 if (pred != assign_bb && pred != cond_bb)
1410 return;
1411 }
1412
1413 ipa_set_ancestor_jf (jfunc, offset, index,
1414 parm_ref_data_pass_through_p (fbi, index, call, parm));
1415 }
1416
1417 /* Inspect the given TYPE and return true iff it has the same structure (the
1418 same number of fields of the same types) as a C++ member pointer. If
1419 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1420 corresponding fields there. */
1421
1422 static bool
1423 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1424 {
1425 tree fld;
1426
1427 if (TREE_CODE (type) != RECORD_TYPE)
1428 return false;
1429
1430 fld = TYPE_FIELDS (type);
1431 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1432 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1433 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1434 return false;
1435
1436 if (method_ptr)
1437 *method_ptr = fld;
1438
1439 fld = DECL_CHAIN (fld);
1440 if (!fld || INTEGRAL_TYPE_P (fld)
1441 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1442 return false;
1443 if (delta)
1444 *delta = fld;
1445
1446 if (DECL_CHAIN (fld))
1447 return false;
1448
1449 return true;
1450 }
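
/* For reference (an editorial note): under the Itanium C++ ABI a
   pointer-to-member-function is lowered to a record of the shape

     struct
     {
       void (*__pfn) ();	// method pointer or vtable offset
       ptrdiff_t __delta;	// adjustment to add to THIS
     };

   (field names illustrative), which is the two-field shape the predicate
   above looks for.  */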
1451
1452 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1453 return the rhs of its defining statement. Otherwise return RHS as it
1454 is. */
1455
1456 static inline tree
1457 get_ssa_def_if_simple_copy (tree rhs)
1458 {
1459 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1460 {
1461 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
1462
1463 if (gimple_assign_single_p (def_stmt))
1464 rhs = gimple_assign_rhs1 (def_stmt);
1465 else
1466 break;
1467 }
1468 return rhs;
1469 }
1470
1471 /* Simple linked list, describing known contents of an aggregate before a
1472 call. */
1473
1474 struct ipa_known_agg_contents_list
1475 {
1476 /* Offset and size of the described part of the aggregate. */
1477 HOST_WIDE_INT offset, size;
1478 /* Known constant value or NULL if the contents is known to be unknown. */
1479 tree constant;
1480 /* Pointer to the next structure in the list. */
1481 struct ipa_known_agg_contents_list *next;
1482 };
1483
1484 /* Find the proper place in the linked list of ipa_known_agg_contents_list
1485 structures in which to put a new one with the given LHS_OFFSET and LHS_SIZE.
1486 If there is a partial overlap, return NULL; if such an element is already
1487 there, set *ALREADY_THERE to true. */
1488
1489 static struct ipa_known_agg_contents_list **
1490 get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
1491 HOST_WIDE_INT lhs_offset,
1492 HOST_WIDE_INT lhs_size,
1493 bool *already_there)
1494 {
1495 struct ipa_known_agg_contents_list **p = list;
1496 while (*p && (*p)->offset < lhs_offset)
1497 {
1498 if ((*p)->offset + (*p)->size > lhs_offset)
1499 return NULL;
1500 p = &(*p)->next;
1501 }
1502
1503 if (*p && (*p)->offset < lhs_offset + lhs_size)
1504 {
1505 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1506 /* We already know this value is subsequently overwritten with
1507 something else. */
1508 *already_there = true;
1509 else
1510 /* Otherwise this is a partial overlap which we cannot
1511 represent. */
1512 return NULL;
1513 }
1514 return p;
1515 }
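
/* An editorial walk-through: with existing list entries at offsets 0 and 64,
   both of size 32, a request for LHS_OFFSET 32 and LHS_SIZE 32 returns the
   link between the two entries, a request for offset 64 and size 32 sets
   *ALREADY_THERE, and a request for offset 48 and size 32 overlaps the
   second entry only partially and therefore yields NULL.  */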
1516
1517 /* Build aggregate jump function from LIST, assuming there are exactly
1518 CONST_COUNT constant entries there and that the offset of the passed argument
1519 is ARG_OFFSET and store it into JFUNC. */
1520
1521 static void
1522 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1523 int const_count, HOST_WIDE_INT arg_offset,
1524 struct ipa_jump_func *jfunc)
1525 {
1526 vec_alloc (jfunc->agg.items, const_count);
1527 while (list)
1528 {
1529 if (list->constant)
1530 {
1531 struct ipa_agg_jf_item item;
1532 item.offset = list->offset - arg_offset;
1533 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1534 item.value = unshare_expr_without_location (list->constant);
1535 jfunc->agg.items->quick_push (item);
1536 }
1537 list = list->next;
1538 }
1539 }
1540
1541 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1542 in ARG is filled in with constant values. ARG can either be an aggregate
1543 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1544 aggregate. JFUNC is the jump function into which the constants are
1545 subsequently stored. */
1546
1547 static void
1548 determine_locally_known_aggregate_parts (gcall *call, tree arg,
1549 tree arg_type,
1550 struct ipa_jump_func *jfunc)
1551 {
1552 struct ipa_known_agg_contents_list *list = NULL;
1553 int item_count = 0, const_count = 0;
1554 HOST_WIDE_INT arg_offset, arg_size;
1555 gimple_stmt_iterator gsi;
1556 tree arg_base;
1557 bool check_ref, by_ref;
1558 ao_ref r;
1559
1560 if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
1561 return;
1562
1563 /* The function operates in three stages. First, we prepare check_ref, r,
1564 arg_base and arg_offset based on what is actually passed as an actual
1565 argument. */
1566
1567 if (POINTER_TYPE_P (arg_type))
1568 {
1569 by_ref = true;
1570 if (TREE_CODE (arg) == SSA_NAME)
1571 {
1572 tree type_size;
1573 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1574 return;
1575 check_ref = true;
1576 arg_base = arg;
1577 arg_offset = 0;
1578 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1579 arg_size = tree_to_uhwi (type_size);
1580 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1581 }
1582 else if (TREE_CODE (arg) == ADDR_EXPR)
1583 {
1584 bool reverse;
1585
1586 arg = TREE_OPERAND (arg, 0);
1587 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1588 &arg_size, &reverse);
1589 if (!arg_base)
1590 return;
1591 if (DECL_P (arg_base))
1592 {
1593 check_ref = false;
1594 ao_ref_init (&r, arg_base);
1595 }
1596 else
1597 return;
1598 }
1599 else
1600 return;
1601 }
1602 else
1603 {
1604 bool reverse;
1605
1606 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1607
1608 by_ref = false;
1609 check_ref = false;
1610 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1611 &arg_size, &reverse);
1612 if (!arg_base)
1613 return;
1614
1615 ao_ref_init (&r, arg);
1616 }
1617
1618 /* Second stage walks back the BB, looks at individual statements and as long
1619 as it is confident of how the statements affect contents of the
1620 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
1621 structures describing it. */
1622 gsi = gsi_for_stmt (call);
1623 gsi_prev (&gsi);
1624 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1625 {
1626 struct ipa_known_agg_contents_list *n, **p;
1627 gimple *stmt = gsi_stmt (gsi);
1628 HOST_WIDE_INT lhs_offset, lhs_size;
1629 tree lhs, rhs, lhs_base;
1630 bool reverse;
1631
1632 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1633 continue;
1634 if (!gimple_assign_single_p (stmt))
1635 break;
1636
1637 lhs = gimple_assign_lhs (stmt);
1638 rhs = gimple_assign_rhs1 (stmt);
1639 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1640 || TREE_CODE (lhs) == BIT_FIELD_REF
1641 || contains_bitfld_component_ref_p (lhs))
1642 break;
1643
1644 lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset,
1645 &lhs_size, &reverse);
1646 if (!lhs_base)
1647 break;
1648
1649 if (check_ref)
1650 {
1651 if (TREE_CODE (lhs_base) != MEM_REF
1652 || TREE_OPERAND (lhs_base, 0) != arg_base
1653 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1654 break;
1655 }
1656 else if (lhs_base != arg_base)
1657 {
1658 if (DECL_P (lhs_base))
1659 continue;
1660 else
1661 break;
1662 }
1663
1664 bool already_there = false;
1665 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1666 &already_there);
1667 if (!p)
1668 break;
1669 if (already_there)
1670 continue;
1671
1672 rhs = get_ssa_def_if_simple_copy (rhs);
1673 n = XALLOCA (struct ipa_known_agg_contents_list);
1674 n->size = lhs_size;
1675 n->offset = lhs_offset;
1676 if (is_gimple_ip_invariant (rhs))
1677 {
1678 n->constant = rhs;
1679 const_count++;
1680 }
1681 else
1682 n->constant = NULL_TREE;
1683 n->next = *p;
1684 *p = n;
1685
1686 item_count++;
1687 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1688 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1689 break;
1690 }
1691
1692 /* Third stage just goes over the list and creates an appropriate vector of
1693 ipa_agg_jf_item structures out of it, of course only if there are
1694 any known constants to begin with. */
1695
1696 if (const_count)
1697 {
1698 jfunc->agg.by_ref = by_ref;
1699 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1700 }
1701 }
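
/* An editorial example of what the walk above recognizes:

     struct S { int a, b; };
     ...
     struct S s;
     s.a = 1;
     s.b = 2;
     foo (&s);

   Scanning backwards from the call records the constants 1 and 2 at bit
   offsets 0 and 32 (assuming a 32-bit int), yielding an aggregate jump
   function with by_ref set.  */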
1702
1703 /* Return the Ith param type of callee associated with call graph
1704 edge E. */
1705
1706 tree
1707 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1708 {
1709 int n;
1710 tree type = (e->callee
1711 ? TREE_TYPE (e->callee->decl)
1712 : gimple_call_fntype (e->call_stmt));
1713 tree t = TYPE_ARG_TYPES (type);
1714
1715 for (n = 0; n < i; n++)
1716 {
1717 if (!t)
1718 break;
1719 t = TREE_CHAIN (t);
1720 }
1721 if (t)
1722 return TREE_VALUE (t);
1723 if (!e->callee)
1724 return NULL;
1725 t = DECL_ARGUMENTS (e->callee->decl);
1726 for (n = 0; n < i; n++)
1727 {
1728 if (!t)
1729 return NULL;
1730 t = TREE_CHAIN (t);
1731 }
1732 if (t)
1733 return TREE_TYPE (t);
1734 return NULL;
1735 }
1736
1737 /* Return ipa_bits with VALUE and MASK values, which can be either a newly
1738 allocated structure or a previously existing one shared with other jump
1739 functions and/or transformation summaries. */
1740
1741 ipa_bits *
1742 ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
1743 {
1744 ipa_bits tmp;
1745 tmp.value = value;
1746 tmp.mask = mask;
1747
1748 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
1749 if (*slot)
1750 return *slot;
1751
1752 ipa_bits *res = ggc_alloc<ipa_bits> ();
1753 res->value = value;
1754 res->mask = mask;
1755 *slot = res;
1756
1757 return res;
1758 }
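
/* An editorial note on the encoding: a zero bit in MASK means the
   corresponding bit of the argument is known to be equal to that bit of
   VALUE, while a set bit in MASK means the bit is unknown.  For example, a
   pointer known to be 8-byte aligned is described by VALUE 0 and a MASK
   whose low three bits are clear.  */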
1759
1760 /* Assign to JF a pointer to ipa_bits structure with VALUE and MASK. Use hash
1761 table in order to avoid creating multiple identical ipa_bits structures. */
1762
1763 static void
1764 ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
1765 const widest_int &mask)
1766 {
1767 jf->bits = ipa_get_ipa_bits_for_value (value, mask);
1768 }
1769
1770 /* Return a pointer to a value_range just like *TMP, but either find it in
1771 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
1772
1773 static value_range *
1774 ipa_get_value_range (value_range *tmp)
1775 {
1776 value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
1777 if (*slot)
1778 return *slot;
1779
1780 value_range *vr = ggc_alloc<value_range> ();
1781 *vr = *tmp;
1782 *slot = vr;
1783
1784 return vr;
1785 }
1786
1787 /* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
1788 equiv set. Use the hash table in order to avoid creating multiple identical
1789 copies of value_ranges. */
1790
1791 static value_range *
1792 ipa_get_value_range (enum value_range_type type, tree min, tree max)
1793 {
1794 value_range tmp;
1795 tmp.type = type;
1796 tmp.min = min;
1797 tmp.max = max;
1798 tmp.equiv = NULL;
1799 return ipa_get_value_range (&tmp);
1800 }
1801
1802 /* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
1803 a NULL equiv bitmap. Use the hash table in order to avoid creating multiple
1804 identical value_range structures. */
1805
1806 static void
1807 ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_type type,
1808 tree min, tree max)
1809 {
1810 jf->m_vr = ipa_get_value_range (type, min, max);
1811 }
1812
1813 /* Assign to JF a pointer to a value_range just like TMP, but either fetch a
1814 copy from ipa_vr_hash_table or allocate a new one in GC memory. */
1815
1816 static void
1817 ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
1818 {
1819 jf->m_vr = ipa_get_value_range (tmp);
1820 }
1821
1822 /* Compute jump functions for all arguments of callsite CS and insert the
1823 information in the jump_functions array in the ipa_edge_args corresponding
1824 to this callsite. */
1825
1826 static void
1827 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
1828 struct cgraph_edge *cs)
1829 {
1830 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1831 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1832 gcall *call = cs->call_stmt;
1833 int n, arg_num = gimple_call_num_args (call);
1834 bool useful_context = false;
1835
1836 if (arg_num == 0 || args->jump_functions)
1837 return;
1838 vec_safe_grow_cleared (args->jump_functions, arg_num);
1839 if (flag_devirtualize)
1840 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1841
1842 if (gimple_call_internal_p (call))
1843 return;
1844 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1845 return;
1846
1847 for (n = 0; n < arg_num; n++)
1848 {
1849 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1850 tree arg = gimple_call_arg (call, n);
1851 tree param_type = ipa_get_callee_param_type (cs, n);
1852 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1853 {
1854 tree instance;
1855 struct ipa_polymorphic_call_context context (cs->caller->decl,
1856 arg, cs->call_stmt,
1857 &instance);
1858 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1859 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1860 if (!context.useless_p ())
1861 useful_context = true;
1862 }
1863
1864 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1865 {
1866 bool addr_nonzero = false;
1867 bool strict_overflow = false;
1868
1869 if (TREE_CODE (arg) == SSA_NAME
1870 && param_type
1871 && get_ptr_nonnull (arg))
1872 addr_nonzero = true;
1873 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
1874 addr_nonzero = true;
1875
1876 if (addr_nonzero)
1877 {
1878 tree z = build_int_cst (TREE_TYPE (arg), 0);
1879 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
1880 }
1881 else
1882 gcc_assert (!jfunc->m_vr);
1883 }
1884 else
1885 {
1886 wide_int min, max;
1887 value_range_type type;
1888 if (TREE_CODE (arg) == SSA_NAME
1889 && param_type
1890 && (type = get_range_info (arg, &min, &max))
1891 && (type == VR_RANGE || type == VR_ANTI_RANGE))
1892 {
1893 value_range tmpvr, resvr;
1894
1895 tmpvr.type = type;
1896 tmpvr.min = wide_int_to_tree (TREE_TYPE (arg), min);
1897 tmpvr.max = wide_int_to_tree (TREE_TYPE (arg), max);
1898 tmpvr.equiv = NULL;
1899 memset (&resvr, 0, sizeof (resvr));
1900 extract_range_from_unary_expr (&resvr, NOP_EXPR, param_type,
1901 &tmpvr, TREE_TYPE (arg));
1902 if (resvr.type == VR_RANGE || resvr.type == VR_ANTI_RANGE)
1903 ipa_set_jfunc_vr (jfunc, &resvr);
1904 else
1905 gcc_assert (!jfunc->m_vr);
1906 }
1907 else
1908 gcc_assert (!jfunc->m_vr);
1909 }
1910
1911 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
1912 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
1913 {
1914 if (TREE_CODE (arg) == SSA_NAME)
1915 ipa_set_jfunc_bits (jfunc, 0,
1916 widest_int::from (get_nonzero_bits (arg),
1917 TYPE_SIGN (TREE_TYPE (arg))));
1918 else
1919 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
1920 }
1921 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
1922 {
1923 unsigned HOST_WIDE_INT bitpos;
1924 unsigned align;
1925
1926 get_pointer_alignment_1 (arg, &align, &bitpos);
1927 widest_int mask = wi::bit_and_not
1928 (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
1929 align / BITS_PER_UNIT - 1);
1930 widest_int value = bitpos / BITS_PER_UNIT;
1931 ipa_set_jfunc_bits (jfunc, value, mask);
1932 }
1933 else
1934 gcc_assert (!jfunc->bits);
1935
1936 if (is_gimple_ip_invariant (arg)
1937 || (VAR_P (arg)
1938 && is_global_var (arg)
1939 && TREE_READONLY (arg)))
1940 ipa_set_jf_constant (jfunc, arg, cs);
1941 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1942 && TREE_CODE (arg) == PARM_DECL)
1943 {
1944 int index = ipa_get_param_decl_index (info, arg);
1945
1946 gcc_assert (index >= 0);
1947 /* Aggregate passed by value, check for pass-through, otherwise we
1948 will attempt to fill in aggregate contents later in this
1949 loop. */
1950 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1951 {
1952 ipa_set_jf_simple_pass_through (jfunc, index, false);
1953 continue;
1954 }
1955 }
1956 else if (TREE_CODE (arg) == SSA_NAME)
1957 {
1958 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1959 {
1960 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1961 if (index >= 0)
1962 {
1963 bool agg_p;
1964 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1965 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1966 }
1967 }
1968 else
1969 {
1970 gimple *stmt = SSA_NAME_DEF_STMT (arg);
1971 if (is_gimple_assign (stmt))
1972 compute_complex_assign_jump_func (fbi, info, jfunc,
1973 call, stmt, arg, param_type);
1974 else if (gimple_code (stmt) == GIMPLE_PHI)
1975 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1976 call,
1977 as_a <gphi *> (stmt));
1978 }
1979 }
1980
1981 /* If ARG is a pointer, we cannot use its type to determine the type of the
1982 aggregate passed (because type conversions are ignored in gimple). Usually
1983 we can safely get the type from the function declaration, but in the case of
1984 K&R prototypes or variadic functions we can try our luck with the type of the
1985 pointer passed. TODO: Since we look for the actual initialization of the
1986 memory object, we may do better to work out the type based on the memory stores we find. */
1987 if (!param_type)
1988 param_type = TREE_TYPE (arg);
1989
1990 if ((jfunc->type != IPA_JF_PASS_THROUGH
1991 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1992 && (jfunc->type != IPA_JF_ANCESTOR
1993 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1994 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1995 || POINTER_TYPE_P (param_type)))
1996 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1997 }
1998 if (!useful_context)
1999 vec_free (args->polymorphic_call_contexts);
2000 }
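
/* As a hypothetical illustration of the computation above, for a caller

     void foo (int *p, int x) { bar (p, x + 1, 7); }

   the edge to bar receives three jump functions: a simple pass-through of
   parameter 0 for the first argument, an arithmetic pass-through of
   parameter 1 (PLUS_EXPR with constant 1, built by
   compute_complex_assign_jump_func) for the second, and an IPA_JF_CONST
   with value 7 for the third.  */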
2001
2002 /* Compute jump functions for all edges - both direct and indirect - outgoing
2003 from BB. */
2004
2005 static void
2006 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2007 {
2008 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2009 int i;
2010 struct cgraph_edge *cs;
2011
2012 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2013 {
2014 struct cgraph_node *callee = cs->callee;
2015
2016 if (callee)
2017 {
2018 callee->ultimate_alias_target ();
2019 /* We do not need to bother analyzing calls to unknown functions
2020 unless they may become known during lto/whopr. */
2021 if (!callee->definition && !flag_lto)
2022 continue;
2023 }
2024 ipa_compute_jump_functions_for_edge (fbi, cs);
2025 }
2026 }
2027
2028 /* If STMT looks like a statement loading a value from a member pointer formal
2029 parameter, return that parameter and store the offset of the field to
2030 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2031 might be clobbered). If USE_DELTA, then we look for a use of the delta
2032 field rather than the pfn. */
2033
2034 static tree
2035 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2036 HOST_WIDE_INT *offset_p)
2037 {
2038 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2039
2040 if (!gimple_assign_single_p (stmt))
2041 return NULL_TREE;
2042
2043 rhs = gimple_assign_rhs1 (stmt);
2044 if (TREE_CODE (rhs) == COMPONENT_REF)
2045 {
2046 ref_field = TREE_OPERAND (rhs, 1);
2047 rhs = TREE_OPERAND (rhs, 0);
2048 }
2049 else
2050 ref_field = NULL_TREE;
2051 if (TREE_CODE (rhs) != MEM_REF)
2052 return NULL_TREE;
2053 rec = TREE_OPERAND (rhs, 0);
2054 if (TREE_CODE (rec) != ADDR_EXPR)
2055 return NULL_TREE;
2056 rec = TREE_OPERAND (rec, 0);
2057 if (TREE_CODE (rec) != PARM_DECL
2058 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2059 return NULL_TREE;
2060 ref_offset = TREE_OPERAND (rhs, 1);
2061
2062 if (use_delta)
2063 fld = delta_field;
2064 else
2065 fld = ptr_field;
2066 if (offset_p)
2067 *offset_p = int_bit_position (fld);
2068
2069 if (ref_field)
2070 {
2071 if (integer_nonzerop (ref_offset))
2072 return NULL_TREE;
2073 return ref_field == fld ? rec : NULL_TREE;
2074 }
2075 else
2076 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2077 : NULL_TREE;
2078 }
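
/* Illustratively, the two load shapes accepted above look like

     f$__pfn_3 = MEM[(struct s *)&f];          plain MEM_REF, matched via
                                               the byte position of the field
     f$__pfn_3 = MEM[(struct s *)&f].__pfn;    COMPONENT_REF over a MEM_REF
                                               with a zero offset

   where f is a PARM_DECL whose type satisfies type_like_member_ptr_p
   (struct s stands for whatever the member pointer record is called).  */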
2079
2080 /* Returns true iff T is an SSA_NAME defined by a statement. */
2081
2082 static bool
2083 ipa_is_ssa_with_stmt_def (tree t)
2084 {
2085 if (TREE_CODE (t) == SSA_NAME
2086 && !SSA_NAME_IS_DEFAULT_DEF (t))
2087 return true;
2088 else
2089 return false;
2090 }
2091
2092 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2093 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2094 indirect call graph edge. */
2095
2096 static struct cgraph_edge *
2097 ipa_note_param_call (struct cgraph_node *node, int param_index,
2098 gcall *stmt)
2099 {
2100 struct cgraph_edge *cs;
2101
2102 cs = node->get_edge (stmt);
2103 cs->indirect_info->param_index = param_index;
2104 cs->indirect_info->agg_contents = 0;
2105 cs->indirect_info->member_ptr = 0;
2106 cs->indirect_info->guaranteed_unmodified = 0;
2107 return cs;
2108 }
2109
2110 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2111 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2112 intermediate information about each formal parameter. Currently it checks
2113 whether the call calls a pointer that is a formal parameter and if so, the
2114 parameter is marked with the called flag and an indirect call graph edge
2115 describing the call is created. This is very simple for ordinary pointers
2116 represented in SSA but not-so-nice when it comes to member pointers. The
2117 ugly part of this function does nothing more than trying to match the
2118 pattern of such a call. An example of such a pattern is the gimple dump
2119 below; the call is on the last line:
2120
2121 <bb 2>:
2122 f$__delta_5 = f.__delta;
2123 f$__pfn_24 = f.__pfn;
2124
2125 or
2126 <bb 2>:
2127 f$__delta_5 = MEM[(struct *)&f];
2128 f$__pfn_24 = MEM[(struct *)&f + 4B];
2129
2130 and a few lines below:
2131
2132 <bb 5>
2133 D.2496_3 = (int) f$__pfn_24;
2134 D.2497_4 = D.2496_3 & 1;
2135 if (D.2497_4 != 0)
2136 goto <bb 3>;
2137 else
2138 goto <bb 4>;
2139
2140 <bb 6>:
2141 D.2500_7 = (unsigned int) f$__delta_5;
2142 D.2501_8 = &S + D.2500_7;
2143 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2144 D.2503_10 = *D.2502_9;
2145 D.2504_12 = f$__pfn_24 + -1;
2146 D.2505_13 = (unsigned int) D.2504_12;
2147 D.2506_14 = D.2503_10 + D.2505_13;
2148 D.2507_15 = *D.2506_14;
2149 iftmp.11_16 = (String:: *) D.2507_15;
2150
2151 <bb 7>:
2152 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2153 D.2500_19 = (unsigned int) f$__delta_5;
2154 D.2508_20 = &S + D.2500_19;
2155 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2156
2157 Such patterns are the result of simple calls to a member pointer:
2158
2159 int doprinting (int (MyString::* f)(int) const)
2160 {
2161 MyString S ("somestring");
2162
2163 return (S.*f)(4);
2164 }
2165
2166 Moreover, the function also looks for called pointers loaded from aggregates
2167 passed by value or reference. */
2168
2169 static void
2170 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2171 tree target)
2172 {
2173 struct ipa_node_params *info = fbi->info;
2174 HOST_WIDE_INT offset;
2175 bool by_ref;
2176
2177 if (SSA_NAME_IS_DEFAULT_DEF (target))
2178 {
2179 tree var = SSA_NAME_VAR (target);
2180 int index = ipa_get_param_decl_index (info, var);
2181 if (index >= 0)
2182 ipa_note_param_call (fbi->node, index, call);
2183 return;
2184 }
2185
2186 int index;
2187 gimple *def = SSA_NAME_DEF_STMT (target);
2188 bool guaranteed_unmodified;
2189 if (gimple_assign_single_p (def)
2190 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2191 gimple_assign_rhs1 (def), &index, &offset,
2192 NULL, &by_ref, &guaranteed_unmodified))
2193 {
2194 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2195 cs->indirect_info->offset = offset;
2196 cs->indirect_info->agg_contents = 1;
2197 cs->indirect_info->by_ref = by_ref;
2198 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2199 return;
2200 }
2201
2202 /* Now we need to try to match the complex pattern of calling a member
2203 pointer. */
2204 if (gimple_code (def) != GIMPLE_PHI
2205 || gimple_phi_num_args (def) != 2
2206 || !POINTER_TYPE_P (TREE_TYPE (target))
2207 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2208 return;
2209
2210 /* First, we need to check whether one of these is a load from a member
2211 pointer that is a parameter to this function. */
2212 tree n1 = PHI_ARG_DEF (def, 0);
2213 tree n2 = PHI_ARG_DEF (def, 1);
2214 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2215 return;
2216 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2217 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2218
2219 tree rec;
2220 basic_block bb, virt_bb;
2221 basic_block join = gimple_bb (def);
2222 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2223 {
2224 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2225 return;
2226
2227 bb = EDGE_PRED (join, 0)->src;
2228 virt_bb = gimple_bb (d2);
2229 }
2230 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2231 {
2232 bb = EDGE_PRED (join, 1)->src;
2233 virt_bb = gimple_bb (d1);
2234 }
2235 else
2236 return;
2237
2238 /* Second, we need to check that the basic blocks are laid out in the way
2239 corresponding to the pattern. */
2240
2241 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2242 || single_pred (virt_bb) != bb
2243 || single_succ (virt_bb) != join)
2244 return;
2245
2246 /* Third, let's see that the branching is done depending on the least
2247 significant bit of the pfn. */
2248
2249 gimple *branch = last_stmt (bb);
2250 if (!branch || gimple_code (branch) != GIMPLE_COND)
2251 return;
2252
2253 if ((gimple_cond_code (branch) != NE_EXPR
2254 && gimple_cond_code (branch) != EQ_EXPR)
2255 || !integer_zerop (gimple_cond_rhs (branch)))
2256 return;
2257
2258 tree cond = gimple_cond_lhs (branch);
2259 if (!ipa_is_ssa_with_stmt_def (cond))
2260 return;
2261
2262 def = SSA_NAME_DEF_STMT (cond);
2263 if (!is_gimple_assign (def)
2264 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2265 || !integer_onep (gimple_assign_rhs2 (def)))
2266 return;
2267
2268 cond = gimple_assign_rhs1 (def);
2269 if (!ipa_is_ssa_with_stmt_def (cond))
2270 return;
2271
2272 def = SSA_NAME_DEF_STMT (cond);
2273
2274 if (is_gimple_assign (def)
2275 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2276 {
2277 cond = gimple_assign_rhs1 (def);
2278 if (!ipa_is_ssa_with_stmt_def (cond))
2279 return;
2280 def = SSA_NAME_DEF_STMT (cond);
2281 }
2282
2283 tree rec2;
2284 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2285 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2286 == ptrmemfunc_vbit_in_delta),
2287 NULL);
2288 if (rec != rec2)
2289 return;
2290
2291 index = ipa_get_param_decl_index (info, rec);
2292 if (index >= 0
2293 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2294 {
2295 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2296 cs->indirect_info->offset = offset;
2297 cs->indirect_info->agg_contents = 1;
2298 cs->indirect_info->member_ptr = 1;
2299 cs->indirect_info->guaranteed_unmodified = 1;
2300 }
2301
2302 return;
2303 }
2304
2305 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2306 object referenced in the expression is a formal parameter of the caller
2307 FBI->node (described by FBI->info), create a call note for the
2308 statement. */
2309
2310 static void
2311 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2312 gcall *call, tree target)
2313 {
2314 tree obj = OBJ_TYPE_REF_OBJECT (target);
2315 int index;
2316 HOST_WIDE_INT anc_offset;
2317
2318 if (!flag_devirtualize)
2319 return;
2320
2321 if (TREE_CODE (obj) != SSA_NAME)
2322 return;
2323
2324 struct ipa_node_params *info = fbi->info;
2325 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2326 {
2327 struct ipa_jump_func jfunc;
2328 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2329 return;
2330
2331 anc_offset = 0;
2332 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2333 gcc_assert (index >= 0);
2334 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2335 call, &jfunc))
2336 return;
2337 }
2338 else
2339 {
2340 struct ipa_jump_func jfunc;
2341 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2342 tree expr;
2343
2344 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2345 if (!expr)
2346 return;
2347 index = ipa_get_param_decl_index (info,
2348 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2349 gcc_assert (index >= 0);
2350 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2351 call, &jfunc, anc_offset))
2352 return;
2353 }
2354
2355 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2356 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2357 ii->offset = anc_offset;
2358 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2359 ii->otr_type = obj_type_ref_class (target);
2360 ii->polymorphic = 1;
2361 }
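
/* For illustration, in a method such as

     void A::foo () { this->virt (); }

   TARGET is an OBJ_TYPE_REF whose object is the default definition of the
   implicit this parameter, so the code above notes an indirect call on
   parameter 0 with ANC_OFFSET 0, otr_type A and the token of virt
   (assuming no intervening type change is detected).  */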
2362
2363 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2364 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2365 containing intermediate information about each formal parameter. */
2366
2367 static void
2368 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2369 {
2370 tree target = gimple_call_fn (call);
2371
2372 if (!target
2373 || (TREE_CODE (target) != SSA_NAME
2374 && !virtual_method_call_p (target)))
2375 return;
2376
2377 struct cgraph_edge *cs = fbi->node->get_edge (call);
2378 /* If we previously turned the call into a direct call, there is
2379 no need to analyze it. */
2380 if (cs && !cs->indirect_unknown_callee)
2381 return;
2382
2383 if (cs->indirect_info->polymorphic && flag_devirtualize)
2384 {
2385 tree instance;
2386 tree target = gimple_call_fn (call);
2387 ipa_polymorphic_call_context context (current_function_decl,
2388 target, call, &instance);
2389
2390 gcc_checking_assert (cs->indirect_info->otr_type
2391 == obj_type_ref_class (target));
2392 gcc_checking_assert (cs->indirect_info->otr_token
2393 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2394
2395 cs->indirect_info->vptr_changed
2396 = !context.get_dynamic_type (instance,
2397 OBJ_TYPE_REF_OBJECT (target),
2398 obj_type_ref_class (target), call);
2399 cs->indirect_info->context = context;
2400 }
2401
2402 if (TREE_CODE (target) == SSA_NAME)
2403 ipa_analyze_indirect_call_uses (fbi, call, target);
2404 else if (virtual_method_call_p (target))
2405 ipa_analyze_virtual_call_uses (fbi, call, target);
2406 }
2407
2408
2409 /* Analyze the call statement STMT with respect to formal parameters (described
2410 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2411 formal parameters are called. */
2412
2413 static void
2414 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2415 {
2416 if (is_gimple_call (stmt))
2417 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2418 }
2419
2420 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2421 If OP is a parameter declaration, mark it as used in the info structure
2422 passed in DATA. */
2423
2424 static bool
2425 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2426 {
2427 struct ipa_node_params *info = (struct ipa_node_params *) data;
2428
2429 op = get_base_address (op);
2430 if (op
2431 && TREE_CODE (op) == PARM_DECL)
2432 {
2433 int index = ipa_get_param_decl_index (info, op);
2434 gcc_assert (index >= 0);
2435 ipa_set_param_used (info, index, true);
2436 }
2437
2438 return false;
2439 }
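
/* Illustration: for an aggregate parameter accessed as in

     int f (struct S s) { return s.a; }

   the load s.a has base address s, a PARM_DECL, so parameter 0 is marked as
   used here.  Parameters living in SSA registers are instead handled in
   ipa_analyze_controlled_uses below.  */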
2440
2441 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2442 the findings in various structures of the associated ipa_node_params
2443 structure, such as parameter flags, notes etc. FBI holds various data about
2444 the function being analyzed. */
2445
2446 static void
2447 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2448 {
2449 gimple_stmt_iterator gsi;
2450 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2451 {
2452 gimple *stmt = gsi_stmt (gsi);
2453
2454 if (is_gimple_debug (stmt))
2455 continue;
2456
2457 ipa_analyze_stmt_uses (fbi, stmt);
2458 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2459 visit_ref_for_mod_analysis,
2460 visit_ref_for_mod_analysis,
2461 visit_ref_for_mod_analysis);
2462 }
2463 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2464 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2465 visit_ref_for_mod_analysis,
2466 visit_ref_for_mod_analysis,
2467 visit_ref_for_mod_analysis);
2468 }
2469
2470 /* Calculate controlled uses of parameters of NODE. */
2471
2472 static void
2473 ipa_analyze_controlled_uses (struct cgraph_node *node)
2474 {
2475 struct ipa_node_params *info = IPA_NODE_REF (node);
2476
2477 for (int i = 0; i < ipa_get_param_count (info); i++)
2478 {
2479 tree parm = ipa_get_param (info, i);
2480 int controlled_uses = 0;
2481
2482 /* For SSA regs, see if the parameter is used. For non-SSA we compute
2483 the flag during modification analysis. */
2484 if (is_gimple_reg (parm))
2485 {
2486 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2487 parm);
2488 if (ddef && !has_zero_uses (ddef))
2489 {
2490 imm_use_iterator imm_iter;
2491 use_operand_p use_p;
2492
2493 ipa_set_param_used (info, i, true);
2494 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2495 if (!is_gimple_call (USE_STMT (use_p)))
2496 {
2497 if (!is_gimple_debug (USE_STMT (use_p)))
2498 {
2499 controlled_uses = IPA_UNDESCRIBED_USE;
2500 break;
2501 }
2502 }
2503 else
2504 controlled_uses++;
2505 }
2506 else
2507 controlled_uses = 0;
2508 }
2509 else
2510 controlled_uses = IPA_UNDESCRIBED_USE;
2511 ipa_set_controlled_uses (info, i, controlled_uses);
2512 }
2513 }
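
/* A hypothetical example for the counting above:

     static void wrapped (void (*fn) (void)) { fn (); fn (); }

   The default definition of fn has two uses and both use statements are
   calls, so the controlled-uses count of parameter 0 becomes 2.  Any
   non-call, non-debug use would instead force IPA_UNDESCRIBED_USE.  */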
2514
2515 /* Free stuff in BI. */
2516
2517 static void
2518 free_ipa_bb_info (struct ipa_bb_info *bi)
2519 {
2520 bi->cg_edges.release ();
2521 bi->param_aa_statuses.release ();
2522 }
2523
2524 /* Dominator walker driving the analysis. */
2525
2526 class analysis_dom_walker : public dom_walker
2527 {
2528 public:
2529 analysis_dom_walker (struct ipa_func_body_info *fbi)
2530 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2531
2532 virtual edge before_dom_children (basic_block);
2533
2534 private:
2535 struct ipa_func_body_info *m_fbi;
2536 };
2537
2538 edge
2539 analysis_dom_walker::before_dom_children (basic_block bb)
2540 {
2541 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2542 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2543 return NULL;
2544 }
2545
2546 /* Release body info FBI. */
2547
2548 void
2549 ipa_release_body_info (struct ipa_func_body_info *fbi)
2550 {
2551 int i;
2552 struct ipa_bb_info *bi;
2553
2554 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2555 free_ipa_bb_info (bi);
2556 fbi->bb_infos.release ();
2557 }
2558
2559 /* Initialize the array describing properties of formal parameters
2560 of NODE, analyze their uses and compute jump functions associated
2561 with actual arguments of calls from within NODE. */
2562
2563 void
2564 ipa_analyze_node (struct cgraph_node *node)
2565 {
2566 struct ipa_func_body_info fbi;
2567 struct ipa_node_params *info;
2568
2569 ipa_check_create_node_params ();
2570 ipa_check_create_edge_args ();
2571 info = IPA_NODE_REF (node);
2572
2573 if (info->analysis_done)
2574 return;
2575 info->analysis_done = 1;
2576
2577 if (ipa_func_spec_opts_forbid_analysis_p (node))
2578 {
2579 for (int i = 0; i < ipa_get_param_count (info); i++)
2580 {
2581 ipa_set_param_used (info, i, true);
2582 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2583 }
2584 return;
2585 }
2586
2587 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2588 push_cfun (func);
2589 calculate_dominance_info (CDI_DOMINATORS);
2590 ipa_initialize_node_params (node);
2591 ipa_analyze_controlled_uses (node);
2592
2593 fbi.node = node;
2594 fbi.info = IPA_NODE_REF (node);
2595 fbi.bb_infos = vNULL;
2596 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2597 fbi.param_count = ipa_get_param_count (info);
2598 fbi.aa_walked = 0;
2599
2600 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2601 {
2602 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2603 bi->cg_edges.safe_push (cs);
2604 }
2605
2606 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2607 {
2608 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2609 bi->cg_edges.safe_push (cs);
2610 }
2611
2612 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2613
2614 ipa_release_body_info (&fbi);
2615 free_dominance_info (CDI_DOMINATORS);
2616 pop_cfun ();
2617 }
2618
2619 /* Update the jump functions associated with call graph edge E when the call
2620 graph edge CS is being inlined, assuming that E->caller is already (possibly
2621 indirectly) inlined into CS->callee and that E has not been inlined. */
2622
2623 static void
2624 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2625 struct cgraph_edge *e)
2626 {
2627 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2628 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2629 int count = ipa_get_cs_argument_count (args);
2630 int i;
2631
2632 for (i = 0; i < count; i++)
2633 {
2634 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2635 struct ipa_polymorphic_call_context *dst_ctx
2636 = ipa_get_ith_polymorhic_call_context (args, i);
2637
2638 if (dst->type == IPA_JF_ANCESTOR)
2639 {
2640 struct ipa_jump_func *src;
2641 int dst_fid = dst->value.ancestor.formal_id;
2642 struct ipa_polymorphic_call_context *src_ctx
2643 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2644
2645 /* A variable number of arguments can cause havoc if we try to access
2646 one that does not exist in the inlined edge. So make sure we
2647 don't. */
2648 if (dst_fid >= ipa_get_cs_argument_count (top))
2649 {
2650 ipa_set_jf_unknown (dst);
2651 continue;
2652 }
2653
2654 src = ipa_get_ith_jump_func (top, dst_fid);
2655
2656 if (src_ctx && !src_ctx->useless_p ())
2657 {
2658 struct ipa_polymorphic_call_context ctx = *src_ctx;
2659
2660 /* TODO: Make type preserved safe WRT contexts. */
2661 if (!ipa_get_jf_ancestor_type_preserved (dst))
2662 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2663 ctx.offset_by (dst->value.ancestor.offset);
2664 if (!ctx.useless_p ())
2665 {
2666 if (!dst_ctx)
2667 {
2668 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2669 count);
2670 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2671 }
2672
2673 dst_ctx->combine_with (ctx);
2674 }
2675 }
2676
2677 if (src->agg.items
2678 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2679 {
2680 struct ipa_agg_jf_item *item;
2681 int j;
2682
2683 /* Currently we do not produce clobber aggregate jump functions;
2684 replace with merging when we do. */
2685 gcc_assert (!dst->agg.items);
2686
2687 dst->agg.items = vec_safe_copy (src->agg.items);
2688 dst->agg.by_ref = src->agg.by_ref;
2689 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2690 item->offset -= dst->value.ancestor.offset;
2691 }
2692
2693 if (src->type == IPA_JF_PASS_THROUGH
2694 && src->value.pass_through.operation == NOP_EXPR)
2695 {
2696 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2697 dst->value.ancestor.agg_preserved &=
2698 src->value.pass_through.agg_preserved;
2699 }
2700 else if (src->type == IPA_JF_PASS_THROUGH
2701 && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
2702 {
2703 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2704 dst->value.ancestor.agg_preserved = false;
2705 }
2706 else if (src->type == IPA_JF_ANCESTOR)
2707 {
2708 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2709 dst->value.ancestor.offset += src->value.ancestor.offset;
2710 dst->value.ancestor.agg_preserved &=
2711 src->value.ancestor.agg_preserved;
2712 }
2713 else
2714 ipa_set_jf_unknown (dst);
2715 }
2716 else if (dst->type == IPA_JF_PASS_THROUGH)
2717 {
2718 struct ipa_jump_func *src;
2719 /* We must check the range due to calls with a variable number of arguments,
2720 and we cannot combine jump functions with operations. */
2721 if (dst->value.pass_through.operation == NOP_EXPR
2722 && (dst->value.pass_through.formal_id
2723 < ipa_get_cs_argument_count (top)))
2724 {
2725 int dst_fid = dst->value.pass_through.formal_id;
2726 src = ipa_get_ith_jump_func (top, dst_fid);
2727 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2728 struct ipa_polymorphic_call_context *src_ctx
2729 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2730
2731 if (src_ctx && !src_ctx->useless_p ())
2732 {
2733 struct ipa_polymorphic_call_context ctx = *src_ctx;
2734
2735 /* TODO: Make type preserved safe WRT contexts. */
2736 if (!ipa_get_jf_pass_through_type_preserved (dst))
2737 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2738 if (!ctx.useless_p ())
2739 {
2740 if (!dst_ctx)
2741 {
2742 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2743 count);
2744 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2745 }
2746 dst_ctx->combine_with (ctx);
2747 }
2748 }
2749 switch (src->type)
2750 {
2751 case IPA_JF_UNKNOWN:
2752 ipa_set_jf_unknown (dst);
2753 break;
2754 case IPA_JF_CONST:
2755 ipa_set_jf_cst_copy (dst, src);
2756 break;
2757
2758 case IPA_JF_PASS_THROUGH:
2759 {
2760 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2761 enum tree_code operation;
2762 operation = ipa_get_jf_pass_through_operation (src);
2763
2764 if (operation == NOP_EXPR)
2765 {
2766 bool agg_p;
2767 agg_p = dst_agg_p
2768 && ipa_get_jf_pass_through_agg_preserved (src);
2769 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2770 }
2771 else if (TREE_CODE_CLASS (operation) == tcc_unary)
2772 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
2773 else
2774 {
2775 tree operand = ipa_get_jf_pass_through_operand (src);
2776 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2777 operation);
2778 }
2779 break;
2780 }
2781 case IPA_JF_ANCESTOR:
2782 {
2783 bool agg_p;
2784 agg_p = dst_agg_p
2785 && ipa_get_jf_ancestor_agg_preserved (src);
2786 ipa_set_ancestor_jf (dst,
2787 ipa_get_jf_ancestor_offset (src),
2788 ipa_get_jf_ancestor_formal_id (src),
2789 agg_p);
2790 break;
2791 }
2792 default:
2793 gcc_unreachable ();
2794 }
2795
2796 if (src->agg.items
2797 && (dst_agg_p || !src->agg.by_ref))
2798 {
2799 /* Currently we do not produce clobber aggregate jump
2800 functions; replace with merging when we do. */
2801 gcc_assert (!dst->agg.items);
2802
2803 dst->agg.by_ref = src->agg.by_ref;
2804 dst->agg.items = vec_safe_copy (src->agg.items);
2805 }
2806 }
2807 else
2808 ipa_set_jf_unknown (dst);
2809 }
2810 }
2811 }
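
/* A sketch of the composition done above, with made-up numbers: if TOP says
   that CS passed its formal 2 unchanged as the DST_FID-th argument (a
   simple pass-through), and E carried an ancestor jump function with offset
   64 based on argument DST_FID, then after the update E carries an ancestor
   jump function with offset 64 based on formal 2 of the (new) caller.  */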
2812
2813 /* If TARGET is an addr_expr of a function declaration, make it the
2814 (possibly SPECULATIVE) destination of an indirect edge IE and return the edge.
2815 Otherwise, return NULL. */
2816
2817 struct cgraph_edge *
2818 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2819 bool speculative)
2820 {
2821 struct cgraph_node *callee;
2822 bool unreachable = false;
2823
2824 if (TREE_CODE (target) == ADDR_EXPR)
2825 target = TREE_OPERAND (target, 0);
2826 if (TREE_CODE (target) != FUNCTION_DECL)
2827 {
2828 target = canonicalize_constructor_val (target, NULL);
2829 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2830 {
2831 /* Member pointer call that goes through a VMT lookup. */
2832 if (ie->indirect_info->member_ptr
2833 /* Or if target is not an invariant expression and we do not
2834 know if it will evaluate to a function at runtime.
2835 This can happen when folding through &VAR, where &VAR
2836 is IP invariant, but VAR itself is not.
2837
2838 TODO: Revisit this when GCC 5 is branched. It seems that
2839 member_ptr check is not needed and that we may try to fold
2840 the expression and see if VAR is readonly. */
2841 || !is_gimple_ip_invariant (target))
2842 {
2843 if (dump_enabled_p ())
2844 {
2845 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
2846 "discovered direct call non-invariant %s\n",
2847 ie->caller->dump_name ());
2848 }
2849 return NULL;
2850 }
2851
2852
2853 if (dump_enabled_p ())
2854 {
2855 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
2856 "discovered direct call to non-function in %s, "
2857 "making it __builtin_unreachable\n",
2858 ie->caller->dump_name ());
2859 }
2860
2861 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2862 callee = cgraph_node::get_create (target);
2863 unreachable = true;
2864 }
2865 else
2866 callee = cgraph_node::get (target);
2867 }
2868 else
2869 callee = cgraph_node::get (target);
2870
2871 /* Because may-edges are not explicitly represented and the vtable may be external,
2872 we may create the first reference to the object in the unit. */
2873 if (!callee || callee->global.inlined_to)
2874 {
2875
2876 /* We had better ensure we can refer to it.
2877 In the case of static functions we are out of luck, since we have already
2878 removed its body. In the case of public functions we may or may
2879 not introduce the reference. */
2880 if (!canonicalize_constructor_val (target, NULL)
2881 || !TREE_PUBLIC (target))
2882 {
2883 if (dump_file)
2884 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2885 "(%s -> %s) but can not refer to it. Giving up.\n",
2886 ie->caller->dump_name (),
2887 ie->callee->dump_name ());
2888 return NULL;
2889 }
2890 callee = cgraph_node::get_create (target);
2891 }
2892
2893 /* If the edge is already speculated, bail out, only dumping whether the existing speculation agrees. */
2894 if (speculative && ie->speculative)
2895 {
2896 struct cgraph_edge *e2;
2897 struct ipa_ref *ref;
2898 ie->speculative_call_info (e2, ie, ref);
2899 if (e2->callee->ultimate_alias_target ()
2900 != callee->ultimate_alias_target ())
2901 {
2902 if (dump_file)
2903 fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
2904 "target (%s -> %s) but the call is already "
2905 "speculated to %s. Giving up.\n",
2906 ie->caller->dump_name (), callee->dump_name (),
2907 e2->callee->dump_name ());
2908 }
2909 else
2910 {
2911 if (dump_file)
2912 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2913 "(%s -> %s) this agree with previous speculation.\n",
2914 ie->caller->dump_name (), callee->dump_name ());
2915 }
2916 return NULL;
2917 }
2918
2919 if (!dbg_cnt (devirt))
2920 return NULL;
2921
2922 ipa_check_create_node_params ();
2923
2924 /* We cannot make edges to inline clones. It is a bug if someone removed
2925 the cgraph node too early. */
2926 gcc_assert (!callee->global.inlined_to);
2927
2928 if (dump_file && !unreachable)
2929 {
2930 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2931 "(%s -> %s), for stmt ",
2932 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2933 speculative ? "speculative" : "known",
2934 ie->caller->dump_name (),
2935 callee->dump_name ());
2936 if (ie->call_stmt)
2937 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2938 else
2939 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2940 }
2941 if (dump_enabled_p ())
2942 {
2943 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
2944 "converting indirect call in %s to direct call to %s\n",
2945 ie->caller->name (), callee->name ());
2946 }
2947 if (!speculative)
2948 {
2949 struct cgraph_edge *orig = ie;
2950 ie = ie->make_direct (callee);
2951 /* If we resolved a speculative edge, the cost is already up to date
2952 for the direct call (adjusted by inline_edge_duplication_hook). */
2953 if (ie == orig)
2954 {
2955 ipa_call_summary *es = ipa_call_summaries->get (ie);
2956 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2957 - eni_size_weights.call_cost);
2958 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2959 - eni_time_weights.call_cost);
2960 }
2961 }
2962 else
2963 {
2964 if (!callee->can_be_discarded_p ())
2965 {
2966 cgraph_node *alias;
2967 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2968 if (alias)
2969 callee = alias;
2970 }
2971 /* make_speculative will update ie's cost to direct call cost. */
2972 ie = ie->make_speculative
2973 (callee, ie->count.apply_scale (8, 10));
2974 }
2975
2976 return ie;
2977 }
2978
2979 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
2980 CONSTRUCTOR and return it. Return NULL if the search fails for some
2981 reason. */
2982
2983 static tree
2984 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
2985 {
2986 tree type = TREE_TYPE (constructor);
2987 if (TREE_CODE (type) != ARRAY_TYPE
2988 && TREE_CODE (type) != RECORD_TYPE)
2989 return NULL;
2990
2991 unsigned ix;
2992 tree index, val;
2993 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
2994 {
2995 HOST_WIDE_INT elt_offset;
2996 if (TREE_CODE (type) == ARRAY_TYPE)
2997 {
2998 offset_int off;
2999 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3000 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3001
3002 if (index)
3003 {
3004 if (TREE_CODE (index) == RANGE_EXPR)
3005 off = wi::to_offset (TREE_OPERAND (index, 0));
3006 else
3007 off = wi::to_offset (index);
3008 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3009 {
3010 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3011 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3012 off = wi::sext (off - wi::to_offset (low_bound),
3013 TYPE_PRECISION (TREE_TYPE (index)));
3014 }
3015 off *= wi::to_offset (unit_size);
3016 /* ??? Handle more than just the first index of a
3017 RANGE_EXPR. */
3018 }
3019 else
3020 off = wi::to_offset (unit_size) * ix;
3021
3022 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3023 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3024 continue;
3025 elt_offset = off.to_shwi ();
3026 }
3027 else if (TREE_CODE (type) == RECORD_TYPE)
3028 {
3029 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3030 if (DECL_BIT_FIELD (index))
3031 continue;
3032 elt_offset = int_bit_position (index);
3033 }
3034 else
3035 gcc_unreachable ();
3036
3037 if (elt_offset > req_offset)
3038 return NULL;
3039
3040 if (TREE_CODE (val) == CONSTRUCTOR)
3041 return find_constructor_constant_at_offset (val,
3042 req_offset - elt_offset);
3043
3044 if (elt_offset == req_offset
3045 && is_gimple_reg_type (TREE_TYPE (val))
3046 && is_gimple_ip_invariant (val))
3047 return val;
3048 }
3049 return NULL;
3050 }
3051
3052 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3053 invariant from a static constructor and if so, return it. Otherwise return
3054 NULL. */
3055
3056 static tree
3057 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3058 {
3059 if (by_ref)
3060 {
3061 if (TREE_CODE (scalar) != ADDR_EXPR)
3062 return NULL;
3063 scalar = TREE_OPERAND (scalar, 0);
3064 }
3065
3066 if (!VAR_P (scalar)
3067 || !is_global_var (scalar)
3068 || !TREE_READONLY (scalar)
3069 || !DECL_INITIAL (scalar)
3070 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3071 return NULL;
3072
3073 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3074 }
3075
3076 /* Retrieve value from aggregate jump function AGG or static initializer of
3077 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
3078 none. BY_REF specifies whether the value has to be passed by reference or
3079 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
3080 to is set to true if the value comes from an initializer of a constant. */
3081
3082 tree
3083 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
3084 HOST_WIDE_INT offset, bool by_ref,
3085 bool *from_global_constant)
3086 {
3087 struct ipa_agg_jf_item *item;
3088 int i;
3089
3090 if (scalar)
3091 {
3092 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3093 if (res)
3094 {
3095 if (from_global_constant)
3096 *from_global_constant = true;
3097 return res;
3098 }
3099 }
3100
3101 if (!agg
3102 || by_ref != agg->by_ref)
3103 return NULL;
3104
3105 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
3106 if (item->offset == offset)
3107 {
3108 /* Currently we do not have clobber values; return NULL for them once
3109 we do. */
3110 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3111 if (from_global_constant)
3112 *from_global_constant = false;
3113 return item->value;
3114 }
3115 return NULL;
3116 }
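
/* A hypothetical example of the static-initializer path above:

     static const struct { int a; int b; } k = { 1, 2 };
     ...
     foo (&k);

   Looking up OFFSET 32 (in bits) with BY_REF set strips the ADDR_EXPR,
   finds the read-only global k and returns the constant 2 from its
   DECL_INITIAL via find_constructor_constant_at_offset, setting
   *FROM_GLOBAL_CONSTANT along the way.  */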
3117
3118 /* Remove a reference to SYMBOL from the list of references of a node given by
3119 reference description RDESC. Return true if the reference has been
3120 successfully found and removed. */
3121
3122 static bool
3123 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3124 {
3125 struct ipa_ref *to_del;
3126 struct cgraph_edge *origin;
3127
3128 origin = rdesc->cs;
3129 if (!origin)
3130 return false;
3131 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3132 origin->lto_stmt_uid);
3133 if (!to_del)
3134 return false;
3135
3136 to_del->remove_reference ();
3137 if (dump_file)
3138 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3139 origin->caller->dump_name (), xstrdup_for_dump (symbol->name ()));
3140 return true;
3141 }
3142
3143 /* If JFUNC has a reference description with refcount different from
3144 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3145 NULL. JFUNC must be a constant jump function. */
3146
3147 static struct ipa_cst_ref_desc *
3148 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3149 {
3150 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3151 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3152 return rdesc;
3153 else
3154 return NULL;
3155 }
3156
3157 /* If the value of constant jump function JFUNC is an address of a function
3158 declaration, return the associated call graph node. Otherwise return
3159 NULL. */
3160
3161 static cgraph_node *
3162 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3163 {
3164 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3165 tree cst = ipa_get_jf_constant (jfunc);
3166 if (TREE_CODE (cst) != ADDR_EXPR
3167 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3168 return NULL;
3169
3170 return cgraph_node::get (TREE_OPERAND (cst, 0));
3171 }
3172
3173
3174 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3175 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3176 the edge specified in the rdesc. Return false if either the symbol or the
3177 reference could not be found, otherwise return true. */
3178
3179 static bool
3180 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3181 {
3182 struct ipa_cst_ref_desc *rdesc;
3183 if (jfunc->type == IPA_JF_CONST
3184 && (rdesc = jfunc_rdesc_usable (jfunc))
3185 && --rdesc->refcount == 0)
3186 {
3187 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3188 if (!symbol)
3189 return false;
3190
3191 return remove_described_reference (symbol, rdesc);
3192 }
3193 return true;
3194 }
3195
3196 /* Try to find a destination for indirect edge IE that corresponds to a simple
3197 call or a call of a member function pointer and where the destination is a
3198 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3199 the type of the parameter to which the result of JFUNC is passed. If it can
3200 be determined, return the newly direct edge, otherwise return NULL.
3201 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3202
3203 static struct cgraph_edge *
3204 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3205 struct ipa_jump_func *jfunc, tree target_type,
3206 struct ipa_node_params *new_root_info)
3207 {
3208 struct cgraph_edge *cs;
3209 tree target;
3210 bool agg_contents = ie->indirect_info->agg_contents;
3211 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3212 if (agg_contents)
3213 {
3214 bool from_global_constant;
3215 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3216 ie->indirect_info->offset,
3217 ie->indirect_info->by_ref,
3218 &from_global_constant);
3219 if (target
3220 && !from_global_constant
3221 && !ie->indirect_info->guaranteed_unmodified)
3222 return NULL;
3223 }
3224 else
3225 target = scalar;
3226 if (!target)
3227 return NULL;
3228 cs = ipa_make_edge_direct_to_target (ie, target);
3229
3230 if (cs && !agg_contents)
3231 {
3232 bool ok;
3233 gcc_checking_assert (cs->callee
3234 && (cs != ie
3235 || jfunc->type != IPA_JF_CONST
3236 || !cgraph_node_for_jfunc (jfunc)
3237 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3238 ok = try_decrement_rdesc_refcount (jfunc);
3239 gcc_checking_assert (ok);
3240 }
3241
3242 return cs;
3243 }
3244
3245 /* Return the target to be used in cases of impossible devirtualization. IE
3246 and target (the latter can be NULL) are dumped when dumping is enabled. */
3247
3248 tree
3249 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3250 {
3251 if (dump_file)
3252 {
3253 if (target)
3254 fprintf (dump_file,
3255 "Type inconsistent devirtualization: %s->%s\n",
3256 ie->caller->dump_name (),
3257 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3258 else
3259 fprintf (dump_file,
3260 "No devirtualization target in %s\n",
3261 ie->caller->dump_name ());
3262 }
3263 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3264 cgraph_node::get_create (new_target);
3265 return new_target;
3266 }
3267
3268 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3269 call based on a formal parameter which is described by jump function JFUNC
3270 and if it can be determined, make it direct and return the direct edge.
3271 Otherwise, return NULL. CTX describes the polymorphic context that the
3272 parameter the call is based on brings along with it. */
3273
3274 static struct cgraph_edge *
3275 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3276 struct ipa_jump_func *jfunc,
3277 struct ipa_polymorphic_call_context ctx)
3278 {
3279 tree target = NULL;
3280 bool speculative = false;
3281
3282 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3283 return NULL;
3284
3285 gcc_assert (!ie->indirect_info->by_ref);
3286
3287 /* Try to do lookup via known virtual table pointer value. */
3288 if (!ie->indirect_info->vptr_changed
3289 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3290 {
3291 tree vtable;
3292 unsigned HOST_WIDE_INT offset;
3293 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3294 : NULL;
3295 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3296 ie->indirect_info->offset,
3297 true);
3298 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3299 {
3300 bool can_refer;
3301 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3302 vtable, offset, &can_refer);
3303 if (can_refer)
3304 {
3305 if (!t
3306 || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3307 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3308 || !possible_polymorphic_call_target_p
3309 (ie, cgraph_node::get (t)))
3310 {
3311 /* Do not speculate builtin_unreachable; it is stupid! */
3312 if (!ie->indirect_info->vptr_changed)
3313 target = ipa_impossible_devirt_target (ie, target);
3314 else
3315 target = NULL;
3316 }
3317 else
3318 {
3319 target = t;
3320 speculative = ie->indirect_info->vptr_changed;
3321 }
3322 }
3323 }
3324 }
3325
3326 ipa_polymorphic_call_context ie_context (ie);
3327 vec <cgraph_node *>targets;
3328 bool final;
3329
3330 ctx.offset_by (ie->indirect_info->offset);
3331 if (ie->indirect_info->vptr_changed)
3332 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3333 ie->indirect_info->otr_type);
3334 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3335 targets = possible_polymorphic_call_targets
3336 (ie->indirect_info->otr_type,
3337 ie->indirect_info->otr_token,
3338 ctx, &final);
3339 if (final && targets.length () <= 1)
3340 {
3341 speculative = false;
3342 if (targets.length () == 1)
3343 target = targets[0]->decl;
3344 else
3345 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3346 }
3347 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3348 && !ie->speculative && ie->maybe_hot_p ())
3349 {
3350 cgraph_node *n;
3351 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3352 ie->indirect_info->otr_token,
3353 ie->indirect_info->context);
3354 if (n)
3355 {
3356 target = n->decl;
3357 speculative = true;
3358 }
3359 }
3360
3361 if (target)
3362 {
3363 if (!possible_polymorphic_call_target_p
3364 (ie, cgraph_node::get_create (target)))
3365 {
3366 if (speculative)
3367 return NULL;
3368 target = ipa_impossible_devirt_target (ie, target);
3369 }
3370 return ipa_make_edge_direct_to_target (ie, target, speculative);
3371 }
3372 else
3373 return NULL;
3374 }
3375
3376 /* Update the param called notes associated with NODE when CS is being inlined,
3377 assuming NODE is (potentially indirectly) inlined into CS->callee.
3378 Moreover, if the callee is discovered to be constant, create a new cgraph
3379 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3380 unless NEW_EDGES is NULL. Return true iff new edges were created. */
3381
3382 static bool
3383 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3384 struct cgraph_node *node,
3385 vec<cgraph_edge *> *new_edges)
3386 {
3387 struct ipa_edge_args *top;
3388 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3389 struct ipa_node_params *new_root_info, *inlined_node_info;
3390 bool res = false;
3391
3392 ipa_check_create_edge_args ();
3393 top = IPA_EDGE_REF (cs);
3394 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3395 ? cs->caller->global.inlined_to
3396 : cs->caller);
3397 inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());
3398
3399 for (ie = node->indirect_calls; ie; ie = next_ie)
3400 {
3401 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3402 struct ipa_jump_func *jfunc;
3403 int param_index;
3404 cgraph_node *spec_target = NULL;
3405
3406 next_ie = ie->next_callee;
3407
3408 if (ici->param_index == -1)
3409 continue;
3410
3411 /* We must check the range due to calls with a variable number of arguments. */
3412 if (ici->param_index >= ipa_get_cs_argument_count (top))
3413 {
3414 ici->param_index = -1;
3415 continue;
3416 }
3417
3418 param_index = ici->param_index;
3419 jfunc = ipa_get_ith_jump_func (top, param_index);
3420
3421 if (ie->speculative)
3422 {
3423 struct cgraph_edge *de;
3424 struct ipa_ref *ref;
3425 ie->speculative_call_info (de, ie, ref);
3426 spec_target = de->callee;
3427 }
3428
3429 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3430 new_direct_edge = NULL;
3431 else if (ici->polymorphic)
3432 {
3433 ipa_polymorphic_call_context ctx;
3434 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3435 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3436 }
3437 else
3438 {
3439 tree target_type = ipa_get_type (inlined_node_info, param_index);
3440 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3441 target_type,
3442 new_root_info);
3443 }
3444
3445 /* If speculation was removed, then we need to do nothing. */
3446 if (new_direct_edge && new_direct_edge != ie
3447 && new_direct_edge->callee == spec_target)
3448 {
3449 new_direct_edge->indirect_inlining_edge = 1;
3450 top = IPA_EDGE_REF (cs);
3451 res = true;
3452 if (!new_direct_edge->speculative)
3453 continue;
3454 }
3455 else if (new_direct_edge)
3456 {
3457 new_direct_edge->indirect_inlining_edge = 1;
3458 if (new_direct_edge->call_stmt)
3459 new_direct_edge->call_stmt_cannot_inline_p
3460 = !gimple_check_call_matching_types (
3461 new_direct_edge->call_stmt,
3462 new_direct_edge->callee->decl, false);
3463 if (new_edges)
3464 {
3465 new_edges->safe_push (new_direct_edge);
3466 res = true;
3467 }
3468 top = IPA_EDGE_REF (cs);
3469 /* If speculative edge was introduced we still need to update
3470 call info of the indirect edge. */
3471 if (!new_direct_edge->speculative)
3472 continue;
3473 }
3474 if (jfunc->type == IPA_JF_PASS_THROUGH
3475 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3476 {
3477 if (ici->agg_contents
3478 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3479 && !ici->polymorphic)
3480 ici->param_index = -1;
3481 else
3482 {
3483 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3484 if (ici->polymorphic
3485 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3486 ici->vptr_changed = true;
3487 }
3488 }
3489 else if (jfunc->type == IPA_JF_ANCESTOR)
3490 {
3491 if (ici->agg_contents
3492 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3493 && !ici->polymorphic)
3494 ici->param_index = -1;
3495 else
3496 {
3497 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3498 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3499 if (ici->polymorphic
3500 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3501 ici->vptr_changed = true;
3502 }
3503 }
3504 else
3505 /* Either we can find a destination for this edge now or never. */
3506 ici->param_index = -1;
3507 }
3508
3509 return res;
3510 }
3511
3512 /* Recursively traverse subtree of NODE (including node) made of inlined
3513 cgraph_edges when CS has been inlined and invoke
3514 update_indirect_edges_after_inlining on all nodes and
3515 update_jump_functions_after_inlining on all non-inlined edges that lead out
3516 of this subtree. Newly discovered indirect edges will be added to
3517 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3518 created. */
3519
3520 static bool
3521 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3522 struct cgraph_node *node,
3523 vec<cgraph_edge *> *new_edges)
3524 {
3525 struct cgraph_edge *e;
3526 bool res;
3527
3528 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3529
3530 for (e = node->callees; e; e = e->next_callee)
3531 if (!e->inline_failed)
3532 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3533 else
3534 update_jump_functions_after_inlining (cs, e);
3535 for (e = node->indirect_calls; e; e = e->next_callee)
3536 update_jump_functions_after_inlining (cs, e);
3537
3538 return res;
3539 }
3540
3541 /* Combine two controlled uses counts as done during inlining. */
3542
3543 static int
3544 combine_controlled_uses_counters (int c, int d)
3545 {
3546 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3547 return IPA_UNDESCRIBED_USE;
3548 else
3549 return c + d - 1;
3550 }
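
/* For example, if the new root had three described uses of a function
   address (c == 3) and the inlined callee forwarded its copy to two more
   places (d == 2), inlining consumes the single use that was the argument
   of the call connecting them, leaving 3 + 2 - 1 == 4 described uses.  */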
3551
3552 /* Propagate the number of controlled users from CS->callee to the new root of the
3553 tree of inlined nodes. */
3554
3555 static void
3556 propagate_controlled_uses (struct cgraph_edge *cs)
3557 {
3558 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3559 struct cgraph_node *new_root = cs->caller->global.inlined_to
3560 ? cs->caller->global.inlined_to : cs->caller;
3561 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3562 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3563 int count, i;
3564
3565 count = MIN (ipa_get_cs_argument_count (args),
3566 ipa_get_param_count (old_root_info));
3567 for (i = 0; i < count; i++)
3568 {
3569 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3570 struct ipa_cst_ref_desc *rdesc;
3571
3572 if (jf->type == IPA_JF_PASS_THROUGH)
3573 {
3574 int src_idx, c, d;
3575 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3576 c = ipa_get_controlled_uses (new_root_info, src_idx);
3577 d = ipa_get_controlled_uses (old_root_info, i);
3578
3579 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3580 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3581 c = combine_controlled_uses_counters (c, d);
3582 ipa_set_controlled_uses (new_root_info, src_idx, c);
3583 if (c == 0 && new_root_info->ipcp_orig_node)
3584 {
3585 struct cgraph_node *n;
3586 struct ipa_ref *ref;
3587 tree t = new_root_info->known_csts[src_idx];
3588
3589 if (t && TREE_CODE (t) == ADDR_EXPR
3590 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3591 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3592 && (ref = new_root->find_reference (n, NULL, 0)))
3593 {
3594 if (dump_file)
3595 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3596 "reference from %s to %s.\n",
3597 new_root->dump_name (),
3598 n->dump_name ());
3599 ref->remove_reference ();
3600 }
3601 }
3602 }
3603 else if (jf->type == IPA_JF_CONST
3604 && (rdesc = jfunc_rdesc_usable (jf)))
3605 {
3606 int d = ipa_get_controlled_uses (old_root_info, i);
3607 int c = rdesc->refcount;
3608 rdesc->refcount = combine_controlled_uses_counters (c, d);
3609 if (rdesc->refcount == 0)
3610 {
3611 tree cst = ipa_get_jf_constant (jf);
3612 struct cgraph_node *n;
3613 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3614 && TREE_CODE (TREE_OPERAND (cst, 0))
3615 == FUNCTION_DECL);
3616 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3617 if (n)
3618 {
3619 struct cgraph_node *clone;
3620 bool ok;
3621 ok = remove_described_reference (n, rdesc);
3622 gcc_checking_assert (ok);
3623
3624 clone = cs->caller;
3625 while (clone->global.inlined_to
3626 && clone != rdesc->cs->caller
3627 && IPA_NODE_REF (clone)->ipcp_orig_node)
3628 {
3629 struct ipa_ref *ref;
3630 ref = clone->find_reference (n, NULL, 0);
3631 if (ref)
3632 {
3633 if (dump_file)
3634 fprintf (dump_file, "ipa-prop: Removing "
3635 "cloning-created reference "
3636 "from %s to %s.\n",
3637 clone->dump_name (),
3638 n->dump_name ());
3639 ref->remove_reference ();
3640 }
3641 clone = clone->callers->caller;
3642 }
3643 }
3644 }
3645 }
3646 }
3647
3648 for (i = ipa_get_param_count (old_root_info);
3649 i < ipa_get_cs_argument_count (args);
3650 i++)
3651 {
3652 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3653
3654 if (jf->type == IPA_JF_CONST)
3655 {
3656 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3657 if (rdesc)
3658 rdesc->refcount = IPA_UNDESCRIBED_USE;
3659 }
3660 else if (jf->type == IPA_JF_PASS_THROUGH)
3661 ipa_set_controlled_uses (new_root_info,
3662 jf->value.pass_through.formal_id,
3663 IPA_UNDESCRIBED_USE);
3664 }
3665 }
3666
3667 /* Update jump functions and call note functions on inlining the call site CS.
3668 CS is expected to lead to a node already cloned by
3669 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3670 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
3671 created. */
3672
3673 bool
3674 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3675 vec<cgraph_edge *> *new_edges)
3676 {
3677 bool changed;
3678 /* Do nothing if the preparation phase has not been carried out yet
3679 (i.e. during early inlining). */
3680 if (!ipa_node_params_sum)
3681 return false;
3682 gcc_assert (ipa_edge_args_sum);
3683
3684 propagate_controlled_uses (cs);
3685 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3686
3687 return changed;
3688 }
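/* Usage sketch (editorial; the inliner in ipa-inline-transform.c drives this
   function in a similar way, but treat the code below as a hypothetical
   caller):

     vec<cgraph_edge *> new_edges = vNULL;
     if (ipa_propagate_indirect_call_infos (cs, &new_edges))
       {
         // Each newly discovered direct edge can now be considered for
         // further inlining or devirtualization by the caller.
       }
     new_edges.release ();
*/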
3689
3690 /* Ensure that the array of edge argument infos is big enough to accommodate
3691 a structure for all edges, reallocating it if necessary. Also allocate the
3692 associated hash tables if they do not already exist. */
3693
3694 void
3695 ipa_check_create_edge_args (void)
3696 {
3697 if (!ipa_edge_args_sum)
3698 ipa_edge_args_sum
3699 = (new (ggc_cleared_alloc <ipa_edge_args_sum_t> ())
3700 ipa_edge_args_sum_t (symtab, true));
3701 if (!ipa_bits_hash_table)
3702 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3703 if (!ipa_vr_hash_table)
3704 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3705 }
3706
3707 /* Free all ipa_edge_args structures. */
3708
3709 void
3710 ipa_free_all_edge_args (void)
3711 {
3712 if (!ipa_edge_args_sum)
3713 return;
3714
3715 ipa_edge_args_sum->release ();
3716 ipa_edge_args_sum = NULL;
3717 }
3718
3719 /* Free all ipa_node_params structures. */
3720
3721 void
3722 ipa_free_all_node_params (void)
3723 {
3724 ipa_node_params_sum->release ();
3725 ipa_node_params_sum = NULL;
3726 }
3727
3728 /* Initialize the IPA-CP transformation summary and allocate any hash
3729 tables that do not already exist. */
3730
3731 void
3732 ipcp_transformation_initialize (void)
3733 {
3734 if (!ipa_bits_hash_table)
3735 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3736 if (!ipa_vr_hash_table)
3737 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3738 if (ipcp_transformation_sum == NULL)
3739 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
3740 }
3741
3742 /* Set the aggregate replacements of NODE to be AGGVALS. */
3743
3744 void
3745 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3746 struct ipa_agg_replacement_value *aggvals)
3747 {
3748 ipcp_transformation_initialize ();
3749 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
3750 s->agg_values = aggvals;
3751 }
3752
3753 /* Hook that is called by cgraph.c when an edge is removed. Adjust reference
3754 count data structures accordingly. */
3755
3756 void
3757 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
3758 {
3759 if (args->jump_functions)
3760 {
3761 struct ipa_jump_func *jf;
3762 int i;
3763 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3764 {
3765 struct ipa_cst_ref_desc *rdesc;
3766 try_decrement_rdesc_refcount (jf);
3767 if (jf->type == IPA_JF_CONST
3768 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3769 && rdesc->cs == cs)
3770 rdesc->cs = NULL;
3771 }
3772 }
3773 }
3774
3775 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
3776 reference count data structures accordingly. */
3777
3778 void
3779 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
3780 ipa_edge_args *old_args, ipa_edge_args *new_args)
3781 {
3782 unsigned int i;
3783
3784 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3785 if (old_args->polymorphic_call_contexts)
3786 new_args->polymorphic_call_contexts
3787 = vec_safe_copy (old_args->polymorphic_call_contexts);
3788
3789 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3790 {
3791 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3792 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3793
3794 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3795
3796 if (src_jf->type == IPA_JF_CONST)
3797 {
3798 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3799
3800 if (!src_rdesc)
3801 dst_jf->value.constant.rdesc = NULL;
3802 else if (src->caller == dst->caller)
3803 {
3804 struct ipa_ref *ref;
3805 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3806 gcc_checking_assert (n);
3807 ref = src->caller->find_reference (n, src->call_stmt,
3808 src->lto_stmt_uid);
3809 gcc_checking_assert (ref);
3810 dst->caller->clone_reference (ref, ref->stmt);
3811
3812 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3813 dst_rdesc->cs = dst;
3814 dst_rdesc->refcount = src_rdesc->refcount;
3815 dst_rdesc->next_duplicate = NULL;
3816 dst_jf->value.constant.rdesc = dst_rdesc;
3817 }
3818 else if (src_rdesc->cs == src)
3819 {
3820 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3821 dst_rdesc->cs = dst;
3822 dst_rdesc->refcount = src_rdesc->refcount;
3823 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3824 src_rdesc->next_duplicate = dst_rdesc;
3825 dst_jf->value.constant.rdesc = dst_rdesc;
3826 }
3827 else
3828 {
3829 struct ipa_cst_ref_desc *dst_rdesc;
3830 /* This can happen during inlining, when a JFUNC can refer to a
3831 reference taken in a function up in the tree of inline clones.
3832 We need to find the duplicate that refers to our tree of
3833 inline clones. */
3834
3835 gcc_assert (dst->caller->global.inlined_to);
3836 for (dst_rdesc = src_rdesc->next_duplicate;
3837 dst_rdesc;
3838 dst_rdesc = dst_rdesc->next_duplicate)
3839 {
3840 struct cgraph_node *top;
3841 top = dst_rdesc->cs->caller->global.inlined_to
3842 ? dst_rdesc->cs->caller->global.inlined_to
3843 : dst_rdesc->cs->caller;
3844 if (dst->caller->global.inlined_to == top)
3845 break;
3846 }
3847 gcc_assert (dst_rdesc);
3848 dst_jf->value.constant.rdesc = dst_rdesc;
3849 }
3850 }
3851 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3852 && src->caller == dst->caller)
3853 {
3854 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3855 ? dst->caller->global.inlined_to : dst->caller;
3856 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3857 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3858
3859 int c = ipa_get_controlled_uses (root_info, idx);
3860 if (c != IPA_UNDESCRIBED_USE)
3861 {
3862 c++;
3863 ipa_set_controlled_uses (root_info, idx, c);
3864 }
3865 }
3866 }
3867 }
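/* Editorial summary of the IPA_JF_CONST handling above (not in the original
   source).  A duplicated constant jump function ends up with its rdesc set
   in one of three ways:
     1) same caller: a fresh rdesc together with a cloned ipa_ref;
     2) the source edge owned the rdesc: a fresh rdesc linked into the
        next_duplicate chain;
     3) otherwise: the pre-existing duplicate whose edge belongs to the same
        tree of inline clones is reused. */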
3868
3869 /* Analyze a function newly added to the callgraph. */
3870
3871 static void
3872 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3873 {
3874 if (node->has_gimple_body_p ())
3875 ipa_analyze_node (node);
3876 }
3877
3878 /* Hook called by the summary infrastructure when a node is duplicated. */
3879
3880 void
3881 ipa_node_params_t::duplicate (cgraph_node *src, cgraph_node *dst,
3882 ipa_node_params *old_info,
3883 ipa_node_params *new_info)
3884 {
3885 ipa_agg_replacement_value *old_av, *new_av;
3886
3887 new_info->descriptors = vec_safe_copy (old_info->descriptors);
3888 new_info->lattices = NULL;
3889 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3890 new_info->known_csts = old_info->known_csts.copy ();
3891 new_info->known_contexts = old_info->known_contexts.copy ();
3892
3893 new_info->analysis_done = old_info->analysis_done;
3894 new_info->node_enqueued = old_info->node_enqueued;
3895 new_info->versionable = old_info->versionable;
3896
3897 old_av = ipa_get_agg_replacements_for_node (src);
3898 if (old_av)
3899 {
3900 new_av = NULL;
3901 while (old_av)
3902 {
3903 struct ipa_agg_replacement_value *v;
3904
3905 v = ggc_alloc<ipa_agg_replacement_value> ();
3906 memcpy (v, old_av, sizeof (*v));
3907 v->next = new_av;
3908 new_av = v;
3909 old_av = old_av->next;
3910 }
3911 ipa_set_node_agg_value_chain (dst, new_av);
3912 }
3913
3914 ipcp_transformation *src_trans = ipcp_get_transformation_summary (src);
3915
3916 if (src_trans)
3917 {
3918 ipcp_transformation_initialize ();
3919 src_trans = ipcp_transformation_sum->get_create (src);
3920 ipcp_transformation *dst_trans
3921 = ipcp_transformation_sum->get_create (dst);
3922
3923 dst_trans->bits = vec_safe_copy (src_trans->bits);
3924
3925 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
3926 vec<ipa_vr, va_gc> *&dst_vr
3927 = ipcp_get_transformation_summary (dst)->m_vr;
3928 if (vec_safe_length (src_trans->m_vr) > 0)
3929 {
3930 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3931 for (unsigned i = 0; i < src_vr->length (); ++i)
3932 dst_vr->quick_push ((*src_vr)[i]);
3933 }
3934 }
3935 }
3936
3937 /* Register our cgraph hooks if they are not already there. */
3938
3939 void
3940 ipa_register_cgraph_hooks (void)
3941 {
3942 ipa_check_create_node_params ();
3943 ipa_check_create_edge_args ();
3944
3945 function_insertion_hook_holder =
3946 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3947 }
3948
3949 /* Unregister our cgraph hooks if they have been registered. */
3950
3951 static void
3952 ipa_unregister_cgraph_hooks (void)
3953 {
3954 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3955 function_insertion_hook_holder = NULL;
3956 }
3957
3958 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3959 longer needed after ipa-cp. */
3960
3961 void
3962 ipa_free_all_structures_after_ipa_cp (void)
3963 {
3964 if (!optimize && !in_lto_p)
3965 {
3966 ipa_free_all_edge_args ();
3967 ipa_free_all_node_params ();
3968 ipcp_sources_pool.release ();
3969 ipcp_cst_values_pool.release ();
3970 ipcp_poly_ctx_values_pool.release ();
3971 ipcp_agg_lattice_pool.release ();
3972 ipa_unregister_cgraph_hooks ();
3973 ipa_refdesc_pool.release ();
3974 }
3975 }
3976
3977 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3978 longer needed after indirect inlining. */
3979
3980 void
3981 ipa_free_all_structures_after_iinln (void)
3982 {
3983 ipa_free_all_edge_args ();
3984 ipa_free_all_node_params ();
3985 ipa_unregister_cgraph_hooks ();
3986 ipcp_sources_pool.release ();
3987 ipcp_cst_values_pool.release ();
3988 ipcp_poly_ctx_values_pool.release ();
3989 ipcp_agg_lattice_pool.release ();
3990 ipa_refdesc_pool.release ();
3991 }
3992
3993 /* Print the ipa_tree_map data structure (i.e. the parameter descriptors)
3994 of function NODE to F. */
3995
3996 void
3997 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3998 {
3999 int i, count;
4000 struct ipa_node_params *info;
4001
4002 if (!node->definition)
4003 return;
4004 info = IPA_NODE_REF (node);
4005 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
4006 count = ipa_get_param_count (info);
4007 for (i = 0; i < count; i++)
4008 {
4009 int c;
4010
4011 fprintf (f, " ");
4012 ipa_dump_param (f, info, i);
4013 if (ipa_is_param_used (info, i))
4014 fprintf (f, " used");
4015 c = ipa_get_controlled_uses (info, i);
4016 if (c == IPA_UNDESCRIBED_USE)
4017 fprintf (f, " undescribed_use");
4018 else
4019 fprintf (f, " controlled_uses=%i", c);
4020 fprintf (f, "\n");
4021 }
4022 }
4023
4024 /* Print ipa_tree_map data structures of all functions in the
4025 callgraph to F. */
4026
4027 void
4028 ipa_print_all_params (FILE * f)
4029 {
4030 struct cgraph_node *node;
4031
4032 fprintf (f, "\nFunction parameters:\n");
4033 FOR_EACH_FUNCTION (node)
4034 ipa_print_node_params (f, node);
4035 }
4036
4037 /* Dump the AV linked list. */
4038
4039 void
4040 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4041 {
4042 bool comma = false;
4043 fprintf (f, " Aggregate replacements:");
4044 for (; av; av = av->next)
4045 {
4046 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4047 av->index, av->offset);
4048 print_generic_expr (f, av->value);
4049 comma = true;
4050 }
4051 fprintf (f, "\n");
4052 }
4053
4054 /* Stream out jump function JUMP_FUNC to OB. */
4055
4056 static void
4057 ipa_write_jump_function (struct output_block *ob,
4058 struct ipa_jump_func *jump_func)
4059 {
4060 struct ipa_agg_jf_item *item;
4061 struct bitpack_d bp;
4062 int i, count;
4063
4064 streamer_write_uhwi (ob, jump_func->type);
4065 switch (jump_func->type)
4066 {
4067 case IPA_JF_UNKNOWN:
4068 break;
4069 case IPA_JF_CONST:
4070 gcc_assert (
4071 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4072 stream_write_tree (ob, jump_func->value.constant.value, true);
4073 break;
4074 case IPA_JF_PASS_THROUGH:
4075 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4076 if (jump_func->value.pass_through.operation == NOP_EXPR)
4077 {
4078 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4079 bp = bitpack_create (ob->main_stream);
4080 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4081 streamer_write_bitpack (&bp);
4082 }
4083 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4084 == tcc_unary)
4085 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4086 else
4087 {
4088 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4089 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4090 }
4091 break;
4092 case IPA_JF_ANCESTOR:
4093 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4094 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4095 bp = bitpack_create (ob->main_stream);
4096 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4097 streamer_write_bitpack (&bp);
4098 break;
4099 }
4100
4101 count = vec_safe_length (jump_func->agg.items);
4102 streamer_write_uhwi (ob, count);
4103 if (count)
4104 {
4105 bp = bitpack_create (ob->main_stream);
4106 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4107 streamer_write_bitpack (&bp);
4108 }
4109
4110 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4111 {
4112 streamer_write_uhwi (ob, item->offset);
4113 stream_write_tree (ob, item->value, true);
4114 }
4115
4116 bp = bitpack_create (ob->main_stream);
4117 bp_pack_value (&bp, !!jump_func->bits, 1);
4118 streamer_write_bitpack (&bp);
4119 if (jump_func->bits)
4120 {
4121 streamer_write_widest_int (ob, jump_func->bits->value);
4122 streamer_write_widest_int (ob, jump_func->bits->mask);
4123 }
4124 bp_pack_value (&bp, !!jump_func->m_vr, 1);
4125 streamer_write_bitpack (&bp);
4126 if (jump_func->m_vr)
4127 {
4128 streamer_write_enum (ob->main_stream, value_range_type,
4129 VR_LAST, jump_func->m_vr->type);
4130 stream_write_tree (ob, jump_func->m_vr->min, true);
4131 stream_write_tree (ob, jump_func->m_vr->max, true);
4132 }
4133 }
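/* For reference (editorial summary of the function above, not in the
   original source), the stream layout of one jump function is:

     uhwi  type
     ...   type-specific payload (constant tree, formal_id, bitpacks)
     uhwi  count of aggregate items
     bitpack by_ref               -- only when the count is non-zero
     { uhwi offset, tree value }  -- repeated per aggregate item
     bitpack bits-known flag, then value and mask as widest_ints if set
     bitpack vr-known flag,   then range type enum and min/max trees if set

   ipa_read_jump_function below must consume the fields in exactly this
   order. */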
4134
4135 /* Read in jump function JUMP_FUNC from IB. */
4136
4137 static void
4138 ipa_read_jump_function (struct lto_input_block *ib,
4139 struct ipa_jump_func *jump_func,
4140 struct cgraph_edge *cs,
4141 struct data_in *data_in)
4142 {
4143 enum jump_func_type jftype;
4144 enum tree_code operation;
4145 int i, count;
4146
4147 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4148 switch (jftype)
4149 {
4150 case IPA_JF_UNKNOWN:
4151 ipa_set_jf_unknown (jump_func);
4152 break;
4153 case IPA_JF_CONST:
4154 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4155 break;
4156 case IPA_JF_PASS_THROUGH:
4157 operation = (enum tree_code) streamer_read_uhwi (ib);
4158 if (operation == NOP_EXPR)
4159 {
4160 int formal_id = streamer_read_uhwi (ib);
4161 struct bitpack_d bp = streamer_read_bitpack (ib);
4162 bool agg_preserved = bp_unpack_value (&bp, 1);
4163 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4164 }
4165 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4166 {
4167 int formal_id = streamer_read_uhwi (ib);
4168 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4169 }
4170 else
4171 {
4172 tree operand = stream_read_tree (ib, data_in);
4173 int formal_id = streamer_read_uhwi (ib);
4174 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4175 operation);
4176 }
4177 break;
4178 case IPA_JF_ANCESTOR:
4179 {
4180 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4181 int formal_id = streamer_read_uhwi (ib);
4182 struct bitpack_d bp = streamer_read_bitpack (ib);
4183 bool agg_preserved = bp_unpack_value (&bp, 1);
4184 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4185 break;
4186 }
4187 }
4188
4189 count = streamer_read_uhwi (ib);
4190 vec_alloc (jump_func->agg.items, count);
4191 if (count)
4192 {
4193 struct bitpack_d bp = streamer_read_bitpack (ib);
4194 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4195 }
4196 for (i = 0; i < count; i++)
4197 {
4198 struct ipa_agg_jf_item item;
4199 item.offset = streamer_read_uhwi (ib);
4200 item.value = stream_read_tree (ib, data_in);
4201 jump_func->agg.items->quick_push (item);
4202 }
4203
4204 struct bitpack_d bp = streamer_read_bitpack (ib);
4205 bool bits_known = bp_unpack_value (&bp, 1);
4206 if (bits_known)
4207 {
4208 widest_int value = streamer_read_widest_int (ib);
4209 widest_int mask = streamer_read_widest_int (ib);
4210 ipa_set_jfunc_bits (jump_func, value, mask);
4211 }
4212 else
4213 jump_func->bits = NULL;
4214
4215 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4216 bool vr_known = bp_unpack_value (&vr_bp, 1);
4217 if (vr_known)
4218 {
4219 enum value_range_type type = streamer_read_enum (ib, value_range_type,
4220 VR_LAST);
4221 tree min = stream_read_tree (ib, data_in);
4222 tree max = stream_read_tree (ib, data_in);
4223 ipa_set_jfunc_vr (jump_func, type, min, max);
4224 }
4225 else
4226 jump_func->m_vr = NULL;
4227 }
4228
4229 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4230 relevant to indirect inlining to OB. */
4231
4232 static void
4233 ipa_write_indirect_edge_info (struct output_block *ob,
4234 struct cgraph_edge *cs)
4235 {
4236 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4237 struct bitpack_d bp;
4238
4239 streamer_write_hwi (ob, ii->param_index);
4240 bp = bitpack_create (ob->main_stream);
4241 bp_pack_value (&bp, ii->polymorphic, 1);
4242 bp_pack_value (&bp, ii->agg_contents, 1);
4243 bp_pack_value (&bp, ii->member_ptr, 1);
4244 bp_pack_value (&bp, ii->by_ref, 1);
4245 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4246 bp_pack_value (&bp, ii->vptr_changed, 1);
4247 streamer_write_bitpack (&bp);
4248 if (ii->agg_contents || ii->polymorphic)
4249 streamer_write_hwi (ob, ii->offset);
4250 else
4251 gcc_assert (ii->offset == 0);
4252
4253 if (ii->polymorphic)
4254 {
4255 streamer_write_hwi (ob, ii->otr_token);
4256 stream_write_tree (ob, ii->otr_type, true);
4257 ii->context.stream_out (ob);
4258 }
4259 }
4260
4261 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4262 relevant to indirect inlining from IB. */
4263
4264 static void
4265 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4266 struct data_in *data_in,
4267 struct cgraph_edge *cs)
4268 {
4269 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4270 struct bitpack_d bp;
4271
4272 ii->param_index = (int) streamer_read_hwi (ib);
4273 bp = streamer_read_bitpack (ib);
4274 ii->polymorphic = bp_unpack_value (&bp, 1);
4275 ii->agg_contents = bp_unpack_value (&bp, 1);
4276 ii->member_ptr = bp_unpack_value (&bp, 1);
4277 ii->by_ref = bp_unpack_value (&bp, 1);
4278 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4279 ii->vptr_changed = bp_unpack_value (&bp, 1);
4280 if (ii->agg_contents || ii->polymorphic)
4281 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4282 else
4283 ii->offset = 0;
4284 if (ii->polymorphic)
4285 {
4286 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4287 ii->otr_type = stream_read_tree (ib, data_in);
4288 ii->context.stream_in (ib, data_in);
4289 }
4290 }
4291
4292 /* Stream out NODE info to OB. */
4293
4294 static void
4295 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4296 {
4297 int node_ref;
4298 lto_symtab_encoder_t encoder;
4299 struct ipa_node_params *info = IPA_NODE_REF (node);
4300 int j;
4301 struct cgraph_edge *e;
4302 struct bitpack_d bp;
4303
4304 encoder = ob->decl_state->symtab_node_encoder;
4305 node_ref = lto_symtab_encoder_encode (encoder, node);
4306 streamer_write_uhwi (ob, node_ref);
4307
4308 streamer_write_uhwi (ob, ipa_get_param_count (info));
4309 for (j = 0; j < ipa_get_param_count (info); j++)
4310 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4311 bp = bitpack_create (ob->main_stream);
4312 gcc_assert (info->analysis_done
4313 || ipa_get_param_count (info) == 0);
4314 gcc_assert (!info->node_enqueued);
4315 gcc_assert (!info->ipcp_orig_node);
4316 for (j = 0; j < ipa_get_param_count (info); j++)
4317 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4318 streamer_write_bitpack (&bp);
4319 for (j = 0; j < ipa_get_param_count (info); j++)
4320 {
4321 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4322 stream_write_tree (ob, ipa_get_type (info, j), true);
4323 }
4324 for (e = node->callees; e; e = e->next_callee)
4325 {
4326 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4327
4328 streamer_write_uhwi (ob,
4329 ipa_get_cs_argument_count (args) * 2
4330 + (args->polymorphic_call_contexts != NULL));
4331 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4332 {
4333 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4334 if (args->polymorphic_call_contexts != NULL)
4335 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4336 }
4337 }
4338 for (e = node->indirect_calls; e; e = e->next_callee)
4339 {
4340 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4341
4342 streamer_write_uhwi (ob,
4343 ipa_get_cs_argument_count (args) * 2
4344 + (args->polymorphic_call_contexts != NULL));
4345 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4346 {
4347 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4348 if (args->polymorphic_call_contexts != NULL)
4349 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4350 }
4351 ipa_write_indirect_edge_info (ob, e);
4352 }
4353 }
4354
4355 /* Stream in NODE info from IB. */
4356
4357 static void
4358 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4359 struct data_in *data_in)
4360 {
4361 struct ipa_node_params *info = IPA_NODE_REF (node);
4362 int k;
4363 struct cgraph_edge *e;
4364 struct bitpack_d bp;
4365
4366 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4367
4368 for (k = 0; k < ipa_get_param_count (info); k++)
4369 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
4370
4371 bp = streamer_read_bitpack (ib);
4372 if (ipa_get_param_count (info) != 0)
4373 info->analysis_done = true;
4374 info->node_enqueued = false;
4375 for (k = 0; k < ipa_get_param_count (info); k++)
4376 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4377 for (k = 0; k < ipa_get_param_count (info); k++)
4378 {
4379 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4380 (*info->descriptors)[k].decl_or_type = stream_read_tree (ib, data_in);
4381 }
4382 for (e = node->callees; e; e = e->next_callee)
4383 {
4384 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4385 int count = streamer_read_uhwi (ib);
4386 bool contexts_computed = count & 1;
4387 count /= 2;
4388
4389 if (!count)
4390 continue;
4391 vec_safe_grow_cleared (args->jump_functions, count);
4392 if (contexts_computed)
4393 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4394
4395 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4396 {
4397 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4398 data_in);
4399 if (contexts_computed)
4400 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4401 }
4402 }
4403 for (e = node->indirect_calls; e; e = e->next_callee)
4404 {
4405 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4406 int count = streamer_read_uhwi (ib);
4407 bool contexts_computed = count & 1;
4408 count /= 2;
4409
4410 if (count)
4411 {
4412 vec_safe_grow_cleared (args->jump_functions, count);
4413 if (contexts_computed)
4414 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4415 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4416 {
4417 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4418 data_in);
4419 if (contexts_computed)
4420 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4421 }
4422 }
4423 ipa_read_indirect_edge_info (ib, data_in, e);
4424 }
4425 }
4426
4427 /* Write jump functions for the functions in the current LTO partition. */
4428
4429 void
4430 ipa_prop_write_jump_functions (void)
4431 {
4432 struct cgraph_node *node;
4433 struct output_block *ob;
4434 unsigned int count = 0;
4435 lto_symtab_encoder_iterator lsei;
4436 lto_symtab_encoder_t encoder;
4437
4438 if (!ipa_node_params_sum || !ipa_edge_args_sum)
4439 return;
4440
4441 ob = create_output_block (LTO_section_jump_functions);
4442 encoder = ob->decl_state->symtab_node_encoder;
4443 ob->symbol = NULL;
4444 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4445 lsei_next_function_in_partition (&lsei))
4446 {
4447 node = lsei_cgraph_node (lsei);
4448 if (node->has_gimple_body_p ()
4449 && IPA_NODE_REF (node) != NULL)
4450 count++;
4451 }
4452
4453 streamer_write_uhwi (ob, count);
4454
4455 /* Process all of the functions. */
4456 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4457 lsei_next_function_in_partition (&lsei))
4458 {
4459 node = lsei_cgraph_node (lsei);
4460 if (node->has_gimple_body_p ()
4461 && IPA_NODE_REF (node) != NULL)
4462 ipa_write_node_info (ob, node);
4463 }
4464 streamer_write_char_stream (ob->main_stream, 0);
4465 produce_asm (ob, NULL);
4466 destroy_output_block (ob);
4467 }
4468
4469 /* Read a jump functions section in file FILE_DATA of length LEN with data DATA. */
4470
4471 static void
4472 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4473 size_t len)
4474 {
4475 const struct lto_function_header *header =
4476 (const struct lto_function_header *) data;
4477 const int cfg_offset = sizeof (struct lto_function_header);
4478 const int main_offset = cfg_offset + header->cfg_size;
4479 const int string_offset = main_offset + header->main_size;
4480 struct data_in *data_in;
4481 unsigned int i;
4482 unsigned int count;
4483
4484 lto_input_block ib_main ((const char *) data + main_offset,
4485 header->main_size, file_data->mode_table);
4486
4487 data_in =
4488 lto_data_in_create (file_data, (const char *) data + string_offset,
4489 header->string_size, vNULL);
4490 count = streamer_read_uhwi (&ib_main);
4491
4492 for (i = 0; i < count; i++)
4493 {
4494 unsigned int index;
4495 struct cgraph_node *node;
4496 lto_symtab_encoder_t encoder;
4497
4498 index = streamer_read_uhwi (&ib_main);
4499 encoder = file_data->symtab_node_encoder;
4500 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4501 index));
4502 gcc_assert (node->definition);
4503 ipa_read_node_info (&ib_main, node, data_in);
4504 }
4505 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4506 len);
4507 lto_data_in_delete (data_in);
4508 }
4509
4510 /* Read ipcp jump functions. */
4511
4512 void
4513 ipa_prop_read_jump_functions (void)
4514 {
4515 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4516 struct lto_file_decl_data *file_data;
4517 unsigned int j = 0;
4518
4519 ipa_check_create_node_params ();
4520 ipa_check_create_edge_args ();
4521 ipa_register_cgraph_hooks ();
4522
4523 while ((file_data = file_data_vec[j++]))
4524 {
4525 size_t len;
4526 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4527
4528 if (data)
4529 ipa_prop_read_section (file_data, data, len);
4530 }
4531 }
4532
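/* Stream out the transformation summary of NODE to OB: its chain of
   aggregate replacement values followed by the known value ranges and known
   bits of its parameters. */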
4533 void
4534 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
4535 {
4536 int node_ref;
4537 unsigned int count = 0;
4538 lto_symtab_encoder_t encoder;
4539 struct ipa_agg_replacement_value *aggvals, *av;
4540
4541 aggvals = ipa_get_agg_replacements_for_node (node);
4542 encoder = ob->decl_state->symtab_node_encoder;
4543 node_ref = lto_symtab_encoder_encode (encoder, node);
4544 streamer_write_uhwi (ob, node_ref);
4545
4546 for (av = aggvals; av; av = av->next)
4547 count++;
4548 streamer_write_uhwi (ob, count);
4549
4550 for (av = aggvals; av; av = av->next)
4551 {
4552 struct bitpack_d bp;
4553
4554 streamer_write_uhwi (ob, av->offset);
4555 streamer_write_uhwi (ob, av->index);
4556 stream_write_tree (ob, av->value, true);
4557
4558 bp = bitpack_create (ob->main_stream);
4559 bp_pack_value (&bp, av->by_ref, 1);
4560 streamer_write_bitpack (&bp);
4561 }
4562
4563 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
4564 if (ts && vec_safe_length (ts->m_vr) > 0)
4565 {
4566 count = ts->m_vr->length ();
4567 streamer_write_uhwi (ob, count);
4568 for (unsigned i = 0; i < count; ++i)
4569 {
4570 struct bitpack_d bp;
4571 ipa_vr *parm_vr = &(*ts->m_vr)[i];
4572 bp = bitpack_create (ob->main_stream);
4573 bp_pack_value (&bp, parm_vr->known, 1);
4574 streamer_write_bitpack (&bp);
4575 if (parm_vr->known)
4576 {
4577 streamer_write_enum (ob->main_stream, value_range_type,
4578 VR_LAST, parm_vr->type);
4579 streamer_write_wide_int (ob, parm_vr->min);
4580 streamer_write_wide_int (ob, parm_vr->max);
4581 }
4582 }
4583 }
4584 else
4585 streamer_write_uhwi (ob, 0);
4586
4587 if (ts && vec_safe_length (ts->bits) > 0)
4588 {
4589 count = ts->bits->length ();
4590 streamer_write_uhwi (ob, count);
4591
4592 for (unsigned i = 0; i < count; ++i)
4593 {
4594 const ipa_bits *bits_jfunc = (*ts->bits)[i];
4595 struct bitpack_d bp = bitpack_create (ob->main_stream);
4596 bp_pack_value (&bp, !!bits_jfunc, 1);
4597 streamer_write_bitpack (&bp);
4598 if (bits_jfunc)
4599 {
4600 streamer_write_widest_int (ob, bits_jfunc->value);
4601 streamer_write_widest_int (ob, bits_jfunc->mask);
4602 }
4603 }
4604 }
4605 else
4606 streamer_write_uhwi (ob, 0);
4607 }
4608
4609 /* Stream in the aggregate replacements, value ranges and known bits of NODE from IB. */
4610
4611 static void
4612 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4613 data_in *data_in)
4614 {
4615 struct ipa_agg_replacement_value *aggvals = NULL;
4616 unsigned int count, i;
4617
4618 count = streamer_read_uhwi (ib);
4619 for (i = 0; i < count; i++)
4620 {
4621 struct ipa_agg_replacement_value *av;
4622 struct bitpack_d bp;
4623
4624 av = ggc_alloc<ipa_agg_replacement_value> ();
4625 av->offset = streamer_read_uhwi (ib);
4626 av->index = streamer_read_uhwi (ib);
4627 av->value = stream_read_tree (ib, data_in);
4628 bp = streamer_read_bitpack (ib);
4629 av->by_ref = bp_unpack_value (&bp, 1);
4630 av->next = aggvals;
4631 aggvals = av;
4632 }
4633 ipa_set_node_agg_value_chain (node, aggvals);
4634
4635 count = streamer_read_uhwi (ib);
4636 if (count > 0)
4637 {
4638 ipcp_transformation_initialize ();
4639 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
4640 vec_safe_grow_cleared (ts->m_vr, count);
4641 for (i = 0; i < count; i++)
4642 {
4643 ipa_vr *parm_vr;
4644 parm_vr = &(*ts->m_vr)[i];
4645 struct bitpack_d bp;
4646 bp = streamer_read_bitpack (ib);
4647 parm_vr->known = bp_unpack_value (&bp, 1);
4648 if (parm_vr->known)
4649 {
4650 parm_vr->type = streamer_read_enum (ib, value_range_type,
4651 VR_LAST);
4652 parm_vr->min = streamer_read_wide_int (ib);
4653 parm_vr->max = streamer_read_wide_int (ib);
4654 }
4655 }
4656 }
4657 count = streamer_read_uhwi (ib);
4658 if (count > 0)
4659 {
4660 ipcp_transformation_initialize ();
4661 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
4662 vec_safe_grow_cleared (ts->bits, count);
4663
4664 for (i = 0; i < count; i++)
4665 {
4666 struct bitpack_d bp = streamer_read_bitpack (ib);
4667 bool known = bp_unpack_value (&bp, 1);
4668 if (known)
4669 {
4670 ipa_bits *bits
4671 = ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
4672 streamer_read_widest_int (ib));
4673 (*ts->bits)[i] = bits;
4674 }
4675 }
4676 }
4677 }
4678
4679 /* Write all aggregate replacements and transformation summaries for nodes in the partition. */
4680
4681 void
4682 ipcp_write_transformation_summaries (void)
4683 {
4684 struct cgraph_node *node;
4685 struct output_block *ob;
4686 unsigned int count = 0;
4687 lto_symtab_encoder_iterator lsei;
4688 lto_symtab_encoder_t encoder;
4689
4690 ob = create_output_block (LTO_section_ipcp_transform);
4691 encoder = ob->decl_state->symtab_node_encoder;
4692 ob->symbol = NULL;
4693 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4694 lsei_next_function_in_partition (&lsei))
4695 {
4696 node = lsei_cgraph_node (lsei);
4697 if (node->has_gimple_body_p ())
4698 count++;
4699 }
4700
4701 streamer_write_uhwi (ob, count);
4702
4703 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4704 lsei_next_function_in_partition (&lsei))
4705 {
4706 node = lsei_cgraph_node (lsei);
4707 if (node->has_gimple_body_p ())
4708 write_ipcp_transformation_info (ob, node);
4709 }
4710 streamer_write_char_stream (ob->main_stream, 0);
4711 produce_asm (ob, NULL);
4712 destroy_output_block (ob);
4713 }
4714
4715 /* Read replacements section in file FILE_DATA of length LEN with data
4716 DATA. */
4717
4718 static void
4719 read_replacements_section (struct lto_file_decl_data *file_data,
4720 const char *data,
4721 size_t len)
4722 {
4723 const struct lto_function_header *header =
4724 (const struct lto_function_header *) data;
4725 const int cfg_offset = sizeof (struct lto_function_header);
4726 const int main_offset = cfg_offset + header->cfg_size;
4727 const int string_offset = main_offset + header->main_size;
4728 struct data_in *data_in;
4729 unsigned int i;
4730 unsigned int count;
4731
4732 lto_input_block ib_main ((const char *) data + main_offset,
4733 header->main_size, file_data->mode_table);
4734
4735 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4736 header->string_size, vNULL);
4737 count = streamer_read_uhwi (&ib_main);
4738
4739 for (i = 0; i < count; i++)
4740 {
4741 unsigned int index;
4742 struct cgraph_node *node;
4743 lto_symtab_encoder_t encoder;
4744
4745 index = streamer_read_uhwi (&ib_main);
4746 encoder = file_data->symtab_node_encoder;
4747 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4748 index));
4749 gcc_assert (node->definition);
4750 read_ipcp_transformation_info (&ib_main, node, data_in);
4751 }
4752 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4753 len);
4754 lto_data_in_delete (data_in);
4755 }
4756
4757 /* Read IPA-CP aggregate replacements. */
4758
4759 void
4760 ipcp_read_transformation_summaries (void)
4761 {
4762 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4763 struct lto_file_decl_data *file_data;
4764 unsigned int j = 0;
4765
4766 while ((file_data = file_data_vec[j++]))
4767 {
4768 size_t len;
4769 const char *data = lto_get_section_data (file_data,
4770 LTO_section_ipcp_transform,
4771 NULL, &len);
4772 if (data)
4773 read_replacements_section (file_data, data, len);
4774 }
4775 }
4776
4777 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4778 NODE. */
4779
4780 static void
4781 adjust_agg_replacement_values (struct cgraph_node *node,
4782 struct ipa_agg_replacement_value *aggval)
4783 {
4784 struct ipa_agg_replacement_value *v;
4785 int i, c = 0, d = 0, *adj;
4786
4787 if (!node->clone.combined_args_to_skip)
4788 return;
4789
4790 for (v = aggval; v; v = v->next)
4791 {
4792 gcc_assert (v->index >= 0);
4793 if (c < v->index)
4794 c = v->index;
4795 }
4796 c++;
4797
4798 adj = XALLOCAVEC (int, c);
4799 for (i = 0; i < c; i++)
4800 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4801 {
4802 adj[i] = -1;
4803 d++;
4804 }
4805 else
4806 adj[i] = i - d;
4807
4808 for (v = aggval; v; v = v->next)
4809 v->index = adj[v->index];
4810 }
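/* Worked example (editorial, not part of the original source): if NODE was
   cloned with combined_args_to_skip = {1} and the highest index seen in
   AGGVAL is 2, the remapping table built above is

     adj[0] = 0,  adj[1] = -1,  adj[2] = 1

   so a replacement attached to the original parameter 2 is re-indexed to
   the clone's parameter 1. */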
4811
4812 /* Dominator walker driving the ipcp modification phase. */
4813
4814 class ipcp_modif_dom_walker : public dom_walker
4815 {
4816 public:
4817 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
4818 vec<ipa_param_descriptor, va_gc> *descs,
4819 struct ipa_agg_replacement_value *av,
4820 bool *sc, bool *cc)
4821 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
4822 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
4823
4824 virtual edge before_dom_children (basic_block);
4825
4826 private:
4827 struct ipa_func_body_info *m_fbi;
4828 vec<ipa_param_descriptor, va_gc> *m_descriptors;
4829 struct ipa_agg_replacement_value *m_aggval;
4830 bool *m_something_changed, *m_cfg_changed;
4831 };
4832
4833 edge
4834 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
4835 {
4836 gimple_stmt_iterator gsi;
4837 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4838 {
4839 struct ipa_agg_replacement_value *v;
4840 gimple *stmt = gsi_stmt (gsi);
4841 tree rhs, val, t;
4842 HOST_WIDE_INT offset, size;
4843 int index;
4844 bool by_ref, vce;
4845
4846 if (!gimple_assign_load_p (stmt))
4847 continue;
4848 rhs = gimple_assign_rhs1 (stmt);
4849 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4850 continue;
4851
4852 vce = false;
4853 t = rhs;
4854 while (handled_component_p (t))
4855 {
4856 /* V_C_E can do things like convert an array of integers to one
4857 bigger integer and similar things we do not handle below. */
4858 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
4859 {
4860 vce = true;
4861 break;
4862 }
4863 t = TREE_OPERAND (t, 0);
4864 }
4865 if (vce)
4866 continue;
4867
4868 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
4869 &offset, &size, &by_ref))
4870 continue;
4871 for (v = m_aggval; v; v = v->next)
4872 if (v->index == index
4873 && v->offset == offset)
4874 break;
4875 if (!v
4876 || v->by_ref != by_ref
4877 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
4878 continue;
4879
4880 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4881 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4882 {
4883 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4884 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4885 else if (TYPE_SIZE (TREE_TYPE (rhs))
4886 == TYPE_SIZE (TREE_TYPE (v->value)))
4887 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4888 else
4889 {
4890 if (dump_file)
4891 {
4892 fprintf (dump_file, " const ");
4893 print_generic_expr (dump_file, v->value);
4894 fprintf (dump_file, " can't be converted to type of ");
4895 print_generic_expr (dump_file, rhs);
4896 fprintf (dump_file, "\n");
4897 }
4898 continue;
4899 }
4900 }
4901 else
4902 val = v->value;
4903
4904 if (dump_file && (dump_flags & TDF_DETAILS))
4905 {
4906 fprintf (dump_file, "Modifying stmt:\n ");
4907 print_gimple_stmt (dump_file, stmt, 0);
4908 }
4909 gimple_assign_set_rhs_from_tree (&gsi, val);
4910 update_stmt (stmt);
4911
4912 if (dump_file && (dump_flags & TDF_DETAILS))
4913 {
4914 fprintf (dump_file, "into:\n ");
4915 print_gimple_stmt (dump_file, stmt, 0);
4916 fprintf (dump_file, "\n");
4917 }
4918
4919 *m_something_changed = true;
4920 if (maybe_clean_eh_stmt (stmt)
4921 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4922 *m_cfg_changed = true;
4923 }
4924 return NULL;
4925 }
4926
4927 /* Update bits info of formal parameters as described in
4928 ipcp_transformation. */
4929
4930 static void
4931 ipcp_update_bits (struct cgraph_node *node)
4932 {
4933 tree parm = DECL_ARGUMENTS (node->decl);
4934 tree next_parm = parm;
4935 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
4936
4937 if (!ts || vec_safe_length (ts->bits) == 0)
4938 return;
4939
4940 vec<ipa_bits *, va_gc> &bits = *ts->bits;
4941 unsigned count = bits.length ();
4942
4943 for (unsigned i = 0; i < count; ++i, parm = next_parm)
4944 {
4945 if (node->clone.combined_args_to_skip
4946 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
4947 continue;
4948
4949 gcc_checking_assert (parm);
4950 next_parm = DECL_CHAIN (parm);
4951
4952 if (!bits[i]
4953 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
4954 || POINTER_TYPE_P (TREE_TYPE (parm)))
4955 || !is_gimple_reg (parm))
4956 continue;
4957
4958 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
4959 if (!ddef)
4960 continue;
4961
4962 if (dump_file)
4963 {
4964 fprintf (dump_file, "Adjusting mask for param %u to ", i);
4965 print_hex (bits[i]->mask, dump_file);
4966 fprintf (dump_file, "\n");
4967 }
4968
4969 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
4970 {
4971 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
4972 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
4973
4974 wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
4975 | wide_int::from (bits[i]->value, prec, sgn);
4976 set_nonzero_bits (ddef, nonzero_bits);
4977 }
4978 else
4979 {
4980 unsigned tem = bits[i]->mask.to_uhwi ();
4981 unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
4982 unsigned align = tem & -tem;
4983 unsigned misalign = bitpos & (align - 1);
4984
4985 if (align > 1)
4986 {
4987 if (dump_file)
4988 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
4989
4990 unsigned old_align, old_misalign;
4991 struct ptr_info_def *pi = get_ptr_info (ddef);
4992 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
4993
4994 if (old_known
4995 && old_align > align)
4996 {
4997 if (dump_file)
4998 {
4999 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5000 if ((old_misalign & (align - 1)) != misalign)
5001 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5002 old_misalign, misalign);
5003 }
5004 continue;
5005 }
5006
5007 if (old_known
5008 && ((misalign & (old_align - 1)) != old_misalign)
5009 && dump_file)
5010 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5011 old_misalign, misalign);
5012
5013 set_ptr_info_alignment (pi, align, misalign);
5014 }
5015 }
5016 }
5017 }
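/* Numeric example for the pointer branch above (editorial, not part of the
   original source): mask bits that are set mark unknown bits, so for

     mask = ...11111000 (the three low bits known), value = 0x4

   we get align = mask & -mask = 8 and misalign = 0x4 & 7 = 4, i.e. the
   pointer is known to be congruent to 4 modulo 8. */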
5018
5019 /* Update value range of formal parameters as described in
5020 ipcp_transformation. */
5021
5022 static void
5023 ipcp_update_vr (struct cgraph_node *node)
5024 {
5025 tree fndecl = node->decl;
5026 tree parm = DECL_ARGUMENTS (fndecl);
5027 tree next_parm = parm;
5028 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5029 if (!ts || vec_safe_length (ts->m_vr) == 0)
5030 return;
5031 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5032 unsigned count = vr.length ();
5033
5034 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5035 {
5036 if (node->clone.combined_args_to_skip
5037 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5038 continue;
5039 gcc_checking_assert (parm);
5040 next_parm = DECL_CHAIN (parm);
5041 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5042
5043 if (!ddef || !is_gimple_reg (parm))
5044 continue;
5045
5046 if (vr[i].known
5047 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5048 {
5049 tree type = TREE_TYPE (ddef);
5050 unsigned prec = TYPE_PRECISION (type);
5051 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5052 {
5053 if (dump_file)
5054 {
5055 fprintf (dump_file, "Setting value range of param %u ", i);
5056 fprintf (dump_file, "%s[",
5057 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5058 print_decs (vr[i].min, dump_file);
5059 fprintf (dump_file, ", ");
5060 print_decs (vr[i].max, dump_file);
5061 fprintf (dump_file, "]\n");
5062 }
5063 set_range_info (ddef, vr[i].type,
5064 wide_int_storage::from (vr[i].min, prec,
5065 TYPE_SIGN (type)),
5066 wide_int_storage::from (vr[i].max, prec,
5067 TYPE_SIGN (type)));
5068 }
5069 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5070 && vr[i].type == VR_ANTI_RANGE
5071 && wi::eq_p (vr[i].min, 0)
5072 && wi::eq_p (vr[i].max, 0))
5073 {
5074 if (dump_file)
5075 fprintf (dump_file, "Setting nonnull for %u\n", i);
5076 set_ptr_nonnull (ddef);
5077 }
5078 }
5079 }
5080 }
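/* Note (editorial): the pointer branch above relies on non-nullness being
   encoded as the anti-range ~[0, 0] ("never in [0, 0]"), which is why
   VR_ANTI_RANGE with min == max == 0 translates into set_ptr_nonnull. */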
5081
5082 /* IPCP transformation phase doing propagation of aggregate values. */
5083
5084 unsigned int
5085 ipcp_transform_function (struct cgraph_node *node)
5086 {
5087 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
5088 struct ipa_func_body_info fbi;
5089 struct ipa_agg_replacement_value *aggval;
5090 int param_count;
5091 bool cfg_changed = false, something_changed = false;
5092
5093 gcc_checking_assert (cfun);
5094 gcc_checking_assert (current_function_decl);
5095
5096 if (dump_file)
5097 fprintf (dump_file, "Modification phase of node %s\n",
5098 node->dump_name ());
5099
5100 ipcp_update_bits (node);
5101 ipcp_update_vr (node);
5102 aggval = ipa_get_agg_replacements_for_node (node);
5103 if (!aggval)
5104 return 0;
5105 param_count = count_formal_params (node->decl);
5106 if (param_count == 0)
5107 return 0;
5108 adjust_agg_replacement_values (node, aggval);
5109 if (dump_file)
5110 ipa_dump_agg_replacement_values (dump_file, aggval);
5111
5112 fbi.node = node;
5113 fbi.info = NULL;
5114 fbi.bb_infos = vNULL;
5115 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5116 fbi.param_count = param_count;
5117 fbi.aa_walked = 0;
5118
5119 vec_safe_grow_cleared (descriptors, param_count);
5120 ipa_populate_param_decls (node, *descriptors);
5121 calculate_dominance_info (CDI_DOMINATORS);
5122 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5123 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5124
5125 int i;
5126 struct ipa_bb_info *bi;
5127 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5128 free_ipa_bb_info (bi);
5129 fbi.bb_infos.release ();
5130 free_dominance_info (CDI_DOMINATORS);
5131
5132 ipcp_transformation *s = ipcp_transformation_sum->get (node);
5133 s->agg_values = NULL;
5134 s->bits = NULL;
5135 s->m_vr = NULL;
5136
5137 vec_free (descriptors);
5138
5139 if (!something_changed)
5140 return 0;
5141 else if (cfg_changed)
5142 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5143 else
5144 return TODO_update_ssa_only_virtuals;
5145 }
5146
5147 #include "gt-ipa-prop.h"