[PR 78365] Prudent type handling in IPA VR-prop
gcc/ipa-prop.c
/* Interprocedural analyses.
   Copyright (C) 2005-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "gimple-pretty-print.h"
#include "params.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"

/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the parameter infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}

/* Return index of the formal whose tree is PTREE in the function described by
   DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in the function which
   corresponds to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump a textual description of the Ith formal parameter of the function
   corresponding to INFO to FILE.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl_or_type, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)), 0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (dump_file);
	}

      if (jump_func->bits.known)
	{
	  fprintf (f, "         value: "); print_hex (jump_func->bits.value, f);
	  fprintf (f, ", mask: "); print_hex (jump_func->bits.mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown bits\n");

      if (jump_func->vr_known)
	{
	  fprintf (f, "         VR  ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr.type == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (jump_func->m_vr.min, f);
	  fprintf (f, ", ");
	  print_decs (jump_func->m_vr.max, f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}


/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup_for_dump (node->name ()), node->order,
	       xstrdup_for_dump (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be an unknown jump function, i.e. one that carries no
   information about the value passed.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->bits.known = false;
  jfunc->vr_known = false;
}

/* Set DST to be a copy of another constant jump function SRC (to be used by
   the jump function combination code).  The two functions will share their
   rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be a unary pass-through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an arithmetic pass-through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}

/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
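
/* For illustration only -- a hedged sketch, not taken from GCC sources or any
   testcase -- a constructor conforming to the assumptions above might look
   roughly like this at the source level:

     Derived::Derived ()
     {
       // section 1: calls to base-class constructors
       // section 2: stores of &_ZTV7Derived into the VMT pointers
       // section 3: member initializers and user-written code
     }

   (Derived and _ZTV7Derived are hypothetical names.)  */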

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}

/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change the type of such instances,
   but they may call destructors.  We assume that methods cannot destroy the
   THIS pointer.  Also, as a special case, constructors and destructors may
   change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inlined cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and, once we know the type
     of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

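  /* Restrict the reference to the pointer-sized slot at OFFSET; that is where
     the virtual method table pointer we are interested in is stored, so only
     writes that can alias it matter.  */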
  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If so, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that a modifying vdef has been
   encountered by setting the boolean variable pointed to by DATA to true.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct ipa_func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have gathered
   so far but which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* Main worker for load_from_unmodified_param and load_from_param.
   If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params.  Otherwise
   return -1.  */

static int
load_from_param_1 (struct ipa_func_body_info *fbi,
		   vec<ipa_param_descriptor> descriptors,
		   gimple *stmt)
{
  int index;
  tree op1;

  gcc_checking_assert (is_gimple_assign (stmt));
  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   and that parameter has not been modified, return its index in
   ipa_node_params.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
			    gimple *stmt)
{
  if (!gimple_assign_single_p (stmt))
    return -1;

  return load_from_param_1 (fbi, descriptors, stmt);
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params.  Otherwise return -1.
   Unlike load_from_unmodified_param, this function also accepts statements
   with a unary RHS, so that converted loads can be recognized as well.  */

static int
load_from_param (struct ipa_func_body_info *fbi,
		 vec<ipa_param_descriptor> descriptors,
		 gimple *stmt)
{
  if (!is_gimple_assign (stmt))
    return -1;

  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  if ((get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
      && (get_gimple_rhs_class (rhs_code) != GIMPLE_UNARY_RHS))
    return -1;

  return load_from_param_1 (fbi, descriptors, stmt);
}

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified
   before the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will
   return true even if it cannot prove the value has not been modified; in
   that case it will store false to *GUARANTEED_UNMODIFIED, otherwise it will
   store true there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor> descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size, max_size;
  bool reverse;
  tree base
    = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
	 */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters as they are accessed
   in different stages of IPA optimizations.  PARMS_AINFO contains the
   information that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;
  gimple *stmt2 = stmt;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	{
	  index = load_from_param (fbi, info->descriptors,
				   SSA_NAME_DEF_STMT (op1));
	  stmt2 = SSA_NAME_DEF_STMT (op1);
	}
      tc_ssa = op1;
    }
  else
    {
      index = load_from_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (is_gimple_assign (stmt2)
	      && gimple_assign_rhs_class (stmt2) == GIMPLE_UNARY_RHS
	      && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt2)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt2));
	default:;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}


/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
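
/* For orientation (an informal note, not from the sources): under the Itanium
   C++ ABI a pointer to member function is laid out roughly as

     struct
     {
       void (*__pfn) ();    <- method pointer (or vtable offset)
       ptrdiff_t __delta;   <- this-pointer adjustment
     };

   which is the two-field shape that the predicate above recognizes.  */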

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}

/* Simple linked list, describing known contents of an aggregate before
   a call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Find the proper place in the linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or unless such
   an element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}
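
/* To illustrate (an informal example; offsets and sizes are in bits): with a
   list that already describes bits 0..31, a new entry for bits 32..63 is
   simply linked in after it; a second entry for bits 0..31 sets
   *ALREADY_THERE instead; and an entry for bits 16..47 makes the function
   return NULL, because such a partial overlap cannot be represented.  */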

/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */
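
/* As an illustrative sketch of the kind of code this recognizes (hypothetical
   source, not from the testsuite):

     struct S s;
     s.a = 1;
     s.b = 16;
     foo (&s);

   The backward walk below would record the two constant stores, and the
   resulting jump function would describe the known contents of the aggregate
   at the call to foo.  */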

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size, &reverse);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size, &reverse);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple *stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool reverse;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size, &reverse);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}

/* Return the Ith param type of callee associated with call graph
   edge E.  */

tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}

/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  bool addr_nonzero = false;
	  bool strict_overflow = false;

	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && get_ptr_nonnull (arg))
	    addr_nonzero = true;
	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
	    addr_nonzero = true;

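	  /* A pointer known to be non-NULL is encoded below as the
	     anti-range ~[0, 0], i.e. any value except zero.  */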
	  if (addr_nonzero)
	    {
	      jfunc->vr_known = true;
	      jfunc->m_vr.type = VR_ANTI_RANGE;
	      jfunc->m_vr.min = build_int_cst (TREE_TYPE (arg), 0);
	      jfunc->m_vr.max = build_int_cst (TREE_TYPE (arg), 0);
	      jfunc->m_vr.equiv = NULL;
	    }
	  else
	    gcc_assert (!jfunc->vr_known);
	}
      else
	{
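	  /* The range recorded for ARG is expressed in the type of ARG.  The
	     formal parameter may have a different type (for instance a
	     narrower or differently signed integer), so rather than copying
	     the range verbatim, convert it through a NOP_EXPR to PARAM_TYPE
	     and keep it only if the result is still a usable range.  */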
	  wide_int min, max;
	  value_range_type type;
	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && (type = get_range_info (arg, &min, &max))
	      && (type == VR_RANGE || type == VR_ANTI_RANGE))
	    {
	      value_range vr;

	      vr.type = type;
	      vr.min = wide_int_to_tree (TREE_TYPE (arg), min);
	      vr.max = wide_int_to_tree (TREE_TYPE (arg), max);
	      vr.equiv = NULL;
	      extract_range_from_unary_expr (&jfunc->m_vr,
					     NOP_EXPR,
					     param_type,
					     &vr, TREE_TYPE (arg));
	      if (jfunc->m_vr.type == VR_RANGE
		  || jfunc->m_vr.type == VR_ANTI_RANGE)
		jfunc->vr_known = true;
	      else
		jfunc->vr_known = false;
	    }
	  else
	    gcc_assert (!jfunc->vr_known);
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
	{
	  jfunc->bits.known = true;

	  if (TREE_CODE (arg) == SSA_NAME)
	    {
	      jfunc->bits.value = 0;
	      jfunc->bits.mask = widest_int::from (get_nonzero_bits (arg),
						   TYPE_SIGN (TREE_TYPE (arg)));
	    }
	  else
	    {
	      jfunc->bits.value = wi::to_widest (arg);
	      jfunc->bits.mask = 0;
	    }
	}
      else if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT bitpos;
	  unsigned align;

	  jfunc->bits.known = true;
	  get_pointer_alignment_1 (arg, &align, &bitpos);
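	  /* An illustrative note: if ARG were known to be 16-byte aligned
	     plus 4 (ALIGN == 128 bits, BITPOS == 32 bits), the mask below
	     would have its low four bits cleared (those bits are known) and
	     the value would be 4, describing the known low bits of the
	     pointer.  */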
1811 jfunc->bits.mask = wi::mask<widest_int>(TYPE_PRECISION (TREE_TYPE (arg)), false)
1812 .and_not (align / BITS_PER_UNIT - 1);
1813 jfunc->bits.value = bitpos / BITS_PER_UNIT;
1814 }
1815 else
1816 gcc_assert (!jfunc->bits.known);
1817
1818 if (is_gimple_ip_invariant (arg)
1819 || (VAR_P (arg)
1820 && is_global_var (arg)
1821 && TREE_READONLY (arg)))
1822 ipa_set_jf_constant (jfunc, arg, cs);
1823 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1824 && TREE_CODE (arg) == PARM_DECL)
1825 {
1826 int index = ipa_get_param_decl_index (info, arg);
1827
1828 gcc_assert (index >= 0);
1829 /* Aggregate passed by value; check for pass-through, otherwise we
1830 will attempt to fill in the aggregate contents later in this
1831 loop. */
1832 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1833 {
1834 ipa_set_jf_simple_pass_through (jfunc, index, false);
1835 continue;
1836 }
1837 }
1838 else if (TREE_CODE (arg) == SSA_NAME)
1839 {
1840 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1841 {
1842 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1843 if (index >= 0)
1844 {
1845 bool agg_p;
1846 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1847 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1848 }
1849 }
1850 else
1851 {
1852 gimple *stmt = SSA_NAME_DEF_STMT (arg);
1853 if (is_gimple_assign (stmt))
1854 compute_complex_assign_jump_func (fbi, info, jfunc,
1855 call, stmt, arg, param_type);
1856 else if (gimple_code (stmt) == GIMPLE_PHI)
1857 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1858 call,
1859 as_a <gphi *> (stmt));
1860 }
1861 }
1862
1863 /* If ARG is a pointer, we cannot use its type to determine the type of the
1864 aggregate passed (because type conversions are ignored in gimple). Usually we
1865 can safely get the type from the function declaration, but in the case of K&R
1866 prototypes or variadic functions we can try our luck with the type of the pointer passed.
1867 TODO: Since we look for the actual initialization of the memory object, we may do
1868 better by working out the type based on the memory stores we find. */
1869 if (!param_type)
1870 param_type = TREE_TYPE (arg);
1871
1872 if ((jfunc->type != IPA_JF_PASS_THROUGH
1873 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1874 && (jfunc->type != IPA_JF_ANCESTOR
1875 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1876 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1877 || POINTER_TYPE_P (param_type)))
1878 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1879 }
1880 if (!useful_context)
1881 vec_free (args->polymorphic_call_contexts);
1882 }
1883
1884 /* Compute jump functions for all edges - both direct and indirect - outgoing
1885 from BB. */
1886
1887 static void
1888 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
1889 {
1890 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1891 int i;
1892 struct cgraph_edge *cs;
1893
1894 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1895 {
1896 struct cgraph_node *callee = cs->callee;
1897
1898 if (callee)
1899 {
1900 callee->ultimate_alias_target ();
1901 /* We do not need to bother analyzing calls to unknown functions
1902 unless they may become known during lto/whopr. */
1903 if (!callee->definition && !flag_lto)
1904 continue;
1905 }
1906 ipa_compute_jump_functions_for_edge (fbi, cs);
1907 }
1908 }
1909
1910 /* If STMT looks like a statement loading a value from a member pointer formal
1911 parameter, return that parameter and store the offset of the field to
1912 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1913 might be clobbered). If USE_DELTA, then we look for a use of the delta
1914 field rather than the pfn. */
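/* For example (an illustrative sketch of the layout this function matches,
   using the field names from the gimple dump further below), given a member
   pointer parameter

     struct { void (S::*__pfn) (); ptrdiff_t __delta; } f;

   a recognized load of the pfn field may look like

     f$__pfn_24 = f.__pfn;

   or, when the access goes through a MEM_REF,

     f$__pfn_24 = MEM[(struct *)&f];  */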
1915
1916 static tree
1917 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
1918 HOST_WIDE_INT *offset_p)
1919 {
1920 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1921
1922 if (!gimple_assign_single_p (stmt))
1923 return NULL_TREE;
1924
1925 rhs = gimple_assign_rhs1 (stmt);
1926 if (TREE_CODE (rhs) == COMPONENT_REF)
1927 {
1928 ref_field = TREE_OPERAND (rhs, 1);
1929 rhs = TREE_OPERAND (rhs, 0);
1930 }
1931 else
1932 ref_field = NULL_TREE;
1933 if (TREE_CODE (rhs) != MEM_REF)
1934 return NULL_TREE;
1935 rec = TREE_OPERAND (rhs, 0);
1936 if (TREE_CODE (rec) != ADDR_EXPR)
1937 return NULL_TREE;
1938 rec = TREE_OPERAND (rec, 0);
1939 if (TREE_CODE (rec) != PARM_DECL
1940 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1941 return NULL_TREE;
1942 ref_offset = TREE_OPERAND (rhs, 1);
1943
1944 if (use_delta)
1945 fld = delta_field;
1946 else
1947 fld = ptr_field;
1948 if (offset_p)
1949 *offset_p = int_bit_position (fld);
1950
1951 if (ref_field)
1952 {
1953 if (integer_nonzerop (ref_offset))
1954 return NULL_TREE;
1955 return ref_field == fld ? rec : NULL_TREE;
1956 }
1957 else
1958 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1959 : NULL_TREE;
1960 }
1961
1962 /* Returns true iff T is an SSA_NAME defined by a statement. */
1963
1964 static bool
1965 ipa_is_ssa_with_stmt_def (tree t)
1966 {
1967 if (TREE_CODE (t) == SSA_NAME
1968 && !SSA_NAME_IS_DEFAULT_DEF (t))
1969 return true;
1970 else
1971 return false;
1972 }
1973
1974 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1975 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1976 indirect call graph edge. */
1977
1978 static struct cgraph_edge *
1979 ipa_note_param_call (struct cgraph_node *node, int param_index,
1980 gcall *stmt)
1981 {
1982 struct cgraph_edge *cs;
1983
1984 cs = node->get_edge (stmt);
1985 cs->indirect_info->param_index = param_index;
1986 cs->indirect_info->agg_contents = 0;
1987 cs->indirect_info->member_ptr = 0;
1988 cs->indirect_info->guaranteed_unmodified = 0;
1989 return cs;
1990 }
1991
1992 /* Analyze the CALL whose called pointer is TARGET and examine uses of formal
1993 parameters of the caller FBI->node (described by FBI->info). Currently it
1994 checks
1995 whether the call calls a pointer that is a formal parameter and if so, the
1996 parameter is marked with the called flag and an indirect call graph edge
1997 describing the call is created. This is very simple for ordinary pointers
1998 represented in SSA but not-so-nice when it comes to member pointers. The
1999 ugly part of this function does nothing more than trying to match the
2000 pattern of such a call. An example of such a pattern is the gimple dump
2001 below; the call is on the last line:
2002
2003 <bb 2>:
2004 f$__delta_5 = f.__delta;
2005 f$__pfn_24 = f.__pfn;
2006
2007 or
2008 <bb 2>:
2009 f$__delta_5 = MEM[(struct *)&f];
2010 f$__pfn_24 = MEM[(struct *)&f + 4B];
2011
2012 and a few lines below:
2013
2014 <bb 5>
2015 D.2496_3 = (int) f$__pfn_24;
2016 D.2497_4 = D.2496_3 & 1;
2017 if (D.2497_4 != 0)
2018 goto <bb 3>;
2019 else
2020 goto <bb 4>;
2021
2022 <bb 6>:
2023 D.2500_7 = (unsigned int) f$__delta_5;
2024 D.2501_8 = &S + D.2500_7;
2025 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2026 D.2503_10 = *D.2502_9;
2027 D.2504_12 = f$__pfn_24 + -1;
2028 D.2505_13 = (unsigned int) D.2504_12;
2029 D.2506_14 = D.2503_10 + D.2505_13;
2030 D.2507_15 = *D.2506_14;
2031 iftmp.11_16 = (String:: *) D.2507_15;
2032
2033 <bb 7>:
2034 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2035 D.2500_19 = (unsigned int) f$__delta_5;
2036 D.2508_20 = &S + D.2500_19;
2037 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2038
2039 Such patterns are results of simple calls to a member pointer:
2040
2041 int doprinting (int (MyString::* f)(int) const)
2042 {
2043 MyString S ("somestring");
2044
2045 return (S.*f)(4);
2046 }
2047
2048 Moreover, the function looks for called pointers loaded from aggregates
2049 passed by value or reference. */
2050
2051 static void
2052 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2053 tree target)
2054 {
2055 struct ipa_node_params *info = fbi->info;
2056 HOST_WIDE_INT offset;
2057 bool by_ref;
2058
2059 if (SSA_NAME_IS_DEFAULT_DEF (target))
2060 {
2061 tree var = SSA_NAME_VAR (target);
2062 int index = ipa_get_param_decl_index (info, var);
2063 if (index >= 0)
2064 ipa_note_param_call (fbi->node, index, call);
2065 return;
2066 }
2067
2068 int index;
2069 gimple *def = SSA_NAME_DEF_STMT (target);
2070 bool guaranteed_unmodified;
2071 if (gimple_assign_single_p (def)
2072 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2073 gimple_assign_rhs1 (def), &index, &offset,
2074 NULL, &by_ref, &guaranteed_unmodified))
2075 {
2076 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2077 cs->indirect_info->offset = offset;
2078 cs->indirect_info->agg_contents = 1;
2079 cs->indirect_info->by_ref = by_ref;
2080 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2081 return;
2082 }
2083
2084 /* Now we need to try to match the complex pattern of calling a member
2085 pointer. */
2086 if (gimple_code (def) != GIMPLE_PHI
2087 || gimple_phi_num_args (def) != 2
2088 || !POINTER_TYPE_P (TREE_TYPE (target))
2089 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2090 return;
2091
2092 /* First, we need to check whether one of these is a load from a member
2093 pointer that is a parameter to this function. */
2094 tree n1 = PHI_ARG_DEF (def, 0);
2095 tree n2 = PHI_ARG_DEF (def, 1);
2096 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2097 return;
2098 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2099 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2100
2101 tree rec;
2102 basic_block bb, virt_bb;
2103 basic_block join = gimple_bb (def);
2104 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2105 {
2106 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2107 return;
2108
2109 bb = EDGE_PRED (join, 0)->src;
2110 virt_bb = gimple_bb (d2);
2111 }
2112 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2113 {
2114 bb = EDGE_PRED (join, 1)->src;
2115 virt_bb = gimple_bb (d1);
2116 }
2117 else
2118 return;
2119
2120 /* Second, we need to check that the basic blocks are laid out in the way
2121 corresponding to the pattern. */
2122
2123 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2124 || single_pred (virt_bb) != bb
2125 || single_succ (virt_bb) != join)
2126 return;
2127
2128 /* Third, let's check that the branching depends on the least
2129 significant bit of the pfn. */
2130
2131 gimple *branch = last_stmt (bb);
2132 if (!branch || gimple_code (branch) != GIMPLE_COND)
2133 return;
2134
2135 if ((gimple_cond_code (branch) != NE_EXPR
2136 && gimple_cond_code (branch) != EQ_EXPR)
2137 || !integer_zerop (gimple_cond_rhs (branch)))
2138 return;
2139
2140 tree cond = gimple_cond_lhs (branch);
2141 if (!ipa_is_ssa_with_stmt_def (cond))
2142 return;
2143
2144 def = SSA_NAME_DEF_STMT (cond);
2145 if (!is_gimple_assign (def)
2146 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2147 || !integer_onep (gimple_assign_rhs2 (def)))
2148 return;
2149
2150 cond = gimple_assign_rhs1 (def);
2151 if (!ipa_is_ssa_with_stmt_def (cond))
2152 return;
2153
2154 def = SSA_NAME_DEF_STMT (cond);
2155
2156 if (is_gimple_assign (def)
2157 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2158 {
2159 cond = gimple_assign_rhs1 (def);
2160 if (!ipa_is_ssa_with_stmt_def (cond))
2161 return;
2162 def = SSA_NAME_DEF_STMT (cond);
2163 }
2164
2165 tree rec2;
2166 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2167 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2168 == ptrmemfunc_vbit_in_delta),
2169 NULL);
2170 if (rec != rec2)
2171 return;
2172
2173 index = ipa_get_param_decl_index (info, rec);
2174 if (index >= 0
2175 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2176 {
2177 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2178 cs->indirect_info->offset = offset;
2179 cs->indirect_info->agg_contents = 1;
2180 cs->indirect_info->member_ptr = 1;
2181 cs->indirect_info->guaranteed_unmodified = 1;
2182 }
2183
2184 return;
2185 }
2186
2187 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2188 object referenced in the expression is a formal parameter of the caller
2189 FBI->node (described by FBI->info), create a call note for the
2190 statement. */
2191
2192 static void
2193 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2194 gcall *call, tree target)
2195 {
2196 tree obj = OBJ_TYPE_REF_OBJECT (target);
2197 int index;
2198 HOST_WIDE_INT anc_offset;
2199
2200 if (!flag_devirtualize)
2201 return;
2202
2203 if (TREE_CODE (obj) != SSA_NAME)
2204 return;
2205
2206 struct ipa_node_params *info = fbi->info;
2207 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2208 {
2209 struct ipa_jump_func jfunc;
2210 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2211 return;
2212
2213 anc_offset = 0;
2214 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2215 gcc_assert (index >= 0);
2216 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2217 call, &jfunc))
2218 return;
2219 }
2220 else
2221 {
2222 struct ipa_jump_func jfunc;
2223 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2224 tree expr;
2225
2226 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2227 if (!expr)
2228 return;
2229 index = ipa_get_param_decl_index (info,
2230 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2231 gcc_assert (index >= 0);
2232 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2233 call, &jfunc, anc_offset))
2234 return;
2235 }
2236
2237 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2238 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2239 ii->offset = anc_offset;
2240 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2241 ii->otr_type = obj_type_ref_class (target);
2242 ii->polymorphic = 1;
2243 }
2244
2245 /* Analyze a call statement CALL to determine whether and how it utilizes
2246 formal parameters of the caller FBI->node (described by
2247 FBI->info). */
2248
2249 static void
2250 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2251 {
2252 tree target = gimple_call_fn (call);
2253
2254 if (!target
2255 || (TREE_CODE (target) != SSA_NAME
2256 && !virtual_method_call_p (target)))
2257 return;
2258
2259 struct cgraph_edge *cs = fbi->node->get_edge (call);
2260 /* If we previously turned the call into a direct call, there is
2261 no need to analyze it. */
2262 if (cs && !cs->indirect_unknown_callee)
2263 return;
2264
2265 if (cs->indirect_info->polymorphic && flag_devirtualize)
2266 {
2267 tree instance;
2268 tree target = gimple_call_fn (call);
2269 ipa_polymorphic_call_context context (current_function_decl,
2270 target, call, &instance);
2271
2272 gcc_checking_assert (cs->indirect_info->otr_type
2273 == obj_type_ref_class (target));
2274 gcc_checking_assert (cs->indirect_info->otr_token
2275 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2276
2277 cs->indirect_info->vptr_changed
2278 = !context.get_dynamic_type (instance,
2279 OBJ_TYPE_REF_OBJECT (target),
2280 obj_type_ref_class (target), call);
2281 cs->indirect_info->context = context;
2282 }
2283
2284 if (TREE_CODE (target) == SSA_NAME)
2285 ipa_analyze_indirect_call_uses (fbi, call, target);
2286 else if (virtual_method_call_p (target))
2287 ipa_analyze_virtual_call_uses (fbi, call, target);
2288 }
2289
2290
2291 /* Analyze the call statement STMT with respect to formal parameters (described
2292 in FBI->info) of the caller given by FBI->node. Currently it only checks
2293 whether formal parameters are called. */
2294
2295 static void
2296 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2297 {
2298 if (is_gimple_call (stmt))
2299 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2300 }
2301
2302 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2303 If OP is a parameter declaration, mark it as used in the info structure
2304 passed in DATA. */
2305
2306 static bool
2307 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2308 {
2309 struct ipa_node_params *info = (struct ipa_node_params *) data;
2310
2311 op = get_base_address (op);
2312 if (op
2313 && TREE_CODE (op) == PARM_DECL)
2314 {
2315 int index = ipa_get_param_decl_index (info, op);
2316 gcc_assert (index >= 0);
2317 ipa_set_param_used (info, index, true);
2318 }
2319
2320 return false;
2321 }
2322
2323 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2324 the findings in various structures of the associated ipa_node_params
2325 structure, such as parameter flags, notes etc. FBI holds various data about
2326 the function being analyzed. */
2327
2328 static void
2329 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2330 {
2331 gimple_stmt_iterator gsi;
2332 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2333 {
2334 gimple *stmt = gsi_stmt (gsi);
2335
2336 if (is_gimple_debug (stmt))
2337 continue;
2338
2339 ipa_analyze_stmt_uses (fbi, stmt);
2340 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2341 visit_ref_for_mod_analysis,
2342 visit_ref_for_mod_analysis,
2343 visit_ref_for_mod_analysis);
2344 }
2345 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2346 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2347 visit_ref_for_mod_analysis,
2348 visit_ref_for_mod_analysis,
2349 visit_ref_for_mod_analysis);
2350 }
2351
2352 /* Calculate controlled uses of parameters of NODE. */
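/* A use is "controlled" if it appears in a call statement and is therefore
   described by a jump function.  E.g. (a sketch) in

     void f (void (*p) (void)) { p (); g (p); }

   both uses of P occur in calls, so P has two controlled uses; storing P
   into memory instead would force the count to IPA_UNDESCRIBED_USE.  */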
2353
2354 static void
2355 ipa_analyze_controlled_uses (struct cgraph_node *node)
2356 {
2357 struct ipa_node_params *info = IPA_NODE_REF (node);
2358
2359 for (int i = 0; i < ipa_get_param_count (info); i++)
2360 {
2361 tree parm = ipa_get_param (info, i);
2362 int controlled_uses = 0;
2363
2364 /* For SSA regs see if parameter is used. For non-SSA we compute
2365 the flag during modification analysis. */
2366 if (is_gimple_reg (parm))
2367 {
2368 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2369 parm);
2370 if (ddef && !has_zero_uses (ddef))
2371 {
2372 imm_use_iterator imm_iter;
2373 use_operand_p use_p;
2374
2375 ipa_set_param_used (info, i, true);
2376 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2377 if (!is_gimple_call (USE_STMT (use_p)))
2378 {
2379 if (!is_gimple_debug (USE_STMT (use_p)))
2380 {
2381 controlled_uses = IPA_UNDESCRIBED_USE;
2382 break;
2383 }
2384 }
2385 else
2386 controlled_uses++;
2387 }
2388 else
2389 controlled_uses = 0;
2390 }
2391 else
2392 controlled_uses = IPA_UNDESCRIBED_USE;
2393 ipa_set_controlled_uses (info, i, controlled_uses);
2394 }
2395 }
2396
2397 /* Free stuff in BI. */
2398
2399 static void
2400 free_ipa_bb_info (struct ipa_bb_info *bi)
2401 {
2402 bi->cg_edges.release ();
2403 bi->param_aa_statuses.release ();
2404 }
2405
2406 /* Dominator walker driving the analysis. */
2407
2408 class analysis_dom_walker : public dom_walker
2409 {
2410 public:
2411 analysis_dom_walker (struct ipa_func_body_info *fbi)
2412 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2413
2414 virtual edge before_dom_children (basic_block);
2415
2416 private:
2417 struct ipa_func_body_info *m_fbi;
2418 };
2419
2420 edge
2421 analysis_dom_walker::before_dom_children (basic_block bb)
2422 {
2423 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2424 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2425 return NULL;
2426 }
2427
2428 /* Release body info FBI. */
2429
2430 void
2431 ipa_release_body_info (struct ipa_func_body_info *fbi)
2432 {
2433 int i;
2434 struct ipa_bb_info *bi;
2435
2436 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2437 free_ipa_bb_info (bi);
2438 fbi->bb_infos.release ();
2439 }
2440
2441 /* Initialize the array describing properties of formal parameters
2442 of NODE, analyze their uses and compute jump functions associated
2443 with actual arguments of calls from within NODE. */
2444
2445 void
2446 ipa_analyze_node (struct cgraph_node *node)
2447 {
2448 struct ipa_func_body_info fbi;
2449 struct ipa_node_params *info;
2450
2451 ipa_check_create_node_params ();
2452 ipa_check_create_edge_args ();
2453 info = IPA_NODE_REF (node);
2454
2455 if (info->analysis_done)
2456 return;
2457 info->analysis_done = 1;
2458
2459 if (ipa_func_spec_opts_forbid_analysis_p (node))
2460 {
2461 for (int i = 0; i < ipa_get_param_count (info); i++)
2462 {
2463 ipa_set_param_used (info, i, true);
2464 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2465 }
2466 return;
2467 }
2468
2469 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2470 push_cfun (func);
2471 calculate_dominance_info (CDI_DOMINATORS);
2472 ipa_initialize_node_params (node);
2473 ipa_analyze_controlled_uses (node);
2474
2475 fbi.node = node;
2476 fbi.info = IPA_NODE_REF (node);
2477 fbi.bb_infos = vNULL;
2478 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2479 fbi.param_count = ipa_get_param_count (info);
2480 fbi.aa_walked = 0;
2481
2482 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2483 {
2484 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2485 bi->cg_edges.safe_push (cs);
2486 }
2487
2488 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2489 {
2490 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2491 bi->cg_edges.safe_push (cs);
2492 }
2493
2494 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2495
2496 ipa_release_body_info (&fbi);
2497 free_dominance_info (CDI_DOMINATORS);
2498 pop_cfun ();
2499 }
2500
2501 /* Update the jump functions associated with call graph edge E when the call
2502 graph edge CS is being inlined, assuming that E->caller is already (possibly
2503 indirectly) inlined into CS->callee and that E has not been inlined. */
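/* For instance (a sketch of the composition performed below): if B with
   formal parameter X is inlined into A, the A->B edge passed Y + 1 for X
   (an arithmetic pass-through of A's parameter Y), and a non-inlined edge
   B->C passes X straight through, then the updated jump function of the
   A->C edge becomes the arithmetic pass-through <Y PLUS_EXPR 1>.  */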
2504
2505 static void
2506 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2507 struct cgraph_edge *e)
2508 {
2509 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2510 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2511 int count = ipa_get_cs_argument_count (args);
2512 int i;
2513
2514 for (i = 0; i < count; i++)
2515 {
2516 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2517 struct ipa_polymorphic_call_context *dst_ctx
2518 = ipa_get_ith_polymorhic_call_context (args, i);
2519
2520 if (dst->type == IPA_JF_ANCESTOR)
2521 {
2522 struct ipa_jump_func *src;
2523 int dst_fid = dst->value.ancestor.formal_id;
2524 struct ipa_polymorphic_call_context *src_ctx
2525 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2526
2527 /* A variable number of arguments can cause havoc if we try to access
2528 one that does not exist in the inlined edge. So make sure we
2529 don't. */
2530 if (dst_fid >= ipa_get_cs_argument_count (top))
2531 {
2532 ipa_set_jf_unknown (dst);
2533 continue;
2534 }
2535
2536 src = ipa_get_ith_jump_func (top, dst_fid);
2537
2538 if (src_ctx && !src_ctx->useless_p ())
2539 {
2540 struct ipa_polymorphic_call_context ctx = *src_ctx;
2541
2542 /* TODO: Make type preserved safe WRT contexts. */
2543 if (!ipa_get_jf_ancestor_type_preserved (dst))
2544 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2545 ctx.offset_by (dst->value.ancestor.offset);
2546 if (!ctx.useless_p ())
2547 {
2548 if (!dst_ctx)
2549 {
2550 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2551 count);
2552 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2553 }
2554
2555 dst_ctx->combine_with (ctx);
2556 }
2557 }
2558
2559 if (src->agg.items
2560 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2561 {
2562 struct ipa_agg_jf_item *item;
2563 int j;
2564
2565 /* Currently we do not produce clobber aggregate jump functions;
2566 replace this with merging when we do. */
2567 gcc_assert (!dst->agg.items);
2568
2569 dst->agg.items = vec_safe_copy (src->agg.items);
2570 dst->agg.by_ref = src->agg.by_ref;
2571 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2572 item->offset -= dst->value.ancestor.offset;
2573 }
2574
2575 if (src->type == IPA_JF_PASS_THROUGH
2576 && src->value.pass_through.operation == NOP_EXPR)
2577 {
2578 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2579 dst->value.ancestor.agg_preserved &=
2580 src->value.pass_through.agg_preserved;
2581 }
2582 else if (src->type == IPA_JF_PASS_THROUGH
2583 && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
2584 {
2585 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2586 dst->value.ancestor.agg_preserved = false;
2587 }
2588 else if (src->type == IPA_JF_ANCESTOR)
2589 {
2590 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2591 dst->value.ancestor.offset += src->value.ancestor.offset;
2592 dst->value.ancestor.agg_preserved &=
2593 src->value.ancestor.agg_preserved;
2594 }
2595 else
2596 ipa_set_jf_unknown (dst);
2597 }
2598 else if (dst->type == IPA_JF_PASS_THROUGH)
2599 {
2600 struct ipa_jump_func *src;
2601 /* We must check the range due to calls with a variable number of arguments,
2602 and we cannot combine jump functions with operations. */
2603 if (dst->value.pass_through.operation == NOP_EXPR
2604 && (dst->value.pass_through.formal_id
2605 < ipa_get_cs_argument_count (top)))
2606 {
2607 int dst_fid = dst->value.pass_through.formal_id;
2608 src = ipa_get_ith_jump_func (top, dst_fid);
2609 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2610 struct ipa_polymorphic_call_context *src_ctx
2611 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2612
2613 if (src_ctx && !src_ctx->useless_p ())
2614 {
2615 struct ipa_polymorphic_call_context ctx = *src_ctx;
2616
2617 /* TODO: Make type preserved safe WRT contexts. */
2618 if (!ipa_get_jf_pass_through_type_preserved (dst))
2619 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2620 if (!ctx.useless_p ())
2621 {
2622 if (!dst_ctx)
2623 {
2624 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2625 count);
2626 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2627 }
2628 dst_ctx->combine_with (ctx);
2629 }
2630 }
2631 switch (src->type)
2632 {
2633 case IPA_JF_UNKNOWN:
2634 ipa_set_jf_unknown (dst);
2635 break;
2636 case IPA_JF_CONST:
2637 ipa_set_jf_cst_copy (dst, src);
2638 break;
2639
2640 case IPA_JF_PASS_THROUGH:
2641 {
2642 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2643 enum tree_code operation;
2644 operation = ipa_get_jf_pass_through_operation (src);
2645
2646 if (operation == NOP_EXPR)
2647 {
2648 bool agg_p;
2649 agg_p = dst_agg_p
2650 && ipa_get_jf_pass_through_agg_preserved (src);
2651 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2652 }
2653 else if (TREE_CODE_CLASS (operation) == tcc_unary)
2654 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
2655 else
2656 {
2657 tree operand = ipa_get_jf_pass_through_operand (src);
2658 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2659 operation);
2660 }
2661 break;
2662 }
2663 case IPA_JF_ANCESTOR:
2664 {
2665 bool agg_p;
2666 agg_p = dst_agg_p
2667 && ipa_get_jf_ancestor_agg_preserved (src);
2668 ipa_set_ancestor_jf (dst,
2669 ipa_get_jf_ancestor_offset (src),
2670 ipa_get_jf_ancestor_formal_id (src),
2671 agg_p);
2672 break;
2673 }
2674 default:
2675 gcc_unreachable ();
2676 }
2677
2678 if (src->agg.items
2679 && (dst_agg_p || !src->agg.by_ref))
2680 {
2681 /* Currently we do not produce clobber aggregate jump
2682 functions; replace this with merging when we do. */
2683 gcc_assert (!dst->agg.items);
2684
2685 dst->agg.by_ref = src->agg.by_ref;
2686 dst->agg.items = vec_safe_copy (src->agg.items);
2687 }
2688 }
2689 else
2690 ipa_set_jf_unknown (dst);
2691 }
2692 }
2693 }
2694
2695 /* If TARGET is an addr_expr of a function declaration, make it the
2696 (SPECULATIVE) destination of an indirect edge IE and return the edge.
2697 Otherwise, return NULL. */
2698
2699 struct cgraph_edge *
2700 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2701 bool speculative)
2702 {
2703 struct cgraph_node *callee;
2704 struct inline_edge_summary *es = inline_edge_summary (ie);
2705 bool unreachable = false;
2706
2707 if (TREE_CODE (target) == ADDR_EXPR)
2708 target = TREE_OPERAND (target, 0);
2709 if (TREE_CODE (target) != FUNCTION_DECL)
2710 {
2711 target = canonicalize_constructor_val (target, NULL);
2712 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2713 {
2714 /* Member pointer call that goes through a VMT lookup. */
2715 if (ie->indirect_info->member_ptr
2716 /* Or if target is not an invariant expression and we do not
2717 know if it will evaluate to a function at runtime.
2718 This can happen when folding through &VAR, where &VAR
2719 is IP invariant, but VAR itself is not.
2720
2721 TODO: Revisit this when GCC 5 is branched. It seems that
2722 member_ptr check is not needed and that we may try to fold
2723 the expression and see if VAR is readonly. */
2724 || !is_gimple_ip_invariant (target))
2725 {
2726 if (dump_enabled_p ())
2727 {
2728 location_t loc = gimple_location_safe (ie->call_stmt);
2729 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2730 "discovered direct call non-invariant "
2731 "%s/%i\n",
2732 ie->caller->name (), ie->caller->order);
2733 }
2734 return NULL;
2735 }
2736
2737
2738 if (dump_enabled_p ())
2739 {
2740 location_t loc = gimple_location_safe (ie->call_stmt);
2741 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2742 "discovered direct call to non-function in %s/%i, "
2743 "making it __builtin_unreachable\n",
2744 ie->caller->name (), ie->caller->order);
2745 }
2746
2747 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2748 callee = cgraph_node::get_create (target);
2749 unreachable = true;
2750 }
2751 else
2752 callee = cgraph_node::get (target);
2753 }
2754 else
2755 callee = cgraph_node::get (target);
2756
2757 /* Because may-edges are not explicitly represented and the vtable may be external,
2758 we may create the first reference to the object in the unit. */
2759 if (!callee || callee->global.inlined_to)
2760 {
2761
2762 /* We had better ensure we can refer to it.
2763 In the case of static functions we are out of luck, since we have already
2764 removed their bodies. In the case of public functions we may or may
2765 not introduce the reference. */
2766 if (!canonicalize_constructor_val (target, NULL)
2767 || !TREE_PUBLIC (target))
2768 {
2769 if (dump_file)
2770 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2771 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2772 xstrdup_for_dump (ie->caller->name ()),
2773 ie->caller->order,
2774 xstrdup_for_dump (ie->callee->name ()),
2775 ie->callee->order);
2776 return NULL;
2777 }
2778 callee = cgraph_node::get_create (target);
2779 }
2780
2781 /* If the edge is already speculated. */
2782 if (speculative && ie->speculative)
2783 {
2784 struct cgraph_edge *e2;
2785 struct ipa_ref *ref;
2786 ie->speculative_call_info (e2, ie, ref);
2787 if (e2->callee->ultimate_alias_target ()
2788 != callee->ultimate_alias_target ())
2789 {
2790 if (dump_file)
2791 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2792 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2793 xstrdup_for_dump (ie->caller->name ()),
2794 ie->caller->order,
2795 xstrdup_for_dump (callee->name ()),
2796 callee->order,
2797 xstrdup_for_dump (e2->callee->name ()),
2798 e2->callee->order);
2799 }
2800 else
2801 {
2802 if (dump_file)
2803 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2804 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2805 xstrdup_for_dump (ie->caller->name ()),
2806 ie->caller->order,
2807 xstrdup_for_dump (callee->name ()),
2808 callee->order);
2809 }
2810 return NULL;
2811 }
2812
2813 if (!dbg_cnt (devirt))
2814 return NULL;
2815
2816 ipa_check_create_node_params ();
2817
2818 /* We cannot make edges to inline clones. It is a bug if someone removed
2819 the cgraph node too early. */
2820 gcc_assert (!callee->global.inlined_to);
2821
2822 if (dump_file && !unreachable)
2823 {
2824 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2825 "(%s/%i -> %s/%i), for stmt ",
2826 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2827 speculative ? "speculative" : "known",
2828 xstrdup_for_dump (ie->caller->name ()),
2829 ie->caller->order,
2830 xstrdup_for_dump (callee->name ()),
2831 callee->order);
2832 if (ie->call_stmt)
2833 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2834 else
2835 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2836 }
2837 if (dump_enabled_p ())
2838 {
2839 location_t loc = gimple_location_safe (ie->call_stmt);
2840
2841 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2842 "converting indirect call in %s to direct call to %s\n",
2843 ie->caller->name (), callee->name ());
2844 }
2845 if (!speculative)
2846 {
2847 struct cgraph_edge *orig = ie;
2848 ie = ie->make_direct (callee);
2849 /* If we resolved a speculative edge, the cost is already up to date
2850 for the direct call (adjusted by inline_edge_duplication_hook). */
2851 if (ie == orig)
2852 {
2853 es = inline_edge_summary (ie);
2854 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2855 - eni_size_weights.call_cost);
2856 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2857 - eni_time_weights.call_cost);
2858 }
2859 }
2860 else
2861 {
2862 if (!callee->can_be_discarded_p ())
2863 {
2864 cgraph_node *alias;
2865 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2866 if (alias)
2867 callee = alias;
2868 }
2869 /* make_speculative will update ie's cost to direct call cost. */
2870 ie = ie->make_speculative
2871 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2872 }
2873
2874 return ie;
2875 }
2876
2877 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
2878 CONSTRUCTOR and return it. Return NULL if the search fails for some
2879 reason. */
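/* E.g. (an illustrative sketch): for

     static const int a[4] = {1, 2, 3, 4};

   and 32-bit ints, a REQ_OFFSET of 64 bits selects the element at byte
   offset 8 and the function returns the constant 3.  */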
2880
2881 static tree
2882 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
2883 {
2884 tree type = TREE_TYPE (constructor);
2885 if (TREE_CODE (type) != ARRAY_TYPE
2886 && TREE_CODE (type) != RECORD_TYPE)
2887 return NULL;
2888
2889 unsigned ix;
2890 tree index, val;
2891 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
2892 {
2893 HOST_WIDE_INT elt_offset;
2894 if (TREE_CODE (type) == ARRAY_TYPE)
2895 {
2896 offset_int off;
2897 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
2898 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
2899
2900 if (index)
2901 {
2902 off = wi::to_offset (index);
2903 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
2904 {
2905 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
2906 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
2907 off = wi::sext (off - wi::to_offset (low_bound),
2908 TYPE_PRECISION (TREE_TYPE (index)));
2909 }
2910 off *= wi::to_offset (unit_size);
2911 }
2912 else
2913 off = wi::to_offset (unit_size) * ix;
2914
2915 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
2916 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
2917 continue;
2918 elt_offset = off.to_shwi ();
2919 }
2920 else if (TREE_CODE (type) == RECORD_TYPE)
2921 {
2922 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
2923 if (DECL_BIT_FIELD (index))
2924 continue;
2925 elt_offset = int_bit_position (index);
2926 }
2927 else
2928 gcc_unreachable ();
2929
2930 if (elt_offset > req_offset)
2931 return NULL;
2932
2933 if (TREE_CODE (val) == CONSTRUCTOR)
2934 return find_constructor_constant_at_offset (val,
2935 req_offset - elt_offset);
2936
2937 if (elt_offset == req_offset
2938 && is_gimple_reg_type (TREE_TYPE (val))
2939 && is_gimple_ip_invariant (val))
2940 return val;
2941 }
2942 return NULL;
2943 }
2944
2945 /* Check whether SCALAR could be used to look up an aggregate interprocedural
2946 invariant from a static constructor and if so, return it. Otherwise return
2947 NULL. */
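/* E.g. (a sketch): for

     static const struct { int a; int b; } s = { 1, 2 };
     ...
     bar (&s);

   a BY_REF lookup at OFFSET 32 (bits) on the SCALAR &s inspects the
   initializer of s and yields the constant 2.  */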
2948
2949 static tree
2950 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
2951 {
2952 if (by_ref)
2953 {
2954 if (TREE_CODE (scalar) != ADDR_EXPR)
2955 return NULL;
2956 scalar = TREE_OPERAND (scalar, 0);
2957 }
2958
2959 if (!VAR_P (scalar)
2960 || !is_global_var (scalar)
2961 || !TREE_READONLY (scalar)
2962 || !DECL_INITIAL (scalar)
2963 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
2964 return NULL;
2965
2966 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
2967 }
2968
2969 /* Retrieve value from aggregate jump function AGG or static initializer of
2970 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
2971 none. BY_REF specifies whether the value has to be passed by reference or
2972 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
2973 to is set to true if the value comes from an initializer of a constant. */
2974
2975 tree
2976 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
2977 HOST_WIDE_INT offset, bool by_ref,
2978 bool *from_global_constant)
2979 {
2980 struct ipa_agg_jf_item *item;
2981 int i;
2982
2983 if (scalar)
2984 {
2985 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
2986 if (res)
2987 {
2988 if (from_global_constant)
2989 *from_global_constant = true;
2990 return res;
2991 }
2992 }
2993
2994 if (!agg
2995 || by_ref != agg->by_ref)
2996 return NULL;
2997
2998 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2999 if (item->offset == offset)
3000 {
3001 /* Currently we do not have clobber values; return NULL for them once
3002 we do. */
3003 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3004 if (from_global_constant)
3005 *from_global_constant = false;
3006 return item->value;
3007 }
3008 return NULL;
3009 }
3010
3011 /* Remove a reference to SYMBOL from the list of references of a node given by
3012 reference description RDESC. Return true if the reference has been
3013 successfully found and removed. */
3014
3015 static bool
3016 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3017 {
3018 struct ipa_ref *to_del;
3019 struct cgraph_edge *origin;
3020
3021 origin = rdesc->cs;
3022 if (!origin)
3023 return false;
3024 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3025 origin->lto_stmt_uid);
3026 if (!to_del)
3027 return false;
3028
3029 to_del->remove_reference ();
3030 if (dump_file)
3031 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
3032 xstrdup_for_dump (origin->caller->name ()),
3033 origin->caller->order, xstrdup_for_dump (symbol->name ()));
3034 return true;
3035 }
3036
3037 /* If JFUNC has a reference description with refcount different from
3038 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3039 NULL. JFUNC must be a constant jump function. */
3040
3041 static struct ipa_cst_ref_desc *
3042 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3043 {
3044 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3045 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3046 return rdesc;
3047 else
3048 return NULL;
3049 }
3050
3051 /* If the value of constant jump function JFUNC is an address of a function
3052 declaration, return the associated call graph node. Otherwise return
3053 NULL. */
3054
3055 static cgraph_node *
3056 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3057 {
3058 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3059 tree cst = ipa_get_jf_constant (jfunc);
3060 if (TREE_CODE (cst) != ADDR_EXPR
3061 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3062 return NULL;
3063
3064 return cgraph_node::get (TREE_OPERAND (cst, 0));
3065 }
3066
3067
3068 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3069 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3070 the edge specified in the rdesc. Return false if either the symbol or the
3071 reference could not be found, otherwise return true. */
3072
3073 static bool
3074 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3075 {
3076 struct ipa_cst_ref_desc *rdesc;
3077 if (jfunc->type == IPA_JF_CONST
3078 && (rdesc = jfunc_rdesc_usable (jfunc))
3079 && --rdesc->refcount == 0)
3080 {
3081 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3082 if (!symbol)
3083 return false;
3084
3085 return remove_described_reference (symbol, rdesc);
3086 }
3087 return true;
3088 }
3089
3090 /* Try to find a destination for indirect edge IE that corresponds to a simple
3091 call or a call of a member function pointer and where the destination is a
3092 pointer formal parameter described by jump function JFUNC. If it can be
3093 determined, return the newly direct edge, otherwise return NULL.
3094 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3095
3096 static struct cgraph_edge *
3097 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3098 struct ipa_jump_func *jfunc,
3099 struct ipa_node_params *new_root_info)
3100 {
3101 struct cgraph_edge *cs;
3102 tree target;
3103 bool agg_contents = ie->indirect_info->agg_contents;
3104 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc);
3105 if (agg_contents)
3106 {
3107 bool from_global_constant;
3108 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3109 ie->indirect_info->offset,
3110 ie->indirect_info->by_ref,
3111 &from_global_constant);
3112 if (target
3113 && !from_global_constant
3114 && !ie->indirect_info->guaranteed_unmodified)
3115 return NULL;
3116 }
3117 else
3118 target = scalar;
3119 if (!target)
3120 return NULL;
3121 cs = ipa_make_edge_direct_to_target (ie, target);
3122
3123 if (cs && !agg_contents)
3124 {
3125 bool ok;
3126 gcc_checking_assert (cs->callee
3127 && (cs != ie
3128 || jfunc->type != IPA_JF_CONST
3129 || !cgraph_node_for_jfunc (jfunc)
3130 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3131 ok = try_decrement_rdesc_refcount (jfunc);
3132 gcc_checking_assert (ok);
3133 }
3134
3135 return cs;
3136 }
3137
3138 /* Return the target to be used in cases of impossible devirtualization. IE
3139 and target (the latter can be NULL) are dumped when dumping is enabled. */
3140
3141 tree
3142 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3143 {
3144 if (dump_file)
3145 {
3146 if (target)
3147 fprintf (dump_file,
3148 "Type inconsistent devirtualization: %s/%i->%s\n",
3149 ie->caller->name (), ie->caller->order,
3150 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3151 else
3152 fprintf (dump_file,
3153 "No devirtualization target in %s/%i\n",
3154 ie->caller->name (), ie->caller->order);
3155 }
3156 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3157 cgraph_node::get_create (new_target);
3158 return new_target;
3159 }
3160
3161 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3162 call based on a formal parameter which is described by jump function JFUNC
3163 and if it can be determined, make it direct and return the direct edge.
3164 Otherwise, return NULL. CTX describes the polymorphic context that the
3165 parameter the call is based on brings along with it. */
3166
3167 static struct cgraph_edge *
3168 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3169 struct ipa_jump_func *jfunc,
3170 struct ipa_polymorphic_call_context ctx)
3171 {
3172 tree target = NULL;
3173 bool speculative = false;
3174
3175 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3176 return NULL;
3177
3178 gcc_assert (!ie->indirect_info->by_ref);
3179
3180 /* Try to do lookup via known virtual table pointer value. */
3181 if (!ie->indirect_info->vptr_changed
3182 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3183 {
3184 tree vtable;
3185 unsigned HOST_WIDE_INT offset;
3186 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3187 : NULL;
3188 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3189 ie->indirect_info->offset,
3190 true);
3191 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3192 {
3193 bool can_refer;
3194 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3195 vtable, offset, &can_refer);
3196 if (can_refer)
3197 {
3198 if (!t
3199 || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3200 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3201 || !possible_polymorphic_call_target_p
3202 (ie, cgraph_node::get (t)))
3203 {
3204 /* Do not speculate builtin_unreachable, it is stupid! */
3205 if (!ie->indirect_info->vptr_changed)
3206 target = ipa_impossible_devirt_target (ie, target);
3207 else
3208 target = NULL;
3209 }
3210 else
3211 {
3212 target = t;
3213 speculative = ie->indirect_info->vptr_changed;
3214 }
3215 }
3216 }
3217 }
3218
3219 ipa_polymorphic_call_context ie_context (ie);
3220 vec <cgraph_node *> targets;
3221 bool final;
3222
3223 ctx.offset_by (ie->indirect_info->offset);
3224 if (ie->indirect_info->vptr_changed)
3225 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3226 ie->indirect_info->otr_type);
3227 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3228 targets = possible_polymorphic_call_targets
3229 (ie->indirect_info->otr_type,
3230 ie->indirect_info->otr_token,
3231 ctx, &final);
3232 if (final && targets.length () <= 1)
3233 {
3234 speculative = false;
3235 if (targets.length () == 1)
3236 target = targets[0]->decl;
3237 else
3238 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3239 }
3240 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3241 && !ie->speculative && ie->maybe_hot_p ())
3242 {
3243 cgraph_node *n;
3244 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3245 ie->indirect_info->otr_token,
3246 ie->indirect_info->context);
3247 if (n)
3248 {
3249 target = n->decl;
3250 speculative = true;
3251 }
3252 }
3253
3254 if (target)
3255 {
3256 if (!possible_polymorphic_call_target_p
3257 (ie, cgraph_node::get_create (target)))
3258 {
3259 if (speculative)
3260 return NULL;
3261 target = ipa_impossible_devirt_target (ie, target);
3262 }
3263 return ipa_make_edge_direct_to_target (ie, target, speculative);
3264 }
3265 else
3266 return NULL;
3267 }
3268
3269 /* Update the param called notes associated with NODE when CS is being inlined,
3270 assuming NODE is (potentially indirectly) inlined into CS->callee.
3271 Moreover, if the callee is discovered to be constant, create a new cgraph
3272 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3273 unless NEW_EDGES is NULL. Return true iff new edges were created. */
3274
3275 static bool
3276 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3277 struct cgraph_node *node,
3278 vec<cgraph_edge *> *new_edges)
3279 {
3280 struct ipa_edge_args *top;
3281 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3282 struct ipa_node_params *new_root_info;
3283 bool res = false;
3284
3285 ipa_check_create_edge_args ();
3286 top = IPA_EDGE_REF (cs);
3287 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3288 ? cs->caller->global.inlined_to
3289 : cs->caller);
3290
3291 for (ie = node->indirect_calls; ie; ie = next_ie)
3292 {
3293 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3294 struct ipa_jump_func *jfunc;
3295 int param_index;
3296 cgraph_node *spec_target = NULL;
3297
3298 next_ie = ie->next_callee;
3299
3300 if (ici->param_index == -1)
3301 continue;
3302
3303 /* We must check the range due to calls with a variable number of arguments: */
3304 if (ici->param_index >= ipa_get_cs_argument_count (top))
3305 {
3306 ici->param_index = -1;
3307 continue;
3308 }
3309
3310 param_index = ici->param_index;
3311 jfunc = ipa_get_ith_jump_func (top, param_index);
3312
3313 if (ie->speculative)
3314 {
3315 struct cgraph_edge *de;
3316 struct ipa_ref *ref;
3317 ie->speculative_call_info (de, ie, ref);
3318 spec_target = de->callee;
3319 }
3320
3321 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3322 new_direct_edge = NULL;
3323 else if (ici->polymorphic)
3324 {
3325 ipa_polymorphic_call_context ctx;
3326 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3327 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3328 }
3329 else
3330 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3331 new_root_info);
3332 /* If speculation was removed, then we need to do nothing. */
3333 if (new_direct_edge && new_direct_edge != ie
3334 && new_direct_edge->callee == spec_target)
3335 {
3336 new_direct_edge->indirect_inlining_edge = 1;
3337 top = IPA_EDGE_REF (cs);
3338 res = true;
3339 if (!new_direct_edge->speculative)
3340 continue;
3341 }
3342 else if (new_direct_edge)
3343 {
3344 new_direct_edge->indirect_inlining_edge = 1;
3345 if (new_direct_edge->call_stmt)
3346 new_direct_edge->call_stmt_cannot_inline_p
3347 = !gimple_check_call_matching_types (
3348 new_direct_edge->call_stmt,
3349 new_direct_edge->callee->decl, false);
3350 if (new_edges)
3351 {
3352 new_edges->safe_push (new_direct_edge);
3353 res = true;
3354 }
3355 top = IPA_EDGE_REF (cs);
3356 /* If a speculative edge was introduced, we still need to update the
3357 call info of the indirect edge. */
3358 if (!new_direct_edge->speculative)
3359 continue;
3360 }
3361 if (jfunc->type == IPA_JF_PASS_THROUGH
3362 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3363 {
3364 if (ici->agg_contents
3365 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3366 && !ici->polymorphic)
3367 ici->param_index = -1;
3368 else
3369 {
3370 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3371 if (ici->polymorphic
3372 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3373 ici->vptr_changed = true;
3374 }
3375 }
3376 else if (jfunc->type == IPA_JF_ANCESTOR)
3377 {
3378 if (ici->agg_contents
3379 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3380 && !ici->polymorphic)
3381 ici->param_index = -1;
3382 else
3383 {
3384 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3385 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3386 if (ici->polymorphic
3387 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3388 ici->vptr_changed = true;
3389 }
3390 }
3391 else
3392 /* Either we can find a destination for this edge now or never. */
3393 ici->param_index = -1;
3394 }
3395
3396 return res;
3397 }
3398
3399 /* Recursively traverse subtree of NODE (including node) made of inlined
3400 cgraph_edges when CS has been inlined and invoke
3401 update_indirect_edges_after_inlining on all nodes and
3402 update_jump_functions_after_inlining on all non-inlined edges that lead out
3403 of this subtree. Newly discovered indirect edges will be added to
3404 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3405 created. */
3406
3407 static bool
3408 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3409 struct cgraph_node *node,
3410 vec<cgraph_edge *> *new_edges)
3411 {
3412 struct cgraph_edge *e;
3413 bool res;
3414
3415 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3416
3417 for (e = node->callees; e; e = e->next_callee)
3418 if (!e->inline_failed)
3419 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3420 else
3421 update_jump_functions_after_inlining (cs, e);
3422 for (e = node->indirect_calls; e; e = e->next_callee)
3423 update_jump_functions_after_inlining (cs, e);
3424
3425 return res;
3426 }
3427
3428 /* Combine two controlled uses counts as done during inlining. */
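/* E.g. (a sketch): if a constant has C == 2 described uses in the caller,
   one of them being the call site that is inlined, and the callee's
   parameter has D == 1 controlled use, the combined count is
   2 + 1 - 1 = 2: the use at the now-inlined call site is replaced by the
   callee's single use.  */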
3429
3430 static int
3431 combine_controlled_uses_counters (int c, int d)
3432 {
3433 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3434 return IPA_UNDESCRIBED_USE;
3435 else
3436 return c + d - 1;
3437 }
3438
3439 /* Propagate the number of controlled users from CS->callee to the new root
3440 of the tree of inlined nodes. */
3441
3442 static void
3443 propagate_controlled_uses (struct cgraph_edge *cs)
3444 {
3445 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3446 struct cgraph_node *new_root = cs->caller->global.inlined_to
3447 ? cs->caller->global.inlined_to : cs->caller;
3448 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3449 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3450 int count, i;
3451
3452 count = MIN (ipa_get_cs_argument_count (args),
3453 ipa_get_param_count (old_root_info));
3454 for (i = 0; i < count; i++)
3455 {
3456 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3457 struct ipa_cst_ref_desc *rdesc;
3458
3459 if (jf->type == IPA_JF_PASS_THROUGH)
3460 {
3461 int src_idx, c, d;
3462 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3463 c = ipa_get_controlled_uses (new_root_info, src_idx);
3464 d = ipa_get_controlled_uses (old_root_info, i);
3465
3466 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3467 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3468 c = combine_controlled_uses_counters (c, d);
3469 ipa_set_controlled_uses (new_root_info, src_idx, c);
3470 if (c == 0 && new_root_info->ipcp_orig_node)
3471 {
3472 struct cgraph_node *n;
3473 struct ipa_ref *ref;
3474 tree t = new_root_info->known_csts[src_idx];
3475
3476 if (t && TREE_CODE (t) == ADDR_EXPR
3477 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3478 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3479 && (ref = new_root->find_reference (n, NULL, 0)))
3480 {
3481 if (dump_file)
3482 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3483 "reference from %s/%i to %s/%i.\n",
3484 xstrdup_for_dump (new_root->name ()),
3485 new_root->order,
3486 xstrdup_for_dump (n->name ()), n->order);
3487 ref->remove_reference ();
3488 }
3489 }
3490 }
3491 else if (jf->type == IPA_JF_CONST
3492 && (rdesc = jfunc_rdesc_usable (jf)))
3493 {
3494 int d = ipa_get_controlled_uses (old_root_info, i);
3495 int c = rdesc->refcount;
3496 rdesc->refcount = combine_controlled_uses_counters (c, d);
3497 if (rdesc->refcount == 0)
3498 {
3499 tree cst = ipa_get_jf_constant (jf);
3500 struct cgraph_node *n;
3501 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3502 && TREE_CODE (TREE_OPERAND (cst, 0))
3503 == FUNCTION_DECL);
3504 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3505 if (n)
3506 {
3507 struct cgraph_node *clone;
3508 bool ok;
3509 ok = remove_described_reference (n, rdesc);
3510 gcc_checking_assert (ok);
3511
3512 clone = cs->caller;
3513 while (clone->global.inlined_to
3514 && clone != rdesc->cs->caller
3515 && IPA_NODE_REF (clone)->ipcp_orig_node)
3516 {
3517 struct ipa_ref *ref;
3518 ref = clone->find_reference (n, NULL, 0);
3519 if (ref)
3520 {
3521 if (dump_file)
3522 fprintf (dump_file, "ipa-prop: Removing "
3523 "cloning-created reference "
3524 "from %s/%i to %s/%i.\n",
3525 xstrdup_for_dump (clone->name ()),
3526 clone->order,
3527 xstrdup_for_dump (n->name ()),
3528 n->order);
3529 ref->remove_reference ();
3530 }
3531 clone = clone->callers->caller;
3532 }
3533 }
3534 }
3535 }
3536 }
3537
3538 for (i = ipa_get_param_count (old_root_info);
3539 i < ipa_get_cs_argument_count (args);
3540 i++)
3541 {
3542 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3543
3544 if (jf->type == IPA_JF_CONST)
3545 {
3546 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3547 if (rdesc)
3548 rdesc->refcount = IPA_UNDESCRIBED_USE;
3549 }
3550 else if (jf->type == IPA_JF_PASS_THROUGH)
3551 ipa_set_controlled_uses (new_root_info,
3552 jf->value.pass_through.formal_id,
3553 IPA_UNDESCRIBED_USE);
3554 }
3555 }
3556
3557 /* Update jump functions and call note functions on inlining the call site CS.
3558 CS is expected to lead to a node already cloned by
3559 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3560 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3561 created. */
3562
3563 bool
3564 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3565 vec<cgraph_edge *> *new_edges)
3566 {
3567 bool changed;
3568 /* Do nothing if the preparation phase has not been carried out yet
3569 (i.e. during early inlining). */
3570 if (!ipa_node_params_sum)
3571 return false;
3572 gcc_assert (ipa_edge_args_vector);
3573
3574 propagate_controlled_uses (cs);
3575 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3576
3577 return changed;
3578 }
3579
3580 /* Frees all dynamically allocated structures that the argument info points
3581 to. */
3582
3583 void
3584 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3585 {
3586 vec_free (args->jump_functions);
3587 memset (args, 0, sizeof (*args));
3588 }
3589
3590 /* Free all ipa_edge structures. */
3591
3592 void
3593 ipa_free_all_edge_args (void)
3594 {
3595 int i;
3596 struct ipa_edge_args *args;
3597
3598 if (!ipa_edge_args_vector)
3599 return;
3600
3601 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3602 ipa_free_edge_args_substructures (args);
3603
3604 vec_free (ipa_edge_args_vector);
3605 }
3606
3607 /* Frees all dynamically allocated structures that the param info points
3608 to. */
3609
3610 ipa_node_params::~ipa_node_params ()
3611 {
3612 descriptors.release ();
3613 free (lattices);
3614 /* Lattice values and their sources are deallocated with their allocation
3615 pool. */
3616 known_csts.release ();
3617 known_contexts.release ();
3618
3619 lattices = NULL;
3620 ipcp_orig_node = NULL;
3621 analysis_done = 0;
3622 node_enqueued = 0;
3623 do_clone_for_all_contexts = 0;
3624 is_all_contexts_clone = 0;
3625 node_dead = 0;
3626 }
3627
3628 /* Free all ipa_node_params structures. */
3629
3630 void
3631 ipa_free_all_node_params (void)
3632 {
3633 delete ipa_node_params_sum;
3634 ipa_node_params_sum = NULL;
3635 }
3636
3637 /* Grow ipcp_transformations if necessary. */
3638
3639 void
3640 ipcp_grow_transformations_if_necessary (void)
3641 {
3642 if (vec_safe_length (ipcp_transformations)
3643 <= (unsigned) symtab->cgraph_max_uid)
3644 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3645 }
3646
3647 /* Set the aggregate replacements of NODE to be AGGVALS. */
3648
3649 void
3650 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3651 struct ipa_agg_replacement_value *aggvals)
3652 {
3653 ipcp_grow_transformations_if_necessary ();
3654 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3655 }
3656
3657 /* Hook that is called by cgraph.c when an edge is removed. */
3658
3659 static void
3660 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3661 {
3662 struct ipa_edge_args *args;
3663
3664 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3665 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3666 return;
3667
3668 args = IPA_EDGE_REF (cs);
3669 if (args->jump_functions)
3670 {
3671 struct ipa_jump_func *jf;
3672 int i;
3673 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3674 {
3675 struct ipa_cst_ref_desc *rdesc;
3676 try_decrement_rdesc_refcount (jf);
3677 if (jf->type == IPA_JF_CONST
3678 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3679 && rdesc->cs == cs)
3680 rdesc->cs = NULL;
3681 }
3682 }
3683
3684 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3685 }
3686
3687 /* Hook that is called by cgraph.c when an edge is duplicated. */
3688
3689 static void
3690 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3691 void *)
3692 {
3693 struct ipa_edge_args *old_args, *new_args;
3694 unsigned int i;
3695
3696 ipa_check_create_edge_args ();
3697
3698 old_args = IPA_EDGE_REF (src);
3699 new_args = IPA_EDGE_REF (dst);
3700
3701 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3702 if (old_args->polymorphic_call_contexts)
3703 new_args->polymorphic_call_contexts
3704 = vec_safe_copy (old_args->polymorphic_call_contexts);
3705
3706 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3707 {
3708 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3709 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3710
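      /* After the vec_safe_copy of the whole jump function vector above,
	 dst_jf->agg.items still shares the aggregate items vector with
	 src_jf; make a private copy for the new edge.  */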
3711 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3712
3713 if (src_jf->type == IPA_JF_CONST)
3714 {
3715 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3716
3717 if (!src_rdesc)
3718 dst_jf->value.constant.rdesc = NULL;
3719 else if (src->caller == dst->caller)
3720 {
3721 struct ipa_ref *ref;
3722 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3723 gcc_checking_assert (n);
3724 ref = src->caller->find_reference (n, src->call_stmt,
3725 src->lto_stmt_uid);
3726 gcc_checking_assert (ref);
3727 dst->caller->clone_reference (ref, ref->stmt);
3728
3729 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3730 dst_rdesc->cs = dst;
3731 dst_rdesc->refcount = src_rdesc->refcount;
3732 dst_rdesc->next_duplicate = NULL;
3733 dst_jf->value.constant.rdesc = dst_rdesc;
3734 }
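	  /* Clones of the edge that originally took the reference get a
	     fresh rdesc which is chained into the duplicate list of the
	     source description.  */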
3735 else if (src_rdesc->cs == src)
3736 {
3737 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3738 dst_rdesc->cs = dst;
3739 dst_rdesc->refcount = src_rdesc->refcount;
3740 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3741 src_rdesc->next_duplicate = dst_rdesc;
3742 dst_jf->value.constant.rdesc = dst_rdesc;
3743 }
3744 else
3745 {
3746 struct ipa_cst_ref_desc *dst_rdesc;
3747 /* This can happen during inlining, when a JFUNC can refer to a
3748 reference taken in a function up in the tree of inline clones.
3749 We need to find the duplicate that refers to our tree of
3750 inline clones. */
3751
3752 gcc_assert (dst->caller->global.inlined_to);
3753 for (dst_rdesc = src_rdesc->next_duplicate;
3754 dst_rdesc;
3755 dst_rdesc = dst_rdesc->next_duplicate)
3756 {
3757 struct cgraph_node *top;
3758 top = dst_rdesc->cs->caller->global.inlined_to
3759 ? dst_rdesc->cs->caller->global.inlined_to
3760 : dst_rdesc->cs->caller;
3761 if (dst->caller->global.inlined_to == top)
3762 break;
3763 }
3764 gcc_assert (dst_rdesc);
3765 dst_jf->value.constant.rdesc = dst_rdesc;
3766 }
3767 }
3768 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3769 && src->caller == dst->caller)
3770 {
3771 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3772 ? dst->caller->global.inlined_to : dst->caller;
3773 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3774 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3775
3776 int c = ipa_get_controlled_uses (root_info, idx);
3777 if (c != IPA_UNDESCRIBED_USE)
3778 {
3779 c++;
3780 ipa_set_controlled_uses (root_info, idx, c);
3781 }
3782 }
3783 }
3784 }
3785
3786 /* Analyze a newly added function in the callgraph.  */
3787
3788 static void
3789 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3790 {
3791 if (node->has_gimple_body_p ())
3792 ipa_analyze_node (node);
3793 }
3794
3795 /* Hook that is called by summary when a node is duplicated. */
3796
3797 void
3798 ipa_node_params_t::duplicate (cgraph_node *src, cgraph_node *dst,
3799 ipa_node_params *old_info,
3800 ipa_node_params *new_info)
3801 {
3802 ipa_agg_replacement_value *old_av, *new_av;
3803
3804 new_info->descriptors = old_info->descriptors.copy ();
3805 new_info->lattices = NULL;
3806 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3807
3808 new_info->analysis_done = old_info->analysis_done;
3809 new_info->node_enqueued = old_info->node_enqueued;
3810 new_info->versionable = old_info->versionable;
3811
3812 old_av = ipa_get_agg_replacements_for_node (src);
3813 if (old_av)
3814 {
3815 new_av = NULL;
3816 while (old_av)
3817 {
3818 struct ipa_agg_replacement_value *v;
3819
3820 v = ggc_alloc<ipa_agg_replacement_value> ();
3821 memcpy (v, old_av, sizeof (*v));
3822 v->next = new_av;
3823 new_av = v;
3824 old_av = old_av->next;
3825 }
3826 ipa_set_node_agg_value_chain (dst, new_av);
3827 }
3828
3829 ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
3830
3831 if (src_trans)
3832 {
3833 ipcp_grow_transformations_if_necessary ();
3834 src_trans = ipcp_get_transformation_summary (src);
3835 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
3836 vec<ipa_vr, va_gc> *&dst_vr
3837 = ipcp_get_transformation_summary (dst)->m_vr;
3838 if (vec_safe_length (src_trans->m_vr) > 0)
3839 {
3840 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3841 for (unsigned i = 0; i < src_vr->length (); ++i)
3842 dst_vr->quick_push ((*src_vr)[i]);
3843 }
3844 }
3845
3846 if (src_trans && vec_safe_length (src_trans->bits) > 0)
3847 {
3848 ipcp_grow_transformations_if_necessary ();
3849 src_trans = ipcp_get_transformation_summary (src);
3850 const vec<ipa_bits, va_gc> *src_bits = src_trans->bits;
3851 vec<ipa_bits, va_gc> *&dst_bits
3852 = ipcp_get_transformation_summary (dst)->bits;
3853 vec_safe_reserve_exact (dst_bits, src_bits->length ());
3854 for (unsigned i = 0; i < src_bits->length (); ++i)
3855 dst_bits->quick_push ((*src_bits)[i]);
3856 }
3857 }
3858
3859 /* Register our cgraph hooks if they are not already there. */
3860
3861 void
3862 ipa_register_cgraph_hooks (void)
3863 {
3864 ipa_check_create_node_params ();
3865
3866 if (!edge_removal_hook_holder)
3867 edge_removal_hook_holder =
3868 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3869 if (!edge_duplication_hook_holder)
3870 edge_duplication_hook_holder =
3871 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3872 function_insertion_hook_holder =
3873 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3874 }
3875
3876 /* Unregister our cgraph hooks if they have been registered.  */
3877
3878 static void
3879 ipa_unregister_cgraph_hooks (void)
3880 {
3881 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3882 edge_removal_hook_holder = NULL;
3883 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3884 edge_duplication_hook_holder = NULL;
3885 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3886 function_insertion_hook_holder = NULL;
3887 }
3888
3889 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3890 longer needed after ipa-cp. */
3891
3892 void
3893 ipa_free_all_structures_after_ipa_cp (void)
3894 {
3895 if (!optimize && !in_lto_p)
3896 {
3897 ipa_free_all_edge_args ();
3898 ipa_free_all_node_params ();
3899 ipcp_sources_pool.release ();
3900 ipcp_cst_values_pool.release ();
3901 ipcp_poly_ctx_values_pool.release ();
3902 ipcp_agg_lattice_pool.release ();
3903 ipa_unregister_cgraph_hooks ();
3904 ipa_refdesc_pool.release ();
3905 }
3906 }
3907
3908 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3909 longer needed after indirect inlining. */
3910
3911 void
3912 ipa_free_all_structures_after_iinln (void)
3913 {
3914 ipa_free_all_edge_args ();
3915 ipa_free_all_node_params ();
3916 ipa_unregister_cgraph_hooks ();
3917 ipcp_sources_pool.release ();
3918 ipcp_cst_values_pool.release ();
3919 ipcp_poly_ctx_values_pool.release ();
3920 ipcp_agg_lattice_pool.release ();
3921 ipa_refdesc_pool.release ();
3922 }
3923
3924 /* Print ipa_tree_map data structures (parameter descriptors) of function
3925    NODE to F.  */
3926
3927 void
3928 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3929 {
3930 int i, count;
3931 struct ipa_node_params *info;
3932
3933 if (!node->definition)
3934 return;
3935 info = IPA_NODE_REF (node);
3936 fprintf (f, " function %s/%i parameter descriptors:\n",
3937 node->name (), node->order);
3938 count = ipa_get_param_count (info);
3939 for (i = 0; i < count; i++)
3940 {
3941 int c;
3942
3943 fprintf (f, " ");
3944 ipa_dump_param (f, info, i);
3945 if (ipa_is_param_used (info, i))
3946 fprintf (f, " used");
3947 c = ipa_get_controlled_uses (info, i);
3948 if (c == IPA_UNDESCRIBED_USE)
3949 fprintf (f, " undescribed_use");
3950 else
3951 fprintf (f, " controlled_uses=%i", c);
3952 fprintf (f, "\n");
3953 }
3954 }
3955
3956 /* Print ipa_tree_map data structures of all functions in the
3957 callgraph to F. */
3958
3959 void
3960 ipa_print_all_params (FILE * f)
3961 {
3962 struct cgraph_node *node;
3963
3964 fprintf (f, "\nFunction parameters:\n");
3965 FOR_EACH_FUNCTION (node)
3966 ipa_print_node_params (f, node);
3967 }
3968
3969 /* Return a heap-allocated vector containing formal parameters of FNDECL.  */
3970
3971 vec<tree>
3972 ipa_get_vector_of_formal_parms (tree fndecl)
3973 {
3974 vec<tree> args;
3975 int count;
3976 tree parm;
3977
3978 gcc_assert (!flag_wpa);
3979 count = count_formal_params (fndecl);
3980 args.create (count);
3981 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3982 args.quick_push (parm);
3983
3984 return args;
3985 }
3986
3987 /* Return a heap-allocated vector containing types of formal parameters of
3988    function type FNTYPE.  */
3989
3990 vec<tree>
3991 ipa_get_vector_of_formal_parm_types (tree fntype)
3992 {
3993 vec<tree> types;
3994 int count = 0;
3995 tree t;
3996
3997 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3998 count++;
3999
4000 types.create (count);
4001 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
4002 types.quick_push (TREE_VALUE (t));
4003
4004 return types;
4005 }
4006
4007 /* Modify the function declaration FNDECL and its type according to the plan
4008    in ADJUSTMENTS.  It also sets base fields of individual adjustment
4009    structures to reflect the actual parameters being modified, which are
4010    determined by the base_index field.  */
4011
4012 void
4013 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
4014 {
4015 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
4016 tree orig_type = TREE_TYPE (fndecl);
4017 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
4018
4019   /* The following test is an ugly hack; some functions simply don't have any
4020      arguments in their type.  This is probably a bug but well...  */
4021 bool care_for_types = (old_arg_types != NULL_TREE);
4022 bool last_parm_void;
4023 vec<tree> otypes;
4024 if (care_for_types)
4025 {
4026 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
4027 == void_type_node);
4028 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
4029 if (last_parm_void)
4030 gcc_assert (oparms.length () + 1 == otypes.length ());
4031 else
4032 gcc_assert (oparms.length () == otypes.length ());
4033 }
4034 else
4035 {
4036 last_parm_void = false;
4037 otypes.create (0);
4038 }
4039
4040 int len = adjustments.length ();
4041 tree *link = &DECL_ARGUMENTS (fndecl);
4042 tree new_arg_types = NULL;
4043 for (int i = 0; i < len; i++)
4044 {
4045 struct ipa_parm_adjustment *adj;
4046 gcc_assert (link);
4047
4048 adj = &adjustments[i];
4049 tree parm;
4050 if (adj->op == IPA_PARM_OP_NEW)
4051 parm = NULL;
4052 else
4053 parm = oparms[adj->base_index];
4054 adj->base = parm;
4055
4056 if (adj->op == IPA_PARM_OP_COPY)
4057 {
4058 if (care_for_types)
4059 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
4060 new_arg_types);
4061 *link = parm;
4062 link = &DECL_CHAIN (parm);
4063 }
4064 else if (adj->op != IPA_PARM_OP_REMOVE)
4065 {
4066 tree new_parm;
4067 tree ptype;
4068
4069 if (adj->by_ref)
4070 ptype = build_pointer_type (adj->type);
4071 else
4072 {
4073 ptype = adj->type;
4074 if (is_gimple_reg_type (ptype))
4075 {
4076 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
4077 if (TYPE_ALIGN (ptype) != malign)
4078 ptype = build_aligned_type (ptype, malign);
4079 }
4080 }
4081
4082 if (care_for_types)
4083 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
4084
4085 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
4086 ptype);
4087 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
4088 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
4089 DECL_ARTIFICIAL (new_parm) = 1;
4090 DECL_ARG_TYPE (new_parm) = ptype;
4091 DECL_CONTEXT (new_parm) = fndecl;
4092 TREE_USED (new_parm) = 1;
4093 DECL_IGNORED_P (new_parm) = 1;
4094 layout_decl (new_parm, 0);
4095
4096 if (adj->op == IPA_PARM_OP_NEW)
4097 adj->base = NULL;
4098 else
4099 adj->base = parm;
4100 adj->new_decl = new_parm;
4101
4102 *link = new_parm;
4103 link = &DECL_CHAIN (new_parm);
4104 }
4105 }
4106
4107 *link = NULL_TREE;
4108
4109 tree new_reversed = NULL;
4110 if (care_for_types)
4111 {
4112 new_reversed = nreverse (new_arg_types);
4113 if (last_parm_void)
4114 {
4115 if (new_reversed)
4116 TREE_CHAIN (new_arg_types) = void_list_node;
4117 else
4118 new_reversed = void_list_node;
4119 }
4120 }
4121
4122   /* Use copy_node to preserve as much as possible from the original type
4123      (debug info, attribute lists etc.).
4124      The exception is that METHOD_TYPEs must have a THIS argument; when we
4125      are asked to remove it, we need to build a new FUNCTION_TYPE
4126      instead.  */
4127 tree new_type = NULL;
4128 if (TREE_CODE (orig_type) != METHOD_TYPE
4129 || (adjustments[0].op == IPA_PARM_OP_COPY
4130 && adjustments[0].base_index == 0))
4131 {
4132 new_type = build_distinct_type_copy (orig_type);
4133 TYPE_ARG_TYPES (new_type) = new_reversed;
4134 }
4135 else
4136 {
4137 new_type
4138 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
4139 new_reversed));
4140 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
4141 DECL_VINDEX (fndecl) = NULL_TREE;
4142 }
4143
4144   /* When the signature changes, we need to clear builtin info.  */
4145 if (DECL_BUILT_IN (fndecl))
4146 {
4147 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
4148 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
4149 }
4150
4151 TREE_TYPE (fndecl) = new_type;
4152 DECL_VIRTUAL_P (fndecl) = 0;
4153 DECL_LANG_SPECIFIC (fndecl) = NULL;
4154 otypes.release ();
4155 oparms.release ();
4156 }
4157
4158 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
4159 If this is a directly recursive call, CS must be NULL. Otherwise it must
4160 contain the corresponding call graph edge. */
4161
4162 void
4163 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
4164 ipa_parm_adjustment_vec adjustments)
4165 {
4166 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
4167 vec<tree> vargs;
4168 vec<tree, va_gc> **debug_args = NULL;
4169 gcall *new_stmt;
4170 gimple_stmt_iterator gsi, prev_gsi;
4171 tree callee_decl;
4172 int i, len;
4173
4174 len = adjustments.length ();
4175 vargs.create (len);
4176 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4177 current_node->remove_stmt_references (stmt);
4178
4179 gsi = gsi_for_stmt (stmt);
4180 prev_gsi = gsi;
4181 gsi_prev (&prev_gsi);
4182 for (i = 0; i < len; i++)
4183 {
4184 struct ipa_parm_adjustment *adj;
4185
4186 adj = &adjustments[i];
4187
4188 if (adj->op == IPA_PARM_OP_COPY)
4189 {
4190 tree arg = gimple_call_arg (stmt, adj->base_index);
4191
4192 vargs.quick_push (arg);
4193 }
4194 else if (adj->op != IPA_PARM_OP_REMOVE)
4195 {
4196 tree expr, base, off;
4197 location_t loc;
4198 unsigned int deref_align = 0;
4199 bool deref_base = false;
4200
4201	  /* When we create a new parameter out of the value of the old one, we
4202	     can do the following kinds of transformations:
4203
4204 - A scalar passed by reference is converted to a scalar passed by
4205 value. (adj->by_ref is false and the type of the original
4206 actual argument is a pointer to a scalar).
4207
4208 - A part of an aggregate is passed instead of the whole aggregate.
4209 The part can be passed either by value or by reference, this is
4210 determined by value of adj->by_ref. Moreover, the code below
4211 handles both situations when the original aggregate is passed by
4212 value (its type is not a pointer) and when it is passed by
4213 reference (it is a pointer to an aggregate).
4214
4215 When the new argument is passed by reference (adj->by_ref is true)
4216 it must be a part of an aggregate and therefore we form it by
4217 simply taking the address of a reference inside the original
4218 aggregate. */
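	    /* As a concrete (hypothetical) illustration: for a call f (&s)
	       whose callee only uses field s.x, the new actual argument is
	       built as MEM_REF <&s, offset-of-x> when the part is passed by
	       value, or as &MEM_REF <&s, offset-of-x> when it is passed by
	       reference.  */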
4219
4220 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4221 base = gimple_call_arg (stmt, adj->base_index);
4222 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4223 : EXPR_LOCATION (base);
4224
4225 if (TREE_CODE (base) != ADDR_EXPR
4226 && POINTER_TYPE_P (TREE_TYPE (base)))
4227 off = build_int_cst (adj->alias_ptr_type,
4228 adj->offset / BITS_PER_UNIT);
4229 else
4230 {
4231 HOST_WIDE_INT base_offset;
4232 tree prev_base;
4233 bool addrof;
4234
4235 if (TREE_CODE (base) == ADDR_EXPR)
4236 {
4237 base = TREE_OPERAND (base, 0);
4238 addrof = true;
4239 }
4240 else
4241 addrof = false;
4242 prev_base = base;
4243 base = get_addr_base_and_unit_offset (base, &base_offset);
4244 /* Aggregate arguments can have non-invariant addresses. */
4245 if (!base)
4246 {
4247 base = build_fold_addr_expr (prev_base);
4248 off = build_int_cst (adj->alias_ptr_type,
4249 adj->offset / BITS_PER_UNIT);
4250 }
4251 else if (TREE_CODE (base) == MEM_REF)
4252 {
4253 if (!addrof)
4254 {
4255 deref_base = true;
4256 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4257 }
4258 off = build_int_cst (adj->alias_ptr_type,
4259 base_offset
4260 + adj->offset / BITS_PER_UNIT);
4261 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4262 off);
4263 base = TREE_OPERAND (base, 0);
4264 }
4265 else
4266 {
4267 off = build_int_cst (adj->alias_ptr_type,
4268 base_offset
4269 + adj->offset / BITS_PER_UNIT);
4270 base = build_fold_addr_expr (base);
4271 }
4272 }
4273
4274 if (!adj->by_ref)
4275 {
4276 tree type = adj->type;
4277 unsigned int align;
4278 unsigned HOST_WIDE_INT misalign;
4279
4280 if (deref_base)
4281 {
4282 align = deref_align;
4283 misalign = 0;
4284 }
4285 else
4286 {
4287 get_pointer_alignment_1 (base, &align, &misalign);
4288 if (TYPE_ALIGN (type) > align)
4289 align = TYPE_ALIGN (type);
4290 }
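	      /* Fold the constant offset into the misalignment and, if the
		 access ends up misaligned, weaken the access type's alignment
		 to the largest power of two dividing the combined
		 misalignment.  */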
4291 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4292 * BITS_PER_UNIT);
4293 misalign = misalign & (align - 1);
4294 if (misalign != 0)
4295 align = least_bit_hwi (misalign);
4296 if (align < TYPE_ALIGN (type))
4297 type = build_aligned_type (type, align);
4298 base = force_gimple_operand_gsi (&gsi, base,
4299 true, NULL, true, GSI_SAME_STMT);
4300 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4301 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4302	      /* If expr is not a valid gimple call argument, emit
4303		 a load into a temporary.  */
4304 if (is_gimple_reg_type (TREE_TYPE (expr)))
4305 {
4306 gimple *tem = gimple_build_assign (NULL_TREE, expr);
4307 if (gimple_in_ssa_p (cfun))
4308 {
4309 gimple_set_vuse (tem, gimple_vuse (stmt));
4310 expr = make_ssa_name (TREE_TYPE (expr), tem);
4311 }
4312 else
4313 expr = create_tmp_reg (TREE_TYPE (expr));
4314 gimple_assign_set_lhs (tem, expr);
4315 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4316 }
4317 }
4318 else
4319 {
4320 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4321 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4322 expr = build_fold_addr_expr (expr);
4323 expr = force_gimple_operand_gsi (&gsi, expr,
4324 true, NULL, true, GSI_SAME_STMT);
4325 }
4326 vargs.quick_push (expr);
4327 }
4328 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4329 {
4330 unsigned int ix;
4331 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4332 gimple *def_temp;
4333
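	  /* decl_debug_args is a flat vector of (origin PARM_DECL,
	     DEBUG_EXPR_DECL) pairs; reuse the entry for ORIGIN if one
	     already exists, otherwise create and record a new one.  */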
4334 arg = gimple_call_arg (stmt, adj->base_index);
4335 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4336 {
4337 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4338 continue;
4339 arg = fold_convert_loc (gimple_location (stmt),
4340 TREE_TYPE (origin), arg);
4341 }
4342 if (debug_args == NULL)
4343 debug_args = decl_debug_args_insert (callee_decl);
4344 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4345 if (ddecl == origin)
4346 {
4347 ddecl = (**debug_args)[ix + 1];
4348 break;
4349 }
4350 if (ddecl == NULL)
4351 {
4352 ddecl = make_node (DEBUG_EXPR_DECL);
4353 DECL_ARTIFICIAL (ddecl) = 1;
4354 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4355 SET_DECL_MODE (ddecl, DECL_MODE (origin));
4356
4357 vec_safe_push (*debug_args, origin);
4358 vec_safe_push (*debug_args, ddecl);
4359 }
4360 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4361 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4362 }
4363 }
4364
4365 if (dump_file && (dump_flags & TDF_DETAILS))
4366 {
4367 fprintf (dump_file, "replacing stmt:");
4368 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4369 }
4370
4371 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4372 vargs.release ();
4373 if (gimple_call_lhs (stmt))
4374 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4375
4376 gimple_set_block (new_stmt, gimple_block (stmt));
4377 if (gimple_has_location (stmt))
4378 gimple_set_location (new_stmt, gimple_location (stmt));
4379 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4380 gimple_call_copy_flags (new_stmt, stmt);
4381 if (gimple_in_ssa_p (cfun))
4382 {
4383 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4384 if (gimple_vdef (stmt))
4385 {
4386 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4387 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4388 }
4389 }
4390
4391 if (dump_file && (dump_flags & TDF_DETAILS))
4392 {
4393 fprintf (dump_file, "with stmt:");
4394 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4395 fprintf (dump_file, "\n");
4396 }
4397 gsi_replace (&gsi, new_stmt, true);
4398 if (cs)
4399 cs->set_call_stmt (new_stmt);
4400 do
4401 {
4402 current_node->record_stmt_references (gsi_stmt (gsi));
4403 gsi_prev (&gsi);
4404 }
4405 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4406 }
4407
4408 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4409 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4410    specifies whether the function should care about type incompatibility
4411    between the current and new expressions.  If it is false, it will leave
4412 incompatibility issues to the caller. Return true iff the expression
4413 was modified. */
4414
4415 bool
4416 ipa_modify_expr (tree *expr, bool convert,
4417 ipa_parm_adjustment_vec adjustments)
4418 {
4419 struct ipa_parm_adjustment *cand
4420 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4421 if (!cand)
4422 return false;
4423
4424 tree src;
4425 if (cand->by_ref)
4426 {
4427 src = build_simple_mem_ref (cand->new_decl);
4428 REF_REVERSE_STORAGE_ORDER (src) = cand->reverse;
4429 }
4430 else
4431 src = cand->new_decl;
4432
4433 if (dump_file && (dump_flags & TDF_DETAILS))
4434 {
4435 fprintf (dump_file, "About to replace expr ");
4436 print_generic_expr (dump_file, *expr, 0);
4437 fprintf (dump_file, " with ");
4438 print_generic_expr (dump_file, src, 0);
4439 fprintf (dump_file, "\n");
4440 }
4441
4442 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4443 {
4444 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4445 *expr = vce;
4446 }
4447 else
4448 *expr = src;
4449 return true;
4450 }
4451
4452 /* If T is an SSA_NAME, return NULL if it is not a default def or
4453 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4454 the base variable is always returned, regardless if it is a default
4455 def. Return T if it is not an SSA_NAME. */
4456
4457 static tree
4458 get_ssa_base_param (tree t, bool ignore_default_def)
4459 {
4460 if (TREE_CODE (t) == SSA_NAME)
4461 {
4462 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4463 return SSA_NAME_VAR (t);
4464 else
4465 return NULL_TREE;
4466 }
4467 return t;
4468 }
4469
4470 /* Given an expression, return an adjustment entry specifying the
4471 transformation to be done on EXPR. If no suitable adjustment entry
4472    is found, return NULL.
4473
4474 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4475 default def, otherwise bail on them.
4476
4477 If CONVERT is non-NULL, this function will set *CONVERT if the
4478 expression provided is a component reference. ADJUSTMENTS is the
4479 adjustments vector. */
4480
4481 ipa_parm_adjustment *
4482 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4483 ipa_parm_adjustment_vec adjustments,
4484 bool ignore_default_def)
4485 {
4486 if (TREE_CODE (**expr) == BIT_FIELD_REF
4487 || TREE_CODE (**expr) == IMAGPART_EXPR
4488 || TREE_CODE (**expr) == REALPART_EXPR)
4489 {
4490 *expr = &TREE_OPERAND (**expr, 0);
4491 if (convert)
4492 *convert = true;
4493 }
4494
4495 HOST_WIDE_INT offset, size, max_size;
4496 bool reverse;
4497 tree base
4498 = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
4499 if (!base || size == -1 || max_size == -1)
4500 return NULL;
4501
4502 if (TREE_CODE (base) == MEM_REF)
4503 {
4504 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4505 base = TREE_OPERAND (base, 0);
4506 }
4507
4508 base = get_ssa_base_param (base, ignore_default_def);
4509 if (!base || TREE_CODE (base) != PARM_DECL)
4510 return NULL;
4511
4512 struct ipa_parm_adjustment *cand = NULL;
4513 unsigned int len = adjustments.length ();
4514 for (unsigned i = 0; i < len; i++)
4515 {
4516 struct ipa_parm_adjustment *adj = &adjustments[i];
4517
4518 if (adj->base == base
4519 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4520 {
4521 cand = adj;
4522 break;
4523 }
4524 }
4525
4526 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4527 return NULL;
4528 return cand;
4529 }
4530
4531 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4532
4533 static bool
4534 index_in_adjustments_multiple_times_p (int base_index,
4535 ipa_parm_adjustment_vec adjustments)
4536 {
4537 int i, len = adjustments.length ();
4538 bool one = false;
4539
4540 for (i = 0; i < len; i++)
4541 {
4542 struct ipa_parm_adjustment *adj;
4543 adj = &adjustments[i];
4544
4545 if (adj->base_index == base_index)
4546 {
4547 if (one)
4548 return true;
4549 else
4550 one = true;
4551 }
4552 }
4553 return false;
4554 }
4555
4556
4557 /* Return adjustments that should have the same effect on function parameters
4558 and call arguments as if they were first changed according to adjustments in
4559 INNER and then by adjustments in OUTER. */
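/* For instance (a made-up example): if INNER turns (a, b, c) into (a, c)
   by removing parameter 1, and OUTER then removes its parameter 0, the
   combined adjustments remove both a and b from the original signature.  */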
4560
4561 ipa_parm_adjustment_vec
4562 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4563 ipa_parm_adjustment_vec outer)
4564 {
4565 int i, outlen = outer.length ();
4566 int inlen = inner.length ();
4567 int removals = 0;
4568 ipa_parm_adjustment_vec adjustments, tmp;
4569
4570 tmp.create (inlen);
4571 for (i = 0; i < inlen; i++)
4572 {
4573 struct ipa_parm_adjustment *n;
4574 n = &inner[i];
4575
4576 if (n->op == IPA_PARM_OP_REMOVE)
4577 removals++;
4578 else
4579 {
4580	  /* FIXME: Handling of new arguments is not implemented yet.  */
4581 gcc_assert (n->op != IPA_PARM_OP_NEW);
4582 tmp.quick_push (*n);
4583 }
4584 }
4585
4586 adjustments.create (outlen + removals);
4587 for (i = 0; i < outlen; i++)
4588 {
4589 struct ipa_parm_adjustment r;
4590 struct ipa_parm_adjustment *out = &outer[i];
4591 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4592
4593 memset (&r, 0, sizeof (r));
4594 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4595 if (out->op == IPA_PARM_OP_REMOVE)
4596 {
4597 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4598 {
4599 r.op = IPA_PARM_OP_REMOVE;
4600 adjustments.quick_push (r);
4601 }
4602 continue;
4603 }
4604 else
4605 {
4606	  /* FIXME: Handling of new arguments is not implemented yet.  */
4607 gcc_assert (out->op != IPA_PARM_OP_NEW);
4608 }
4609
4610 r.base_index = in->base_index;
4611 r.type = out->type;
4612
4613 /* FIXME: Create nonlocal value too. */
4614
4615 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4616 r.op = IPA_PARM_OP_COPY;
4617 else if (in->op == IPA_PARM_OP_COPY)
4618 r.offset = out->offset;
4619 else if (out->op == IPA_PARM_OP_COPY)
4620 r.offset = in->offset;
4621 else
4622 r.offset = in->offset + out->offset;
4623 adjustments.quick_push (r);
4624 }
4625
4626 for (i = 0; i < inlen; i++)
4627 {
4628 struct ipa_parm_adjustment *n = &inner[i];
4629
4630 if (n->op == IPA_PARM_OP_REMOVE)
4631 adjustments.quick_push (*n);
4632 }
4633
4634 tmp.release ();
4635 return adjustments;
4636 }
4637
4638 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
4639 friendly way, assuming they are meant to be applied to FNDECL. */
4640
4641 void
4642 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4643 tree fndecl)
4644 {
4645 int i, len = adjustments.length ();
4646 bool first = true;
4647 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4648
4649 fprintf (file, "IPA param adjustments: ");
4650 for (i = 0; i < len; i++)
4651 {
4652 struct ipa_parm_adjustment *adj;
4653 adj = &adjustments[i];
4654
4655 if (!first)
4656 fprintf (file, " ");
4657 else
4658 first = false;
4659
4660 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4661 print_generic_expr (file, parms[adj->base_index], 0);
4662 if (adj->base)
4663 {
4664 fprintf (file, ", base: ");
4665 print_generic_expr (file, adj->base, 0);
4666 }
4667 if (adj->new_decl)
4668 {
4669 fprintf (file, ", new_decl: ");
4670 print_generic_expr (file, adj->new_decl, 0);
4671 }
4672 if (adj->new_ssa_base)
4673 {
4674 fprintf (file, ", new_ssa_base: ");
4675 print_generic_expr (file, adj->new_ssa_base, 0);
4676 }
4677
4678 if (adj->op == IPA_PARM_OP_COPY)
4679 fprintf (file, ", copy_param");
4680 else if (adj->op == IPA_PARM_OP_REMOVE)
4681 fprintf (file, ", remove_param");
4682 else
4683 fprintf (file, ", offset %li", (long) adj->offset);
4684 if (adj->by_ref)
4685 fprintf (file, ", by_ref");
4686 print_node_brief (file, ", type: ", adj->type, 0);
4687 fprintf (file, "\n");
4688 }
4689 parms.release ();
4690 }
4691
4692 /* Dump the AV linked list. */
4693
4694 void
4695 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4696 {
4697 bool comma = false;
4698 fprintf (f, " Aggregate replacements:");
4699 for (; av; av = av->next)
4700 {
4701 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4702 av->index, av->offset);
4703 print_generic_expr (f, av->value, 0);
4704 comma = true;
4705 }
4706 fprintf (f, "\n");
4707 }
4708
4709 /* Stream out jump function JUMP_FUNC to OB. */
4710
4711 static void
4712 ipa_write_jump_function (struct output_block *ob,
4713 struct ipa_jump_func *jump_func)
4714 {
4715 struct ipa_agg_jf_item *item;
4716 struct bitpack_d bp;
4717 int i, count;
4718
4719 streamer_write_uhwi (ob, jump_func->type);
4720 switch (jump_func->type)
4721 {
4722 case IPA_JF_UNKNOWN:
4723 break;
4724 case IPA_JF_CONST:
4725 gcc_assert (
4726 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4727 stream_write_tree (ob, jump_func->value.constant.value, true);
4728 break;
4729 case IPA_JF_PASS_THROUGH:
4730 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4731 if (jump_func->value.pass_through.operation == NOP_EXPR)
4732 {
4733 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4734 bp = bitpack_create (ob->main_stream);
4735 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4736 streamer_write_bitpack (&bp);
4737 }
4738 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4739 == tcc_unary)
4740 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4741 else
4742 {
4743 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4744 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4745 }
4746 break;
4747 case IPA_JF_ANCESTOR:
4748 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4749 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4750 bp = bitpack_create (ob->main_stream);
4751 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4752 streamer_write_bitpack (&bp);
4753 break;
4754 }
4755
4756 count = vec_safe_length (jump_func->agg.items);
4757 streamer_write_uhwi (ob, count);
4758 if (count)
4759 {
4760 bp = bitpack_create (ob->main_stream);
4761 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4762 streamer_write_bitpack (&bp);
4763 }
4764
4765 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4766 {
4767 streamer_write_uhwi (ob, item->offset);
4768 stream_write_tree (ob, item->value, true);
4769 }
4770
4771 bp = bitpack_create (ob->main_stream);
4772 bp_pack_value (&bp, jump_func->bits.known, 1);
4773 streamer_write_bitpack (&bp);
4774 if (jump_func->bits.known)
4775 {
4776 streamer_write_widest_int (ob, jump_func->bits.value);
4777 streamer_write_widest_int (ob, jump_func->bits.mask);
4778 }
4779 bp_pack_value (&bp, jump_func->vr_known, 1);
4780 streamer_write_bitpack (&bp);
4781 if (jump_func->vr_known)
4782 {
4783	  streamer_write_enum (ob->main_stream, value_range_type,
4784 VR_LAST, jump_func->m_vr.type);
4785 stream_write_tree (ob, jump_func->m_vr.min, true);
4786 stream_write_tree (ob, jump_func->m_vr.max, true);
4787 }
4788 }
4789
4790 /* Read in jump function JUMP_FUNC from IB. */
4791
4792 static void
4793 ipa_read_jump_function (struct lto_input_block *ib,
4794 struct ipa_jump_func *jump_func,
4795 struct cgraph_edge *cs,
4796 struct data_in *data_in)
4797 {
4798 enum jump_func_type jftype;
4799 enum tree_code operation;
4800 int i, count;
4801
4802 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4803 switch (jftype)
4804 {
4805 case IPA_JF_UNKNOWN:
4806 ipa_set_jf_unknown (jump_func);
4807 break;
4808 case IPA_JF_CONST:
4809 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4810 break;
4811 case IPA_JF_PASS_THROUGH:
4812 operation = (enum tree_code) streamer_read_uhwi (ib);
4813 if (operation == NOP_EXPR)
4814 {
4815 int formal_id = streamer_read_uhwi (ib);
4816 struct bitpack_d bp = streamer_read_bitpack (ib);
4817 bool agg_preserved = bp_unpack_value (&bp, 1);
4818 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4819 }
4820 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4821 {
4822 int formal_id = streamer_read_uhwi (ib);
4823 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4824 }
4825 else
4826 {
4827 tree operand = stream_read_tree (ib, data_in);
4828 int formal_id = streamer_read_uhwi (ib);
4829 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4830 operation);
4831 }
4832 break;
4833 case IPA_JF_ANCESTOR:
4834 {
4835 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4836 int formal_id = streamer_read_uhwi (ib);
4837 struct bitpack_d bp = streamer_read_bitpack (ib);
4838 bool agg_preserved = bp_unpack_value (&bp, 1);
4839 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4840 break;
4841 }
4842 }
4843
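  /* The rest of the stream mirrors ipa_write_jump_function: aggregate
     items first, then the known-bits information, then the value range.  */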
4844 count = streamer_read_uhwi (ib);
4845 vec_alloc (jump_func->agg.items, count);
4846 if (count)
4847 {
4848 struct bitpack_d bp = streamer_read_bitpack (ib);
4849 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4850 }
4851 for (i = 0; i < count; i++)
4852 {
4853 struct ipa_agg_jf_item item;
4854 item.offset = streamer_read_uhwi (ib);
4855 item.value = stream_read_tree (ib, data_in);
4856 jump_func->agg.items->quick_push (item);
4857 }
4858
4859 struct bitpack_d bp = streamer_read_bitpack (ib);
4860 bool bits_known = bp_unpack_value (&bp, 1);
4861 if (bits_known)
4862 {
4863 jump_func->bits.known = true;
4864 jump_func->bits.value = streamer_read_widest_int (ib);
4865 jump_func->bits.mask = streamer_read_widest_int (ib);
4866 }
4867 else
4868 jump_func->bits.known = false;
4869
4870 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4871 bool vr_known = bp_unpack_value (&vr_bp, 1);
4872 if (vr_known)
4873 {
4874 jump_func->vr_known = true;
4875 jump_func->m_vr.type = streamer_read_enum (ib,
4876 value_range_type,
4877 VR_LAST);
4878 jump_func->m_vr.min = stream_read_tree (ib, data_in);
4879 jump_func->m_vr.max = stream_read_tree (ib, data_in);
4880 }
4881 else
4882 jump_func->vr_known = false;
4883 }
4884
4885 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4886 relevant to indirect inlining to OB. */
4887
4888 static void
4889 ipa_write_indirect_edge_info (struct output_block *ob,
4890 struct cgraph_edge *cs)
4891 {
4892 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4893 struct bitpack_d bp;
4894
4895 streamer_write_hwi (ob, ii->param_index);
4896 bp = bitpack_create (ob->main_stream);
4897 bp_pack_value (&bp, ii->polymorphic, 1);
4898 bp_pack_value (&bp, ii->agg_contents, 1);
4899 bp_pack_value (&bp, ii->member_ptr, 1);
4900 bp_pack_value (&bp, ii->by_ref, 1);
4901 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4902 bp_pack_value (&bp, ii->vptr_changed, 1);
4903 streamer_write_bitpack (&bp);
4904 if (ii->agg_contents || ii->polymorphic)
4905 streamer_write_hwi (ob, ii->offset);
4906 else
4907 gcc_assert (ii->offset == 0);
4908
4909 if (ii->polymorphic)
4910 {
4911 streamer_write_hwi (ob, ii->otr_token);
4912 stream_write_tree (ob, ii->otr_type, true);
4913 ii->context.stream_out (ob);
4914 }
4915 }
4916
4917 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4918 relevant to indirect inlining from IB. */
4919
4920 static void
4921 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4922 struct data_in *data_in,
4923 struct cgraph_edge *cs)
4924 {
4925 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4926 struct bitpack_d bp;
4927
4928 ii->param_index = (int) streamer_read_hwi (ib);
4929 bp = streamer_read_bitpack (ib);
4930 ii->polymorphic = bp_unpack_value (&bp, 1);
4931 ii->agg_contents = bp_unpack_value (&bp, 1);
4932 ii->member_ptr = bp_unpack_value (&bp, 1);
4933 ii->by_ref = bp_unpack_value (&bp, 1);
4934 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4935 ii->vptr_changed = bp_unpack_value (&bp, 1);
4936 if (ii->agg_contents || ii->polymorphic)
4937 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4938 else
4939 ii->offset = 0;
4940 if (ii->polymorphic)
4941 {
4942 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4943 ii->otr_type = stream_read_tree (ib, data_in);
4944 ii->context.stream_in (ib, data_in);
4945 }
4946 }
4947
4948 /* Stream out NODE info to OB. */
4949
4950 static void
4951 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4952 {
4953 int node_ref;
4954 lto_symtab_encoder_t encoder;
4955 struct ipa_node_params *info = IPA_NODE_REF (node);
4956 int j;
4957 struct cgraph_edge *e;
4958 struct bitpack_d bp;
4959
4960 encoder = ob->decl_state->symtab_node_encoder;
4961 node_ref = lto_symtab_encoder_encode (encoder, node);
4962 streamer_write_uhwi (ob, node_ref);
4963
4964 streamer_write_uhwi (ob, ipa_get_param_count (info));
4965 for (j = 0; j < ipa_get_param_count (info); j++)
4966 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4967 bp = bitpack_create (ob->main_stream);
4968 gcc_assert (info->analysis_done
4969 || ipa_get_param_count (info) == 0);
4970 gcc_assert (!info->node_enqueued);
4971 gcc_assert (!info->ipcp_orig_node);
4972 for (j = 0; j < ipa_get_param_count (info); j++)
4973 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4974 streamer_write_bitpack (&bp);
4975 for (j = 0; j < ipa_get_param_count (info); j++)
4976 {
4977 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4978 stream_write_tree (ob, ipa_get_type (info, j), true);
4979 }
4980 for (e = node->callees; e; e = e->next_callee)
4981 {
4982 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4983
4984 streamer_write_uhwi (ob,
4985 ipa_get_cs_argument_count (args) * 2
4986 + (args->polymorphic_call_contexts != NULL));
4987 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4988 {
4989 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4990 if (args->polymorphic_call_contexts != NULL)
4991 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4992 }
4993 }
4994 for (e = node->indirect_calls; e; e = e->next_callee)
4995 {
4996 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4997
4998 streamer_write_uhwi (ob,
4999 ipa_get_cs_argument_count (args) * 2
5000 + (args->polymorphic_call_contexts != NULL));
5001 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5002 {
5003 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5004 if (args->polymorphic_call_contexts != NULL)
5005 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5006 }
5007 ipa_write_indirect_edge_info (ob, e);
5008 }
5009 }
5010
5011 /* Stream in NODE info from IB. */
5012
5013 static void
5014 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
5015 struct data_in *data_in)
5016 {
5017 struct ipa_node_params *info = IPA_NODE_REF (node);
5018 int k;
5019 struct cgraph_edge *e;
5020 struct bitpack_d bp;
5021
5022 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
5023
5024 for (k = 0; k < ipa_get_param_count (info); k++)
5025 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
5026
5027 bp = streamer_read_bitpack (ib);
5028 if (ipa_get_param_count (info) != 0)
5029 info->analysis_done = true;
5030 info->node_enqueued = false;
5031 for (k = 0; k < ipa_get_param_count (info); k++)
5032 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
5033 for (k = 0; k < ipa_get_param_count (info); k++)
5034 {
5035 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
5036 info->descriptors[k].decl_or_type = stream_read_tree (ib, data_in);
5037 }
5038 for (e = node->callees; e; e = e->next_callee)
5039 {
5040 struct ipa_edge_args *args = IPA_EDGE_REF (e);
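      /* The writer streams twice the argument count, with the low bit
	 recording whether polymorphic call contexts follow each jump
	 function.  */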
5041 int count = streamer_read_uhwi (ib);
5042 bool contexts_computed = count & 1;
5043 count /= 2;
5044
5045 if (!count)
5046 continue;
5047 vec_safe_grow_cleared (args->jump_functions, count);
5048 if (contexts_computed)
5049 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
5050
5051 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5052 {
5053 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5054 data_in);
5055 if (contexts_computed)
5056 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5057 }
5058 }
5059 for (e = node->indirect_calls; e; e = e->next_callee)
5060 {
5061 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5062 int count = streamer_read_uhwi (ib);
5063 bool contexts_computed = count & 1;
5064 count /= 2;
5065
5066 if (count)
5067 {
5068 vec_safe_grow_cleared (args->jump_functions, count);
5069 if (contexts_computed)
5070 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
5071 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5072 {
5073 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5074 data_in);
5075 if (contexts_computed)
5076 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5077 }
5078 }
5079 ipa_read_indirect_edge_info (ib, data_in, e);
5080 }
5081 }
5082
5083 /* Write jump functions for all functions in the current partition.  */
5084
5085 void
5086 ipa_prop_write_jump_functions (void)
5087 {
5088 struct cgraph_node *node;
5089 struct output_block *ob;
5090 unsigned int count = 0;
5091 lto_symtab_encoder_iterator lsei;
5092 lto_symtab_encoder_t encoder;
5093
5094 if (!ipa_node_params_sum)
5095 return;
5096
5097 ob = create_output_block (LTO_section_jump_functions);
5098 encoder = ob->decl_state->symtab_node_encoder;
5099 ob->symbol = NULL;
5100 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5101 lsei_next_function_in_partition (&lsei))
5102 {
5103 node = lsei_cgraph_node (lsei);
5104 if (node->has_gimple_body_p ()
5105 && IPA_NODE_REF (node) != NULL)
5106 count++;
5107 }
5108
5109 streamer_write_uhwi (ob, count);
5110
5111 /* Process all of the functions. */
5112 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5113 lsei_next_function_in_partition (&lsei))
5114 {
5115 node = lsei_cgraph_node (lsei);
5116 if (node->has_gimple_body_p ()
5117 && IPA_NODE_REF (node) != NULL)
5118 ipa_write_node_info (ob, node);
5119 }
5120 streamer_write_char_stream (ob->main_stream, 0);
5121 produce_asm (ob, NULL);
5122 destroy_output_block (ob);
5123 }
5124
5125 /* Read section in file FILE_DATA of length LEN with data DATA. */
5126
5127 static void
5128 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5129 size_t len)
5130 {
5131 const struct lto_function_header *header =
5132 (const struct lto_function_header *) data;
5133 const int cfg_offset = sizeof (struct lto_function_header);
5134 const int main_offset = cfg_offset + header->cfg_size;
5135 const int string_offset = main_offset + header->main_size;
5136 struct data_in *data_in;
5137 unsigned int i;
5138 unsigned int count;
5139
5140 lto_input_block ib_main ((const char *) data + main_offset,
5141 header->main_size, file_data->mode_table);
5142
5143 data_in =
5144 lto_data_in_create (file_data, (const char *) data + string_offset,
5145 header->string_size, vNULL);
5146 count = streamer_read_uhwi (&ib_main);
5147
5148 for (i = 0; i < count; i++)
5149 {
5150 unsigned int index;
5151 struct cgraph_node *node;
5152 lto_symtab_encoder_t encoder;
5153
5154 index = streamer_read_uhwi (&ib_main);
5155 encoder = file_data->symtab_node_encoder;
5156 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5157 index));
5158 gcc_assert (node->definition);
5159 ipa_read_node_info (&ib_main, node, data_in);
5160 }
5161 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5162 len);
5163 lto_data_in_delete (data_in);
5164 }
5165
5166 /* Read ipcp jump functions. */
5167
5168 void
5169 ipa_prop_read_jump_functions (void)
5170 {
5171 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5172 struct lto_file_decl_data *file_data;
5173 unsigned int j = 0;
5174
5175 ipa_check_create_node_params ();
5176 ipa_check_create_edge_args ();
5177 ipa_register_cgraph_hooks ();
5178
5179 while ((file_data = file_data_vec[j++]))
5180 {
5181 size_t len;
5182 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
5183
5184 if (data)
5185 ipa_prop_read_section (file_data, data, len);
5186 }
5187 }
5188
5189 /* After merging units, we can get a mismatch in argument counts.
5190    Decl merging might also have rendered parameter lists obsolete.
5191    This also computes called_with_variable_arg info.  */
5192
5193 void
5194 ipa_update_after_lto_read (void)
5195 {
5196 ipa_check_create_node_params ();
5197 ipa_check_create_edge_args ();
5198 }
5199
5200 void
5201 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5202 {
5203 int node_ref;
5204 unsigned int count = 0;
5205 lto_symtab_encoder_t encoder;
5206 struct ipa_agg_replacement_value *aggvals, *av;
5207
5208 aggvals = ipa_get_agg_replacements_for_node (node);
5209 encoder = ob->decl_state->symtab_node_encoder;
5210 node_ref = lto_symtab_encoder_encode (encoder, node);
5211 streamer_write_uhwi (ob, node_ref);
5212
5213 for (av = aggvals; av; av = av->next)
5214 count++;
5215 streamer_write_uhwi (ob, count);
5216
5217 for (av = aggvals; av; av = av->next)
5218 {
5219 struct bitpack_d bp;
5220
5221 streamer_write_uhwi (ob, av->offset);
5222 streamer_write_uhwi (ob, av->index);
5223 stream_write_tree (ob, av->value, true);
5224
5225 bp = bitpack_create (ob->main_stream);
5226 bp_pack_value (&bp, av->by_ref, 1);
5227 streamer_write_bitpack (&bp);
5228 }
5229
5230 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5231 if (ts && vec_safe_length (ts->m_vr) > 0)
5232 {
5233 count = ts->m_vr->length ();
5234 streamer_write_uhwi (ob, count);
5235 for (unsigned i = 0; i < count; ++i)
5236 {
5237 struct bitpack_d bp;
5238 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5239 bp = bitpack_create (ob->main_stream);
5240 bp_pack_value (&bp, parm_vr->known, 1);
5241 streamer_write_bitpack (&bp);
5242 if (parm_vr->known)
5243 {
5244	      streamer_write_enum (ob->main_stream, value_range_type,
5245 VR_LAST, parm_vr->type);
5246 streamer_write_wide_int (ob, parm_vr->min);
5247 streamer_write_wide_int (ob, parm_vr->max);
5248 }
5249 }
5250 }
5251 else
5252 streamer_write_uhwi (ob, 0);
5253
5254 if (ts && vec_safe_length (ts->bits) > 0)
5255 {
5256 count = ts->bits->length ();
5257 streamer_write_uhwi (ob, count);
5258
5259 for (unsigned i = 0; i < count; ++i)
5260 {
5261 const ipa_bits& bits_jfunc = (*ts->bits)[i];
5262 struct bitpack_d bp = bitpack_create (ob->main_stream);
5263 bp_pack_value (&bp, bits_jfunc.known, 1);
5264 streamer_write_bitpack (&bp);
5265 if (bits_jfunc.known)
5266 {
5267 streamer_write_widest_int (ob, bits_jfunc.value);
5268 streamer_write_widest_int (ob, bits_jfunc.mask);
5269 }
5270 }
5271 }
5272 else
5273 streamer_write_uhwi (ob, 0);
5274 }
5275
5276 /* Stream in the aggregate value replacement chain and other IPA-CP
5277    transformation data for NODE from IB.  */
5278 static void
5279 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5280 data_in *data_in)
5281 {
5282 struct ipa_agg_replacement_value *aggvals = NULL;
5283 unsigned int count, i;
5284
5285 count = streamer_read_uhwi (ib);
5286   for (i = 0; i < count; i++)
5287 {
5288 struct ipa_agg_replacement_value *av;
5289 struct bitpack_d bp;
5290
5291 av = ggc_alloc<ipa_agg_replacement_value> ();
5292 av->offset = streamer_read_uhwi (ib);
5293 av->index = streamer_read_uhwi (ib);
5294 av->value = stream_read_tree (ib, data_in);
5295 bp = streamer_read_bitpack (ib);
5296 av->by_ref = bp_unpack_value (&bp, 1);
5297 av->next = aggvals;
5298 aggvals = av;
5299 }
5300 ipa_set_node_agg_value_chain (node, aggvals);
5301
5302 count = streamer_read_uhwi (ib);
5303 if (count > 0)
5304 {
5305 ipcp_grow_transformations_if_necessary ();
5306
5307 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5308 vec_safe_grow_cleared (ts->m_vr, count);
5309 for (i = 0; i < count; i++)
5310 {
5311 ipa_vr *parm_vr;
5312 parm_vr = &(*ts->m_vr)[i];
5313 struct bitpack_d bp;
5314 bp = streamer_read_bitpack (ib);
5315 parm_vr->known = bp_unpack_value (&bp, 1);
5316 if (parm_vr->known)
5317 {
5318 parm_vr->type = streamer_read_enum (ib, value_range_type,
5319 VR_LAST);
5320 parm_vr->min = streamer_read_wide_int (ib);
5321 parm_vr->max = streamer_read_wide_int (ib);
5322 }
5323 }
5324 }
5325 count = streamer_read_uhwi (ib);
5326 if (count > 0)
5327 {
5328 ipcp_grow_transformations_if_necessary ();
5329
5330 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5331 vec_safe_grow_cleared (ts->bits, count);
5332
5333 for (i = 0; i < count; i++)
5334 {
5335 ipa_bits& bits_jfunc = (*ts->bits)[i];
5336 struct bitpack_d bp = streamer_read_bitpack (ib);
5337 bits_jfunc.known = bp_unpack_value (&bp, 1);
5338 if (bits_jfunc.known)
5339 {
5340 bits_jfunc.value = streamer_read_widest_int (ib);
5341 bits_jfunc.mask = streamer_read_widest_int (ib);
5342 }
5343 }
5344 }
5345 }
5346
5347 /* Write all aggregate replacements for nodes in the current partition.  */
5348
5349 void
5350 ipcp_write_transformation_summaries (void)
5351 {
5352 struct cgraph_node *node;
5353 struct output_block *ob;
5354 unsigned int count = 0;
5355 lto_symtab_encoder_iterator lsei;
5356 lto_symtab_encoder_t encoder;
5357
5358 ob = create_output_block (LTO_section_ipcp_transform);
5359 encoder = ob->decl_state->symtab_node_encoder;
5360 ob->symbol = NULL;
5361 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5362 lsei_next_function_in_partition (&lsei))
5363 {
5364 node = lsei_cgraph_node (lsei);
5365 if (node->has_gimple_body_p ())
5366 count++;
5367 }
5368
5369 streamer_write_uhwi (ob, count);
5370
5371 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5372 lsei_next_function_in_partition (&lsei))
5373 {
5374 node = lsei_cgraph_node (lsei);
5375 if (node->has_gimple_body_p ())
5376 write_ipcp_transformation_info (ob, node);
5377 }
5378 streamer_write_char_stream (ob->main_stream, 0);
5379 produce_asm (ob, NULL);
5380 destroy_output_block (ob);
5381 }
5382
5383 /* Read replacements section in file FILE_DATA of length LEN with data
5384 DATA. */
5385
5386 static void
5387 read_replacements_section (struct lto_file_decl_data *file_data,
5388 const char *data,
5389 size_t len)
5390 {
5391 const struct lto_function_header *header =
5392 (const struct lto_function_header *) data;
5393 const int cfg_offset = sizeof (struct lto_function_header);
5394 const int main_offset = cfg_offset + header->cfg_size;
5395 const int string_offset = main_offset + header->main_size;
5396 struct data_in *data_in;
5397 unsigned int i;
5398 unsigned int count;
5399
5400 lto_input_block ib_main ((const char *) data + main_offset,
5401 header->main_size, file_data->mode_table);
5402
5403 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5404 header->string_size, vNULL);
5405 count = streamer_read_uhwi (&ib_main);
5406
5407 for (i = 0; i < count; i++)
5408 {
5409 unsigned int index;
5410 struct cgraph_node *node;
5411 lto_symtab_encoder_t encoder;
5412
5413 index = streamer_read_uhwi (&ib_main);
5414 encoder = file_data->symtab_node_encoder;
5415 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5416 index));
5417 gcc_assert (node->definition);
5418 read_ipcp_transformation_info (&ib_main, node, data_in);
5419 }
5420   lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5421			 len);
5422 lto_data_in_delete (data_in);
5423 }
5424
5425 /* Read IPA-CP aggregate replacements. */
5426
5427 void
5428 ipcp_read_transformation_summaries (void)
5429 {
5430 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5431 struct lto_file_decl_data *file_data;
5432 unsigned int j = 0;
5433
5434 while ((file_data = file_data_vec[j++]))
5435 {
5436 size_t len;
5437 const char *data = lto_get_section_data (file_data,
5438 LTO_section_ipcp_transform,
5439 NULL, &len);
5440 if (data)
5441 read_replacements_section (file_data, data, len);
5442 }
5443 }
5444
5445 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5446 NODE. */
5447
5448 static void
5449 adjust_agg_replacement_values (struct cgraph_node *node,
5450 struct ipa_agg_replacement_value *aggval)
5451 {
5452 struct ipa_agg_replacement_value *v;
5453 int i, c = 0, d = 0, *adj;
5454
5455 if (!node->clone.combined_args_to_skip)
5456 return;
5457
5458 for (v = aggval; v; v = v->next)
5459 {
5460 gcc_assert (v->index >= 0);
5461 if (c < v->index)
5462 c = v->index;
5463 }
5464 c++;
5465
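  /* Build an old-index -> new-index map: a skipped parameter maps to -1
     and every surviving parameter is shifted down by the number of
     parameters skipped before it.  E.g. if only parameter 1 is skipped,
     indices 0, 1 and 2 map to 0, -1 and 1 respectively.  */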
5466 adj = XALLOCAVEC (int, c);
5467 for (i = 0; i < c; i++)
5468 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5469 {
5470 adj[i] = -1;
5471 d++;
5472 }
5473 else
5474 adj[i] = i - d;
5475
5476 for (v = aggval; v; v = v->next)
5477 v->index = adj[v->index];
5478 }
5479
5480 /* Dominator walker driving the ipcp modification phase. */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
			 vec<ipa_param_descriptor> descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual edge before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
  vec<ipa_param_descriptor> m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  bool *m_something_changed, *m_cfg_changed;
};

edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple *stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset, size;
      int index;
      bool by_ref, vce;

      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;
      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.  */
	  if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
				   &offset, &size, &by_ref))
	continue;
      for (v = m_aggval; v; v = v->next)
	if (v->index == index
	    && v->offset == offset)
	  break;
      if (!v
	  || v->by_ref != by_ref
	  || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
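      /* The recorded constant may have a type different from that of the
	 load: fold it with a NOP_EXPR when a regular conversion is valid,
	 fall back to a VIEW_CONVERT_EXPR when only the sizes match, and
	 give up otherwise.  */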
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	{
	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v->value)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "    const ");
		  print_generic_expr (dump_file, v->value, 0);
		  fprintf (dump_file, " can't be converted to type of ");
		  print_generic_expr (dump_file, rhs, 0);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	*m_cfg_changed = true;
    }
  return NULL;
}

/* Update bits info of formal parameters as described in
   ipcp_transformation_summary.  */
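
/* Purely illustrative values for the two encodings handled below: for an
   integral parameter, the known-zero bits are those set in neither the
   streamed mask nor value, so mask 0x6 and value 0x1 yield nonzero bits 0x7;
   for a pointer, the lowest set bit of the mask bounds the known alignment,
   so mask 0x...f8 gives align 8, with the low bits of value giving the
   misalignment within it.  */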

static void
ipcp_update_bits (struct cgraph_node *node)
{
  tree parm = DECL_ARGUMENTS (node->decl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);

  if (!ts || vec_safe_length (ts->bits) == 0)
    return;

  vec<ipa_bits, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;

      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      if (!bits[i].known
	  || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
	       || POINTER_TYPE_P (TREE_TYPE (parm)))
	  || !is_gimple_reg (parm))
	continue;

      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file, "Adjusting mask for param %u to ", i);
	  print_hex (bits[i].mask, dump_file);
	  fprintf (dump_file, "\n");
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	{
	  unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
	  signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

	  wide_int nonzero_bits = wide_int::from (bits[i].mask, prec, UNSIGNED)
				  | wide_int::from (bits[i].value, prec, sgn);
	  set_nonzero_bits (ddef, nonzero_bits);
	}
      else
	{
	  unsigned tem = bits[i].mask.to_uhwi ();
	  unsigned HOST_WIDE_INT bitpos = bits[i].value.to_uhwi ();
	  unsigned align = tem & -tem;
	  unsigned misalign = bitpos & (align - 1);

	  if (align > 1)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting align: %u, misalign: %u\n",
			 align, misalign);

	      unsigned old_align, old_misalign;
	      struct ptr_info_def *pi = get_ptr_info (ddef);
	      bool old_known = get_ptr_info_alignment (pi, &old_align,
						       &old_misalign);

	      if (old_known
		  && old_align > align)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "But alignment was already %u.\n",
			       old_align);
		      if ((old_misalign & (align - 1)) != misalign)
			fprintf (dump_file,
				 "old_misalign (%u) and misalign (%u) mismatch\n",
				 old_misalign, misalign);
		    }
		  continue;
		}

	      if (old_known
		  && ((misalign & (old_align - 1)) != old_misalign)
		  && dump_file)
		fprintf (dump_file,
			 "old_misalign (%u) and misalign (%u) mismatch\n",
			 old_misalign, misalign);

	      set_ptr_info_alignment (pi, align, misalign);
	    }
	}
    }
}

/* Update value range of formal parameters as described in
   ipcp_transformation_summary.  */
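
/* For instance (hypothetical values): a VR_RANGE [1, 100] recorded for an
   integral parameter is attached to its default-def SSA name, with the bounds
   re-extended to the precision and signedness of the parameter's own type;
   an anti-range ~[0, 0] on a pointer parameter is translated into marking the
   SSA name as non-NULL via set_ptr_nonnull.  */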

static void
ipcp_update_vr (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->m_vr) == 0)
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      if (!ddef || !is_gimple_reg (parm))
	continue;

      if (vr[i].known
	  && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
	{
	  tree type = TREE_TYPE (ddef);
	  unsigned prec = TYPE_PRECISION (type);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Setting value range of param %u ", i);
		  fprintf (dump_file, "%s[",
			   (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
		  print_decs (vr[i].min, dump_file);
		  fprintf (dump_file, ", ");
		  print_decs (vr[i].max, dump_file);
		  fprintf (dump_file, "]\n");
		}
	      set_range_info (ddef, vr[i].type,
			      wide_int_storage::from (vr[i].min, prec,
						      TYPE_SIGN (type)),
			      wide_int_storage::from (vr[i].max, prec,
						      TYPE_SIGN (type)));
	    }
	  else if (POINTER_TYPE_P (TREE_TYPE (ddef))
		   && vr[i].type == VR_ANTI_RANGE
		   && wi::eq_p (vr[i].min, 0)
		   && wi::eq_p (vr[i].max, 0))
	    {
	      if (dump_file)
		fprintf (dump_file, "Setting nonnull for %u\n", i);
	      set_ptr_nonnull (ddef);
	    }
	}
    }
}

/* IPCP transformation phase doing propagation of aggregate values.  */
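
/* This serves as the function_transform hook of the IPA-CP pass and is run
   once per function with its body set up in CFUN.  Known-bits and value-range
   information is applied unconditionally; the dominator walk below only runs
   when aggregate replacements were recorded for NODE.  */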

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor> descriptors = vNULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s/%i\n",
	     node->name (), node->order);

  ipcp_update_bits (node);
  ipcp_update_vr (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walked = 0;

  descriptors.safe_grow_cleared (param_count);
  ipa_populate_param_decls (node, descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  (*ipcp_transformations)[node->uid].agg_values = NULL;
  (*ipcp_transformations)[node->uid].bits = NULL;
  (*ipcp_transformations)[node->uid].m_vr = NULL;

  descriptors.release ();

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}