Fix memory leaks in IPA CP (PR ipa/79337).
[gcc.git] / gcc / ipa-prop.c
1 /* Interprocedural analyses.
2 Copyright (C) 2005-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "tree-streamer.h"
31 #include "cgraph.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-fold.h"
35 #include "tree-eh.h"
36 #include "calls.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
44 #include "ipa-prop.h"
45 #include "tree-cfg.h"
46 #include "tree-dfa.h"
47 #include "tree-inline.h"
48 #include "ipa-inline.h"
49 #include "gimple-pretty-print.h"
50 #include "params.h"
51 #include "ipa-utils.h"
52 #include "dbgcnt.h"
53 #include "domwalk.h"
54 #include "builtins.h"
55
56 /* Function summary where the parameter infos are actually stored. */
57 ipa_node_params_t *ipa_node_params_sum = NULL;
58 /* Vector of IPA-CP transformation data for each clone. */
59 vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
60 /* Vector where the edge argument (jump function) infos are actually stored. */
61 vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
62
63 /* Holders of ipa cgraph hooks: */
64 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
65 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
66 static struct cgraph_node_hook_list *function_insertion_hook_holder;
67
68 /* Description of a reference to an IPA constant. */
69 struct ipa_cst_ref_desc
70 {
71 /* Edge that corresponds to the statement which took the reference. */
72 struct cgraph_edge *cs;
73 /* Linked list of duplicates created when call graph edges are cloned. */
74 struct ipa_cst_ref_desc *next_duplicate;
75 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
76 is out of control. */
77 int refcount;
78 };
79
80 /* Allocation pool for reference descriptions. */
81
82 static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
83 ("IPA-PROP ref descriptions");
84
85 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
86 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
87
88 static bool
89 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
90 {
91 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
92
93 if (!fs_opts)
94 return false;
95 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
96 }
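/* For illustration (hypothetical user code, not part of this file): a
   function declared as

     __attribute__ ((optimize ("O0"))) void f (int x);

   carries DECL_FUNCTION_SPECIFIC_OPTIMIZATION with optimize set to zero,
   so the predicate above returns true and IPA-CP leaves it alone. */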
97
98 /* Return index of the formal whose tree is PTREE in the function whose
99 parameters are described by DESCRIPTORS. Return -1 if it is not found. */
100
101 static int
102 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
103 tree ptree)
104 {
105 int i, count;
106
107 count = vec_safe_length (descriptors);
108 for (i = 0; i < count; i++)
109 if ((*descriptors)[i].decl_or_type == ptree)
110 return i;
111
112 return -1;
113 }
114
115 /* Return index of the formal whose tree is PTREE in the function which
116 corresponds to INFO. Return -1 if it is not found. */
117
118 int
119 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
120 {
121 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
122 }
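/* For illustration (hypothetical declaration, not from this file): given

     int f (int a, char *b);

   ipa_get_param_decl_index returns 0 for the PARM_DECL of A and 1 for that
   of B, following the order of DECL_CHAIN in DECL_ARGUMENTS. */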
123
124 /* Populate the decl_or_type fields in parameter DESCRIPTORS that correspond
125 to NODE. */
126
127 static void
128 ipa_populate_param_decls (struct cgraph_node *node,
129 vec<ipa_param_descriptor, va_gc> &descriptors)
130 {
131 tree fndecl;
132 tree fnargs;
133 tree parm;
134 int param_num;
135
136 fndecl = node->decl;
137 gcc_assert (gimple_has_body_p (fndecl));
138 fnargs = DECL_ARGUMENTS (fndecl);
139 param_num = 0;
140 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
141 {
142 descriptors[param_num].decl_or_type = parm;
143 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
144 true);
145 param_num++;
146 }
147 }
148
149 /* Return how many formal parameters FNDECL has. */
150
151 int
152 count_formal_params (tree fndecl)
153 {
154 tree parm;
155 int count = 0;
156 gcc_assert (gimple_has_body_p (fndecl));
157
158 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
159 count++;
160
161 return count;
162 }
163
164 /* Dump a textual representation of the Ith formal parameter of the function
165 corresponding to INFO to FILE. */
167
168 void
169 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
170 {
171 fprintf (file, "param #%i", i);
172 if ((*info->descriptors)[i].decl_or_type)
173 {
174 fprintf (file, " ");
175 print_generic_expr (file, (*info->descriptors)[i].decl_or_type, 0);
176 }
177 }
178
179 /* Initialize the ipa_node_params structure associated with NODE
180 to hold PARAM_COUNT parameters. */
181
182 void
183 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
184 {
185 struct ipa_node_params *info = IPA_NODE_REF (node);
186
187 if (!info->descriptors && param_count)
188 vec_safe_grow_cleared (info->descriptors, param_count);
189 }
190
191 /* Initialize the ipa_node_params structure associated with NODE by counting
192 the function parameters, creating the descriptors and populating their
193 param_decls. */
194
195 void
196 ipa_initialize_node_params (struct cgraph_node *node)
197 {
198 struct ipa_node_params *info = IPA_NODE_REF (node);
199
200 if (!info->descriptors)
201 {
202 ipa_alloc_node_params (node, count_formal_params (node->decl));
203 ipa_populate_param_decls (node, *info->descriptors);
204 }
205 }
206
207 /* Print the jump functions associated with call graph edge CS to file F. */
208
209 static void
210 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
211 {
212 int i, count;
213
214 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
215 for (i = 0; i < count; i++)
216 {
217 struct ipa_jump_func *jump_func;
218 enum jump_func_type type;
219
220 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
221 type = jump_func->type;
222
223 fprintf (f, " param %d: ", i);
224 if (type == IPA_JF_UNKNOWN)
225 fprintf (f, "UNKNOWN\n");
226 else if (type == IPA_JF_CONST)
227 {
228 tree val = jump_func->value.constant.value;
229 fprintf (f, "CONST: ");
230 print_generic_expr (f, val, 0);
231 if (TREE_CODE (val) == ADDR_EXPR
232 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
233 {
234 fprintf (f, " -> ");
235 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
236 0);
237 }
238 fprintf (f, "\n");
239 }
240 else if (type == IPA_JF_PASS_THROUGH)
241 {
242 fprintf (f, "PASS THROUGH: ");
243 fprintf (f, "%d, op %s",
244 jump_func->value.pass_through.formal_id,
245 get_tree_code_name (jump_func->value.pass_through.operation));
246 if (jump_func->value.pass_through.operation != NOP_EXPR)
247 {
248 fprintf (f, " ");
249 print_generic_expr (f,
250 jump_func->value.pass_through.operand, 0);
251 }
252 if (jump_func->value.pass_through.agg_preserved)
253 fprintf (f, ", agg_preserved");
254 fprintf (f, "\n");
255 }
256 else if (type == IPA_JF_ANCESTOR)
257 {
258 fprintf (f, "ANCESTOR: ");
259 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
260 jump_func->value.ancestor.formal_id,
261 jump_func->value.ancestor.offset);
262 if (jump_func->value.ancestor.agg_preserved)
263 fprintf (f, ", agg_preserved");
264 fprintf (f, "\n");
265 }
266
267 if (jump_func->agg.items)
268 {
269 struct ipa_agg_jf_item *item;
270 int j;
271
272 fprintf (f, " Aggregate passed by %s:\n",
273 jump_func->agg.by_ref ? "reference" : "value");
274 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
275 {
276 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
277 item->offset);
278 if (TYPE_P (item->value))
279 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
280 tree_to_uhwi (TYPE_SIZE (item->value)));
281 else
282 {
283 fprintf (f, "cst: ");
284 print_generic_expr (f, item->value, 0);
285 }
286 fprintf (f, "\n");
287 }
288 }
289
290 struct ipa_polymorphic_call_context *ctx
291 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
292 if (ctx && !ctx->useless_p ())
293 {
294 fprintf (f, " Context: ");
295 ctx->dump (f);
296 }
297
298 if (jump_func->bits.known)
299 {
300 fprintf (f, " value: "); print_hex (jump_func->bits.value, f);
301 fprintf (f, ", mask: "); print_hex (jump_func->bits.mask, f);
302 fprintf (f, "\n");
303 }
304 else
305 fprintf (f, " Unknown bits\n");
306
307 if (jump_func->vr_known)
308 {
309 fprintf (f, " VR ");
310 fprintf (f, "%s[",
311 (jump_func->m_vr.type == VR_ANTI_RANGE) ? "~" : "");
312 print_decs (jump_func->m_vr.min, f);
313 fprintf (f, ", ");
314 print_decs (jump_func->m_vr.max, f);
315 fprintf (f, "]\n");
316 }
317 else
318 fprintf (f, " Unknown VR\n");
319 }
320 }
321
322
323 /* Print the jump functions of all arguments on all call graph edges going from
324 NODE to file F. */
325
326 void
327 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
328 {
329 struct cgraph_edge *cs;
330
331 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
332 node->order);
333 for (cs = node->callees; cs; cs = cs->next_callee)
334 {
335 if (!ipa_edge_args_info_available_for_edge_p (cs))
336 continue;
337
338 fprintf (f, " callsite %s/%i -> %s/%i : \n",
339 xstrdup_for_dump (node->name ()), node->order,
340 xstrdup_for_dump (cs->callee->name ()),
341 cs->callee->order);
342 ipa_print_node_jump_functions_for_edge (f, cs);
343 }
344
345 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
346 {
347 struct cgraph_indirect_call_info *ii;
348 if (!ipa_edge_args_info_available_for_edge_p (cs))
349 continue;
350
351 ii = cs->indirect_info;
352 if (ii->agg_contents)
353 fprintf (f, " indirect %s callsite, calling param %i, "
354 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
355 ii->member_ptr ? "member ptr" : "aggregate",
356 ii->param_index, ii->offset,
357 ii->by_ref ? "by reference" : "by value");
358 else
359 fprintf (f, " indirect %s callsite, calling param %i, "
360 "offset " HOST_WIDE_INT_PRINT_DEC,
361 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
362 ii->offset);
363
364 if (cs->call_stmt)
365 {
366 fprintf (f, ", for stmt ");
367 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
368 }
369 else
370 fprintf (f, "\n");
371 if (ii->polymorphic)
372 ii->context.dump (f);
373 ipa_print_node_jump_functions_for_edge (f, cs);
374 }
375 }
376
377 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
378
379 void
380 ipa_print_all_jump_functions (FILE *f)
381 {
382 struct cgraph_node *node;
383
384 fprintf (f, "\nJump functions:\n");
385 FOR_EACH_FUNCTION (node)
386 {
387 ipa_print_node_jump_functions (f, node);
388 }
389 }
390
391 /* Set JFUNC to be a jump function that conveys no information. */
392
393 static void
394 ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
395 {
396 jfunc->type = IPA_JF_UNKNOWN;
397 jfunc->bits.known = false;
398 jfunc->vr_known = false;
399 }
400
401 /* Set DST to be a copy of the jump function SRC (to be used by jump
402 function combination code). The two functions will share their rdesc. */
403
404 static void
405 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
406 struct ipa_jump_func *src)
407
408 {
409 gcc_checking_assert (src->type == IPA_JF_CONST);
410 dst->type = IPA_JF_CONST;
411 dst->value.constant = src->value.constant;
412 }
413
414 /* Set JFUNC to be a constant jump function. */
415
416 static void
417 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
418 struct cgraph_edge *cs)
419 {
420 jfunc->type = IPA_JF_CONST;
421 jfunc->value.constant.value = unshare_expr_without_location (constant);
422
423 if (TREE_CODE (constant) == ADDR_EXPR
424 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
425 {
426 struct ipa_cst_ref_desc *rdesc;
427
428 rdesc = ipa_refdesc_pool.allocate ();
429 rdesc->cs = cs;
430 rdesc->next_duplicate = NULL;
431 rdesc->refcount = 1;
432 jfunc->value.constant.rdesc = rdesc;
433 }
434 else
435 jfunc->value.constant.rdesc = NULL;
436 }
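/* For illustration (hypothetical call site, not from this file): when the
   argument of

     bar (&foo);

   is turned into a constant jump function, the ADDR_EXPR of the
   FUNCTION_DECL foo gets a fresh ipa_cst_ref_desc with refcount one, so
   that the IPA reference to foo can be removed later if all described
   uses become direct calls. */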
437
438 /* Set JFUNC to be a simple pass-through jump function. */

439 static void
440 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
441 bool agg_preserved)
442 {
443 jfunc->type = IPA_JF_PASS_THROUGH;
444 jfunc->value.pass_through.operand = NULL_TREE;
445 jfunc->value.pass_through.formal_id = formal_id;
446 jfunc->value.pass_through.operation = NOP_EXPR;
447 jfunc->value.pass_through.agg_preserved = agg_preserved;
448 }
449
450 /* Set JFUNC to be a unary pass-through jump function. */
451
452 static void
453 ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
454 enum tree_code operation)
455 {
456 jfunc->type = IPA_JF_PASS_THROUGH;
457 jfunc->value.pass_through.operand = NULL_TREE;
458 jfunc->value.pass_through.formal_id = formal_id;
459 jfunc->value.pass_through.operation = operation;
460 jfunc->value.pass_through.agg_preserved = false;
461 }

462 /* Set JFUNC to be an arithmetic pass-through jump function. */
463
464 static void
465 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
466 tree operand, enum tree_code operation)
467 {
468 jfunc->type = IPA_JF_PASS_THROUGH;
469 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
470 jfunc->value.pass_through.formal_id = formal_id;
471 jfunc->value.pass_through.operation = operation;
472 jfunc->value.pass_through.agg_preserved = false;
473 }
474
475 /* Set JFUNC to be an ancestor jump function. */
476
477 static void
478 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
479 int formal_id, bool agg_preserved)
480 {
481 jfunc->type = IPA_JF_ANCESTOR;
482 jfunc->value.ancestor.formal_id = formal_id;
483 jfunc->value.ancestor.offset = offset;
484 jfunc->value.ancestor.agg_preserved = agg_preserved;
485 }
486
487 /* Get IPA BB information about the given BB. FBI is the context of analysis
488 of this function body. */
489
490 static struct ipa_bb_info *
491 ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
492 {
493 gcc_checking_assert (fbi);
494 return &fbi->bb_infos[bb->index];
495 }
496
497 /* Structure to be passed in between detect_type_change and
498 check_stmt_for_type_change. */
499
500 struct prop_type_change_info
501 {
502 /* Offset into the object where there is the virtual method pointer we are
503 looking for. */
504 HOST_WIDE_INT offset;
505 /* The declaration or SSA_NAME pointer of the base that we are checking for
506 type change. */
507 tree object;
508 /* Set to true if dynamic type change has been detected. */
509 bool type_maybe_changed;
510 };
511
512 /* Return true if STMT can modify a virtual method table pointer.
513
514 This function makes special assumptions about both constructors and
515 destructors which are all the functions that are allowed to alter the VMT
516 pointers. It assumes that destructors begin with assignment into all VMT
517 pointers and that constructors essentially work in the following way:
518
519 1) The very first thing they do is to call constructors of ancestor
520 sub-objects that have them.
521
522 2) Then VMT pointers of this and all its ancestors are set to new values
523 corresponding to the type of the constructor.
524
525 3) Only afterwards, other stuff such as constructors of member sub-objects
526 and the code written by the user is run. Only this may include calling
527 virtual functions, directly or indirectly.
528
529 There is no way to call a constructor of an ancestor sub-object in any
530 other way.
531
532 This means that we do not have to care whether constructors get the correct
533 type information because they will always change it (in fact, if we define
534 the type to be given by the VMT pointer, it is undefined).
535
536 The most important fact to derive from the above is that if, for some
537 statement in section 3, we try to detect whether the dynamic type has
538 changed, we can safely ignore all calls as we examine the function body
539 backwards until we reach statements in section 2 because these calls cannot
540 be ancestor constructors or destructors (if the input is not bogus) and so
541 do not change the dynamic type (this holds true only for automatically
542 allocated objects but at the moment we devirtualize only these). We then
543 must detect that statements in section 2 change the dynamic type and can try
544 to derive the new type. That is enough and we can stop, we will never see
545 the calls into constructors of sub-objects in this code. Therefore we can
546 safely ignore all call statements that we traverse.
547 */
548
549 static bool
550 stmt_may_be_vtbl_ptr_store (gimple *stmt)
551 {
552 if (is_gimple_call (stmt))
553 return false;
554 if (gimple_clobber_p (stmt))
555 return false;
556 else if (is_gimple_assign (stmt))
557 {
558 tree lhs = gimple_assign_lhs (stmt);
559
560 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
561 {
562 if (flag_strict_aliasing
563 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
564 return false;
565
566 if (TREE_CODE (lhs) == COMPONENT_REF
567 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
568 return false;
569 /* In the future we might want to use get_base_ref_and_offset to find
570 if there is a field corresponding to the offset and if so, proceed
571 almost as if it were a component ref. */
572 }
573 }
574 return true;
575 }
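/* For illustration (hypothetical GIMPLE, not from this file): a statement
   such as

     this_2(D)->_vptr.A = &_ZTV1A + 16;

   stores to a DECL_VIRTUAL_P field and is treated as a possible VMT
   pointer store, whereas a plain scalar store like this_2(D)->i = 5
   is not. */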
576
577 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
578 to check whether a particular statement may modify the virtual table
579 pointer. It stores its result into DATA, which points to a
580 prop_type_change_info structure. */
581
582 static bool
583 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
584 {
585 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
586 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
587
588 if (stmt_may_be_vtbl_ptr_store (stmt))
589 {
590 tci->type_maybe_changed = true;
591 return true;
592 }
593 else
594 return false;
595 }
596
597 /* See if ARG is a PARM_DECL describing an instance passed by pointer
598 or reference in FUNCTION. Return true if the dynamic type may change
599 between the beginning of the function and the point where CALL is invoked.
600
601 Generally functions are not allowed to change the type of such instances,
602 but they can call destructors. We assume that methods cannot destroy the
603 THIS pointer. Also, as a special case, constructors and destructors may
604 change the type of the THIS pointer. */
605
606 static bool
607 param_type_may_change_p (tree function, tree arg, gimple *call)
608 {
609 /* Pure functions cannot do any changes on the dynamic type;
610 that requires writing to memory. */
611 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
612 return false;
613 /* We need to check if we are within an inlined constructor
614 or destructor (ideally we would have a way to check that the
615 inlined cdtor is actually working on ARG, but we don't have
616 an easy tie on this, so punt on all non-pure cdtors).
617 We may also record the types of cdtors and, once we know the type
618 of the instance, match them.
619
620 Also code unification optimizations may merge calls from
621 different blocks making return values unreliable. So
622 do nothing during late optimization. */
623 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
624 return true;
625 if (TREE_CODE (arg) == SSA_NAME
626 && SSA_NAME_IS_DEFAULT_DEF (arg)
627 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
628 {
629 /* Normal (non-THIS) argument. */
630 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
631 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
632 /* THIS pointer of an method - here we want to watch constructors
633 and destructors as those definitely may change the dynamic
634 type. */
635 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
636 && !DECL_CXX_CONSTRUCTOR_P (function)
637 && !DECL_CXX_DESTRUCTOR_P (function)
638 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
639 {
640 /* Walk the inline stack and watch out for ctors/dtors. */
641 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
642 block = BLOCK_SUPERCONTEXT (block))
643 if (inlined_polymorphic_ctor_dtor_block_p (block, false))
644 return true;
645 return false;
646 }
647 }
648 return true;
649 }
650
651 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
652 callsite CALL) by looking for assignments to its virtual table pointer. If
653 so, return true and fill in the jump function JFUNC with relevant type
654 information or set it to unknown. ARG is the object itself (not a pointer
655 to it, unless dereferenced). BASE is the base of the memory access as
656 returned by get_ref_base_and_extent, as is the offset.
657
658 This is a helper function for detect_type_change and detect_type_change_ssa
659 that does the heavy work which is usually unnecessary. */
660
661 static bool
662 detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
663 gcall *call, struct ipa_jump_func *jfunc,
664 HOST_WIDE_INT offset)
665 {
666 struct prop_type_change_info tci;
667 ao_ref ao;
668 bool entry_reached = false;
669
670 gcc_checking_assert (DECL_P (arg)
671 || TREE_CODE (arg) == MEM_REF
672 || handled_component_p (arg));
673
674 comp_type = TYPE_MAIN_VARIANT (comp_type);
675
676 /* Const calls cannot call virtual methods through VMT and so type changes do
677 not matter. */
678 if (!flag_devirtualize || !gimple_vuse (call)
679 /* Be sure expected_type is polymorphic. */
680 || !comp_type
681 || TREE_CODE (comp_type) != RECORD_TYPE
682 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
683 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
684 return true;
685
686 ao_ref_init (&ao, arg);
687 ao.base = base;
688 ao.offset = offset;
689 ao.size = POINTER_SIZE;
690 ao.max_size = ao.size;
691
692 tci.offset = offset;
693 tci.object = get_base_address (arg);
694 tci.type_maybe_changed = false;
695
696 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
697 &tci, NULL, &entry_reached);
698 if (!tci.type_maybe_changed)
699 return false;
700
701 ipa_set_jf_unknown (jfunc);
702 return true;
703 }
704
705 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
706 If so, return true and fill in the jump function JFUNC with relevant type
707 information or set it to unknown. ARG is the object itself (not a pointer
708 to it, unless dereferenced). BASE is the base of the memory access as
709 returned by get_ref_base_and_extent, as is the offset. */
710
711 static bool
712 detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
713 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
714 {
715 if (!flag_devirtualize)
716 return false;
717
718 if (TREE_CODE (base) == MEM_REF
719 && !param_type_may_change_p (current_function_decl,
720 TREE_OPERAND (base, 0),
721 call))
722 return false;
723 return detect_type_change_from_memory_writes (arg, base, comp_type,
724 call, jfunc, offset);
725 }
726
727 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
728 SSA name (its dereference will become the base and the offset is assumed to
729 be zero). */
730
731 static bool
732 detect_type_change_ssa (tree arg, tree comp_type,
733 gcall *call, struct ipa_jump_func *jfunc)
734 {
735 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
736 if (!flag_devirtualize
737 || !POINTER_TYPE_P (TREE_TYPE (arg)))
738 return false;
739
740 if (!param_type_may_change_p (current_function_decl, arg, call))
741 return false;
742
743 arg = build2 (MEM_REF, ptr_type_node, arg,
744 build_int_cst (ptr_type_node, 0));
745
746 return detect_type_change_from_memory_writes (arg, arg, comp_type,
747 call, jfunc, 0);
748 }
749
750 /* Callback of walk_aliased_vdefs. Records that it has been invoked by
751 setting the boolean variable pointed to by DATA to true. */
752
753 static bool
754 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
755 void *data)
756 {
757 bool *b = (bool *) data;
758 *b = true;
759 return true;
760 }
761
762 /* Return true if we have already walked so many statements in AA that we
763 should really just start giving up. */
764
765 static bool
766 aa_overwalked (struct ipa_func_body_info *fbi)
767 {
768 gcc_checking_assert (fbi);
769 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
770 }
771
772 /* Find the nearest valid aa status for parameter specified by INDEX that
773 dominates BB. */
774
775 static struct ipa_param_aa_status *
776 find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
777 int index)
778 {
779 while (true)
780 {
781 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
782 if (!bb)
783 return NULL;
784 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
785 if (!bi->param_aa_statuses.is_empty ()
786 && bi->param_aa_statuses[index].valid)
787 return &bi->param_aa_statuses[index];
788 }
789 }
790
791 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
792 structures and/or initialize the result with a dominating description as
793 necessary. */
794
795 static struct ipa_param_aa_status *
796 parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
797 int index)
798 {
799 gcc_checking_assert (fbi);
800 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
801 if (bi->param_aa_statuses.is_empty ())
802 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
803 struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
804 if (!paa->valid)
805 {
806 gcc_checking_assert (!paa->parm_modified
807 && !paa->ref_modified
808 && !paa->pt_modified);
809 struct ipa_param_aa_status *dom_paa;
810 dom_paa = find_dominating_aa_status (fbi, bb, index);
811 if (dom_paa)
812 *paa = *dom_paa;
813 else
814 paa->valid = true;
815 }
816
817 return paa;
818 }
819
820 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
821 a value known not to be modified in this function before reaching the
822 statement STMT. FBI holds information about the function gathered so far
823 that does not survive the summary building stage. */
824
825 static bool
826 parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
827 gimple *stmt, tree parm_load)
828 {
829 struct ipa_param_aa_status *paa;
830 bool modified = false;
831 ao_ref refd;
832
833 tree base = get_base_address (parm_load);
834 gcc_assert (TREE_CODE (base) == PARM_DECL);
835 if (TREE_READONLY (base))
836 return true;
837
838 /* FIXME: FBI can be NULL if we are being called from outside
839 ipa_node_analysis or ipcp_transform_function, which currently happens
840 during inlining analysis. It would be great to extend fbi's lifetime and
841 always have it. Currently, we are just not afraid of too much walking in
842 that case. */
843 if (fbi)
844 {
845 if (aa_overwalked (fbi))
846 return false;
847 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
848 if (paa->parm_modified)
849 return false;
850 }
851 else
852 paa = NULL;
853
854 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
855 ao_ref_init (&refd, parm_load);
856 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
857 &modified, NULL);
858 if (fbi)
859 fbi->aa_walked += walked;
860 if (paa && modified)
861 paa->parm_modified = true;
862 return !modified;
863 }
864
865 /* If STMT is an assignment that loads a value from a parameter declaration
866 which has not been modified, return the index of the parameter in
867 ipa_node_params. Otherwise return -1. */
868
869 static int
870 load_from_unmodified_param (struct ipa_func_body_info *fbi,
871 vec<ipa_param_descriptor, va_gc> *descriptors,
872 gimple *stmt)
873 {
874 int index;
875 tree op1;
876
877 if (!gimple_assign_single_p (stmt))
878 return -1;
879
880 op1 = gimple_assign_rhs1 (stmt);
881 if (TREE_CODE (op1) != PARM_DECL)
882 return -1;
883
884 index = ipa_get_param_decl_index_1 (descriptors, op1);
885 if (index < 0
886 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
887 return -1;
888
889 return index;
890 }
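/* For illustration (hypothetical GIMPLE, not from this file): in

     foo (int a)
     {
       int a.0;

       a.0_2 = a;

   the statement a.0_2 = a loads directly from the PARM_DECL a, so
   load_from_unmodified_param returns the index of a, provided the
   parameter cannot have been modified before that statement. */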
891
892 /* Return true if memory reference REF (which must be a load through parameter
893 with INDEX) loads data that are known to be unmodified in this function
894 before reaching statement STMT. */
895
896 static bool
897 parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
898 int index, gimple *stmt, tree ref)
899 {
900 struct ipa_param_aa_status *paa;
901 bool modified = false;
902 ao_ref refd;
903
904 /* FIXME: FBI can be NULL if we are being called from outside
905 ipa_node_analysis or ipcp_transform_function, which currently happens
906 during inlining analysis. It would be great to extend fbi's lifetime and
907 always have it. Currently, we are just not afraid of too much walking in
908 that case. */
909 if (fbi)
910 {
911 if (aa_overwalked (fbi))
912 return false;
913 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
914 if (paa->ref_modified)
915 return false;
916 }
917 else
918 paa = NULL;
919
920 gcc_checking_assert (gimple_vuse (stmt));
921 ao_ref_init (&refd, ref);
922 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
923 &modified, NULL);
924 if (fbi)
925 fbi->aa_walked += walked;
926 if (paa && modified)
927 paa->ref_modified = true;
928 return !modified;
929 }
930
931 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
932 is known to be unmodified in this function before reaching call statement
933 CALL into which it is passed. FBI describes the function body. */
934
935 static bool
936 parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
937 gimple *call, tree parm)
938 {
939 bool modified = false;
940 ao_ref refd;
941
942 /* It's unnecessary to calculate anything about memory contents for a const
943 function because it is not going to use it. But do not cache the result
944 either. Also, no such calculations for non-pointers. */
945 if (!gimple_vuse (call)
946 || !POINTER_TYPE_P (TREE_TYPE (parm))
947 || aa_overwalked (fbi))
948 return false;
949
950 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
951 gimple_bb (call),
952 index);
953 if (paa->pt_modified)
954 return false;
955
956 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
957 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
958 &modified, NULL);
959 fbi->aa_walked += walked;
960 if (modified)
961 paa->pt_modified = true;
962 return !modified;
963 }
964
965 /* Return true if we can prove that OP is a memory reference loading
966 data from an aggregate passed as a parameter.
967
968 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it returns
969 false if it cannot prove that the value has not been modified before the
970 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
971 if it cannot prove the value has not been modified, in that case it will
972 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
973
974 INFO and PARMS_AINFO describe parameters of the current function (but the
975 latter can be NULL), STMT is the load statement. If function returns true,
976 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
977 within the aggregate and whether it is a load from a value passed by
978 reference respectively. */
979
980 bool
981 ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
982 vec<ipa_param_descriptor, va_gc> *descriptors,
983 gimple *stmt, tree op, int *index_p,
984 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
985 bool *by_ref_p, bool *guaranteed_unmodified)
986 {
987 int index;
988 HOST_WIDE_INT size, max_size;
989 bool reverse;
990 tree base
991 = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);
992
993 if (max_size == -1 || max_size != size || *offset_p < 0)
994 return false;
995
996 if (DECL_P (base))
997 {
998 int index = ipa_get_param_decl_index_1 (descriptors, base);
999 if (index >= 0
1000 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1001 {
1002 *index_p = index;
1003 *by_ref_p = false;
1004 if (size_p)
1005 *size_p = size;
1006 if (guaranteed_unmodified)
1007 *guaranteed_unmodified = true;
1008 return true;
1009 }
1010 return false;
1011 }
1012
1013 if (TREE_CODE (base) != MEM_REF
1014 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1015 || !integer_zerop (TREE_OPERAND (base, 1)))
1016 return false;
1017
1018 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1019 {
1020 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1021 index = ipa_get_param_decl_index_1 (descriptors, parm);
1022 }
1023 else
1024 {
1025 /* This branch catches situations where a pointer parameter is not a
1026 gimple register, for example:
1027
1028 void hip7(S*) (struct S * p)
1029 {
1030 void (*<T2e4>) (struct S *) D.1867;
1031 struct S * p.1;
1032
1033 <bb 2>:
1034 p.1_1 = p;
1035 D.1867_2 = p.1_1->f;
1036 D.1867_2 ();
1037 gdp = &p;
1038 */
1039
1040 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1041 index = load_from_unmodified_param (fbi, descriptors, def);
1042 }
1043
1044 if (index >= 0)
1045 {
1046 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1047 if (!data_preserved && !guaranteed_unmodified)
1048 return false;
1049
1050 *index_p = index;
1051 *by_ref_p = true;
1052 if (size_p)
1053 *size_p = size;
1054 if (guaranteed_unmodified)
1055 *guaranteed_unmodified = data_preserved;
1056 return true;
1057 }
1058 return false;
1059 }
1060
1061 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1062 of an assignment statement STMT, try to determine whether we are actually
1063 handling any of the following cases and construct an appropriate jump
1064 function into JFUNC if so:
1065
1066 1) The passed value is loaded from a formal parameter which is not a gimple
1067 register (most probably because it is addressable, the value has to be
1068 scalar) and we can guarantee the value has not changed. This case can
1069 therefore be described by a simple pass-through jump function. For example:
1070
1071 foo (int a)
1072 {
1073 int a.0;
1074
1075 a.0_2 = a;
1076 bar (a.0_2);
1077
1078 2) The passed value can be described by a simple arithmetic pass-through
1079 jump function. E.g.
1080
1081 foo (int a)
1082 {
1083 int D.2064;
1084
1085 D.2064_4 = a.1(D) + 4;
1086 bar (D.2064_4);
1087
1088 This case can also occur in combination of the previous one, e.g.:
1089
1090 foo (int a, int z)
1091 {
1092 int a.0;
1093 int D.2064;
1094
1095 a.0_3 = a;
1096 D.2064_4 = a.0_3 + 4;
1097 foo (D.2064_4);
1098
1099 3) The passed value is an address of an object within another one (which
1100 also passed by reference). Such situations are described by an ancestor
1101 jump function and describe situations such as:
1102
1103 B::foo() (struct B * const this)
1104 {
1105 struct A * D.1845;
1106
1107 D.1845_2 = &this_1(D)->D.1748;
1108 A::bar (D.1845_2);
1109
1110 INFO is the structure describing individual parameters as accessed in
1111 different stages of IPA optimizations. PARMS_AINFO contains the information
1112 that is only needed for intraprocedural analysis. */
1113
1114 static void
1115 compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
1116 struct ipa_node_params *info,
1117 struct ipa_jump_func *jfunc,
1118 gcall *call, gimple *stmt, tree name,
1119 tree param_type)
1120 {
1121 HOST_WIDE_INT offset, size, max_size;
1122 tree op1, tc_ssa, base, ssa;
1123 bool reverse;
1124 int index;
1125
1126 op1 = gimple_assign_rhs1 (stmt);
1127
1128 if (TREE_CODE (op1) == SSA_NAME)
1129 {
1130 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1131 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1132 else
1133 index = load_from_unmodified_param (fbi, info->descriptors,
1134 SSA_NAME_DEF_STMT (op1));
1135 tc_ssa = op1;
1136 }
1137 else
1138 {
1139 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1140 tc_ssa = gimple_assign_lhs (stmt);
1141 }
1142
1143 if (index >= 0)
1144 {
1145 switch (gimple_assign_rhs_class (stmt))
1146 {
1147 case GIMPLE_BINARY_RHS:
1148 {
1149 tree op2 = gimple_assign_rhs2 (stmt);
1150 if (!is_gimple_ip_invariant (op2)
1151 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1152 != tcc_comparison)
1153 && !useless_type_conversion_p (TREE_TYPE (name),
1154 TREE_TYPE (op1))))
1155 return;
1156
1157 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1158 gimple_assign_rhs_code (stmt));
1159 break;
1160 }
1161 case GIMPLE_SINGLE_RHS:
1162 {
1163 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
1164 tc_ssa);
1165 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1166 break;
1167 }
1168 case GIMPLE_UNARY_RHS:
1169 if (is_gimple_assign (stmt)
1170 && gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
1171 && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
1172 ipa_set_jf_unary_pass_through (jfunc, index,
1173 gimple_assign_rhs_code (stmt));
1174 default:;
1175 }
1176 return;
1177 }
1178
1179 if (TREE_CODE (op1) != ADDR_EXPR)
1180 return;
1181 op1 = TREE_OPERAND (op1, 0);
1182 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1183 return;
1184 base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
1185 if (TREE_CODE (base) != MEM_REF
1186 /* If this is a varying address, punt. */
1187 || max_size == -1
1188 || max_size != size)
1189 return;
1190 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
1191 ssa = TREE_OPERAND (base, 0);
1192 if (TREE_CODE (ssa) != SSA_NAME
1193 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1194 || offset < 0)
1195 return;
1196
1197 /* Dynamic types are changed in constructors and destructors. */
1198 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1199 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1200 ipa_set_ancestor_jf (jfunc, offset, index,
1201 parm_ref_data_pass_through_p (fbi, index, call, ssa));
1202 }
1203
1204 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1205 it looks like:
1206
1207 iftmp.1_3 = &obj_2(D)->D.1762;
1208
1209 The base of the MEM_REF must be a default definition SSA NAME of a
1210 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1211 whole MEM_REF expression is returned and the offset calculated from any
1212 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1213 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1214
1215 static tree
1216 get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
1217 {
1218 HOST_WIDE_INT size, max_size;
1219 tree expr, parm, obj;
1220 bool reverse;
1221
1222 if (!gimple_assign_single_p (assign))
1223 return NULL_TREE;
1224 expr = gimple_assign_rhs1 (assign);
1225
1226 if (TREE_CODE (expr) != ADDR_EXPR)
1227 return NULL_TREE;
1228 expr = TREE_OPERAND (expr, 0);
1229 obj = expr;
1230 expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);
1231
1232 if (TREE_CODE (expr) != MEM_REF
1233 /* If this is a varying address, punt. */
1234 || max_size == -1
1235 || max_size != size
1236 || *offset < 0)
1237 return NULL_TREE;
1238 parm = TREE_OPERAND (expr, 0);
1239 if (TREE_CODE (parm) != SSA_NAME
1240 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1241 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1242 return NULL_TREE;
1243
1244 *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
1245 *obj_p = obj;
1246 return expr;
1247 }
1248
1249
1250 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1251 statement PHI, try to find out whether NAME is in fact a
1252 multiple-inheritance typecast from a descendant into an ancestor of a formal
1253 parameter and thus can be described by an ancestor jump function and if so,
1254 write the appropriate function into JFUNC.
1255
1256 Essentially we want to match the following pattern:
1257
1258 if (obj_2(D) != 0B)
1259 goto <bb 3>;
1260 else
1261 goto <bb 4>;
1262
1263 <bb 3>:
1264 iftmp.1_3 = &obj_2(D)->D.1762;
1265
1266 <bb 4>:
1267 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1268 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1269 return D.1879_6; */
1270
1271 static void
1272 compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
1273 struct ipa_node_params *info,
1274 struct ipa_jump_func *jfunc,
1275 gcall *call, gphi *phi)
1276 {
1277 HOST_WIDE_INT offset;
1278 gimple *assign, *cond;
1279 basic_block phi_bb, assign_bb, cond_bb;
1280 tree tmp, parm, expr, obj;
1281 int index, i;
1282
1283 if (gimple_phi_num_args (phi) != 2)
1284 return;
1285
1286 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1287 tmp = PHI_ARG_DEF (phi, 0);
1288 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1289 tmp = PHI_ARG_DEF (phi, 1);
1290 else
1291 return;
1292 if (TREE_CODE (tmp) != SSA_NAME
1293 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1294 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1295 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1296 return;
1297
1298 assign = SSA_NAME_DEF_STMT (tmp);
1299 assign_bb = gimple_bb (assign);
1300 if (!single_pred_p (assign_bb))
1301 return;
1302 expr = get_ancestor_addr_info (assign, &obj, &offset);
1303 if (!expr)
1304 return;
1305 parm = TREE_OPERAND (expr, 0);
1306 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1307 if (index < 0)
1308 return;
1309
1310 cond_bb = single_pred (assign_bb);
1311 cond = last_stmt (cond_bb);
1312 if (!cond
1313 || gimple_code (cond) != GIMPLE_COND
1314 || gimple_cond_code (cond) != NE_EXPR
1315 || gimple_cond_lhs (cond) != parm
1316 || !integer_zerop (gimple_cond_rhs (cond)))
1317 return;
1318
1319 phi_bb = gimple_bb (phi);
1320 for (i = 0; i < 2; i++)
1321 {
1322 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1323 if (pred != assign_bb && pred != cond_bb)
1324 return;
1325 }
1326
1327 ipa_set_ancestor_jf (jfunc, offset, index,
1328 parm_ref_data_pass_through_p (fbi, index, call, parm));
1329 }
1330
1331 /* Inspect the given TYPE and return true iff it has the same structure (the
1332 same number of fields of the same types) as a C++ member pointer. If
1333 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1334 corresponding fields there. */
1335
1336 static bool
1337 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1338 {
1339 tree fld;
1340
1341 if (TREE_CODE (type) != RECORD_TYPE)
1342 return false;
1343
1344 fld = TYPE_FIELDS (type);
1345 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1346 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1347 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1348 return false;
1349
1350 if (method_ptr)
1351 *method_ptr = fld;
1352
1353 fld = DECL_CHAIN (fld);
1354 if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
1355 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1356 return false;
1357 if (delta)
1358 *delta = fld;
1359
1360 if (DECL_CHAIN (fld))
1361 return false;
1362
1363 return true;
1364 }
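/* For illustration (hypothetical C++, not from this file): a
   pointer-to-member-function type such as

     void (S::*) ()

   is lowered into a RECORD_TYPE shaped roughly like

     struct { void (*__pfn) (); long __delta; };

   which is exactly the two-field structure the predicate above checks
   for. */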
1365
1366 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1367 return the rhs of its defining statement. Otherwise return RHS as it
1368 is. */
1369
1370 static inline tree
1371 get_ssa_def_if_simple_copy (tree rhs)
1372 {
1373 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1374 {
1375 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
1376
1377 if (gimple_assign_single_p (def_stmt))
1378 rhs = gimple_assign_rhs1 (def_stmt);
1379 else
1380 break;
1381 }
1382 return rhs;
1383 }
1384
1385 /* Simple linked list, describing known contents of an aggregate before a
1386 call. */
1387
1388 struct ipa_known_agg_contents_list
1389 {
1390 /* Offset and size of the described part of the aggregate. */
1391 HOST_WIDE_INT offset, size;
1392 /* Known constant value or NULL if the contents are known to be unknown. */
1393 tree constant;
1394 /* Pointer to the next structure in the list. */
1395 struct ipa_known_agg_contents_list *next;
1396 };
1397
1398 /* Find the proper place in the linked list of ipa_known_agg_contents_list
1399 structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
1400 unless there is a partial overlap, in which case return NULL, or such
1401 element is already there, in which case set *ALREADY_THERE to true. */
1402
1403 static struct ipa_known_agg_contents_list **
1404 get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
1405 HOST_WIDE_INT lhs_offset,
1406 HOST_WIDE_INT lhs_size,
1407 bool *already_there)
1408 {
1409 struct ipa_known_agg_contents_list **p = list;
1410 while (*p && (*p)->offset < lhs_offset)
1411 {
1412 if ((*p)->offset + (*p)->size > lhs_offset)
1413 return NULL;
1414 p = &(*p)->next;
1415 }
1416
1417 if (*p && (*p)->offset < lhs_offset + lhs_size)
1418 {
1419 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1420 /* We already know this value is subsequently overwritten with
1421 something else. */
1422 *already_there = true;
1423 else
1424 /* Otherwise this is a partial overlap which we cannot
1425 represent. */
1426 return NULL;
1427 }
1428 return p;
1429 }
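/* For illustration (hypothetical offsets, not from this file): with
   entries covering bits [0, 32) and [64, 96) in the list, asking for
   offset 32 and size 32 returns the link between them, asking for offset
   0 and size 32 again sets *ALREADY_THERE, and asking for offset 16 and
   size 32 is a partial overlap, so NULL is returned. */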
1430
1431 /* Build aggregate jump function from LIST, assuming there are exactly
1432 CONST_COUNT constant entries there and that the offset of the passed argument
1433 is ARG_OFFSET and store it into JFUNC. */
1434
1435 static void
1436 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1437 int const_count, HOST_WIDE_INT arg_offset,
1438 struct ipa_jump_func *jfunc)
1439 {
1440 vec_alloc (jfunc->agg.items, const_count);
1441 while (list)
1442 {
1443 if (list->constant)
1444 {
1445 struct ipa_agg_jf_item item;
1446 item.offset = list->offset - arg_offset;
1447 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1448 item.value = unshare_expr_without_location (list->constant);
1449 jfunc->agg.items->quick_push (item);
1450 }
1451 list = list->next;
1452 }
1453 }
1454
1455 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1456 in ARG is filled in with constant values. ARG can either be an aggregate
1457 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1458 aggregate. JFUNC is the jump function into which the constants are
1459 subsequently stored. */
1460
1461 static void
1462 determine_locally_known_aggregate_parts (gcall *call, tree arg,
1463 tree arg_type,
1464 struct ipa_jump_func *jfunc)
1465 {
1466 struct ipa_known_agg_contents_list *list = NULL;
1467 int item_count = 0, const_count = 0;
1468 HOST_WIDE_INT arg_offset, arg_size;
1469 gimple_stmt_iterator gsi;
1470 tree arg_base;
1471 bool check_ref, by_ref;
1472 ao_ref r;
1473
1474 if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
1475 return;
1476
1477 /* The function operates in three stages. First, we prepare check_ref, r,
1478 arg_base and arg_offset based on what is actually passed as an actual
1479 argument. */
1480
1481 if (POINTER_TYPE_P (arg_type))
1482 {
1483 by_ref = true;
1484 if (TREE_CODE (arg) == SSA_NAME)
1485 {
1486 tree type_size;
1487 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1488 return;
1489 check_ref = true;
1490 arg_base = arg;
1491 arg_offset = 0;
1492 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1493 arg_size = tree_to_uhwi (type_size);
1494 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1495 }
1496 else if (TREE_CODE (arg) == ADDR_EXPR)
1497 {
1498 HOST_WIDE_INT arg_max_size;
1499 bool reverse;
1500
1501 arg = TREE_OPERAND (arg, 0);
1502 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1503 &arg_max_size, &reverse);
1504 if (arg_max_size == -1
1505 || arg_max_size != arg_size
1506 || arg_offset < 0)
1507 return;
1508 if (DECL_P (arg_base))
1509 {
1510 check_ref = false;
1511 ao_ref_init (&r, arg_base);
1512 }
1513 else
1514 return;
1515 }
1516 else
1517 return;
1518 }
1519 else
1520 {
1521 HOST_WIDE_INT arg_max_size;
1522 bool reverse;
1523
1524 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1525
1526 by_ref = false;
1527 check_ref = false;
1528 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1529 &arg_max_size, &reverse);
1530 if (arg_max_size == -1
1531 || arg_max_size != arg_size
1532 || arg_offset < 0)
1533 return;
1534
1535 ao_ref_init (&r, arg);
1536 }
1537
1538 /* Second stage walks back the BB, looks at individual statements and as long
1539 as it is confident of how the statements affect contents of the
1540 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
1541 structures describing it. */
1542 gsi = gsi_for_stmt (call);
1543 gsi_prev (&gsi);
1544 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1545 {
1546 struct ipa_known_agg_contents_list *n, **p;
1547 gimple *stmt = gsi_stmt (gsi);
1548 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1549 tree lhs, rhs, lhs_base;
1550 bool reverse;
1551
1552 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1553 continue;
1554 if (!gimple_assign_single_p (stmt))
1555 break;
1556
1557 lhs = gimple_assign_lhs (stmt);
1558 rhs = gimple_assign_rhs1 (stmt);
1559 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1560 || TREE_CODE (lhs) == BIT_FIELD_REF
1561 || contains_bitfld_component_ref_p (lhs))
1562 break;
1563
1564 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1565 &lhs_max_size, &reverse);
1566 if (lhs_max_size == -1
1567 || lhs_max_size != lhs_size)
1568 break;
1569
1570 if (check_ref)
1571 {
1572 if (TREE_CODE (lhs_base) != MEM_REF
1573 || TREE_OPERAND (lhs_base, 0) != arg_base
1574 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1575 break;
1576 }
1577 else if (lhs_base != arg_base)
1578 {
1579 if (DECL_P (lhs_base))
1580 continue;
1581 else
1582 break;
1583 }
1584
1585 bool already_there = false;
1586 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1587 &already_there);
1588 if (!p)
1589 break;
1590 if (already_there)
1591 continue;
1592
1593 rhs = get_ssa_def_if_simple_copy (rhs);
1594 n = XALLOCA (struct ipa_known_agg_contents_list);
1595 n->size = lhs_size;
1596 n->offset = lhs_offset;
1597 if (is_gimple_ip_invariant (rhs))
1598 {
1599 n->constant = rhs;
1600 const_count++;
1601 }
1602 else
1603 n->constant = NULL_TREE;
1604 n->next = *p;
1605 *p = n;
1606
1607 item_count++;
1608 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1609 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1610 break;
1611 }
1612
1613 /* Third stage just goes over the list and creates an appropriate vector of
1614 ipa_agg_jf_item structures out of it, of course only if there are
1615 any known constants to begin with. */
1616
1617 if (const_count)
1618 {
1619 jfunc->agg.by_ref = by_ref;
1620 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1621 }
1622 }
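/* For illustration (hypothetical source, not from this file): before the
   call in

     struct S s;
     s.a = 1;
     s.b = 2;
     bar (&s);

   the backward walk records the two stores, and the resulting jump
   function describes an aggregate passed by reference with two known
   constant items at the offsets of a and b. */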
1623
1624 /* Return the Ith param type of callee associated with call graph
1625 edge E. */
1626
1627 tree
1628 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1629 {
1630 int n;
1631 tree type = (e->callee
1632 ? TREE_TYPE (e->callee->decl)
1633 : gimple_call_fntype (e->call_stmt));
1634 tree t = TYPE_ARG_TYPES (type);
1635
1636 for (n = 0; n < i; n++)
1637 {
1638 if (!t)
1639 break;
1640 t = TREE_CHAIN (t);
1641 }
1642 if (t)
1643 return TREE_VALUE (t);
1644 if (!e->callee)
1645 return NULL;
1646 t = DECL_ARGUMENTS (e->callee->decl);
1647 for (n = 0; n < i; n++)
1648 {
1649 if (!t)
1650 return NULL;
1651 t = TREE_CHAIN (t);
1652 }
1653 if (t)
1654 return TREE_TYPE (t);
1655 return NULL;
1656 }
1657
1658 /* Compute jump function for all arguments of callsite CS and insert the
1659 information in the jump_functions array in the ipa_edge_args corresponding
1660 to this callsite. */
1661
1662 static void
1663 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
1664 struct cgraph_edge *cs)
1665 {
1666 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1667 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1668 gcall *call = cs->call_stmt;
1669 int n, arg_num = gimple_call_num_args (call);
1670 bool useful_context = false;
1671
1672 if (arg_num == 0 || args->jump_functions)
1673 return;
1674 vec_safe_grow_cleared (args->jump_functions, arg_num);
1675 if (flag_devirtualize)
1676 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1677
1678 if (gimple_call_internal_p (call))
1679 return;
1680 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1681 return;
1682
1683 for (n = 0; n < arg_num; n++)
1684 {
1685 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1686 tree arg = gimple_call_arg (call, n);
1687 tree param_type = ipa_get_callee_param_type (cs, n);
1688 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1689 {
1690 tree instance;
1691 struct ipa_polymorphic_call_context context (cs->caller->decl,
1692 arg, cs->call_stmt,
1693 &instance);
1694 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1695 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1696 if (!context.useless_p ())
1697 useful_context = true;
1698 }
1699
1700 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1701 {
1702 bool addr_nonzero = false;
1703 bool strict_overflow = false;
1704
1705 if (TREE_CODE (arg) == SSA_NAME
1706 && param_type
1707 && get_ptr_nonnull (arg))
1708 addr_nonzero = true;
1709 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
1710 addr_nonzero = true;
1711
1712 if (addr_nonzero)
1713 {
1714 jfunc->vr_known = true;
1715 jfunc->m_vr.type = VR_ANTI_RANGE;
1716 jfunc->m_vr.min = build_int_cst (TREE_TYPE (arg), 0);
1717 jfunc->m_vr.max = build_int_cst (TREE_TYPE (arg), 0);
1718 jfunc->m_vr.equiv = NULL;
1719 }
1720 else
1721 gcc_assert (!jfunc->vr_known);
1722 }
1723 else
1724 {
1725 wide_int min, max;
1726 value_range_type type;
1727 if (TREE_CODE (arg) == SSA_NAME
1728 && param_type
1729 && (type = get_range_info (arg, &min, &max))
1730 && (type == VR_RANGE || type == VR_ANTI_RANGE))
1731 {
1732 value_range vr;
1733
1734 vr.type = type;
1735 vr.min = wide_int_to_tree (TREE_TYPE (arg), min);
1736 vr.max = wide_int_to_tree (TREE_TYPE (arg), max);
1737 vr.equiv = NULL;
1738 extract_range_from_unary_expr (&jfunc->m_vr,
1739 NOP_EXPR,
1740 param_type,
1741 &vr, TREE_TYPE (arg));
1742 if (jfunc->m_vr.type == VR_RANGE
1743 || jfunc->m_vr.type == VR_ANTI_RANGE)
1744 jfunc->vr_known = true;
1745 else
1746 jfunc->vr_known = false;
1747 }
1748 else
1749 gcc_assert (!jfunc->vr_known);
1750 }
1751
1752 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
1753 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
1754 {
1755 jfunc->bits.known = true;
1756
1757 if (TREE_CODE (arg) == SSA_NAME)
1758 {
1759 jfunc->bits.value = 0;
1760 jfunc->bits.mask = widest_int::from (get_nonzero_bits (arg),
1761 TYPE_SIGN (TREE_TYPE (arg)));
1762 }
1763 else
1764 {
1765 jfunc->bits.value = wi::to_widest (arg);
1766 jfunc->bits.mask = 0;
1767 }
1768 }
1769 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
1770 {
1771 unsigned HOST_WIDE_INT bitpos;
1772 unsigned align;
1773
1774 jfunc->bits.known = true;
1775 get_pointer_alignment_1 (arg, &align, &bitpos);
1776 jfunc->bits.mask = wi::mask<widest_int>(TYPE_PRECISION (TREE_TYPE (arg)), false)
1777 .and_not (align / BITS_PER_UNIT - 1);
1778 jfunc->bits.value = bitpos / BITS_PER_UNIT;
1779 }
1780 else
1781 gcc_assert (!jfunc->bits.known);
1782
1783 if (is_gimple_ip_invariant (arg)
1784 || (VAR_P (arg)
1785 && is_global_var (arg)
1786 && TREE_READONLY (arg)))
1787 ipa_set_jf_constant (jfunc, arg, cs);
1788 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1789 && TREE_CODE (arg) == PARM_DECL)
1790 {
1791 int index = ipa_get_param_decl_index (info, arg);
1792
1793 gcc_assert (index >= 0);
1794 /* Aggregate passed by value, check for pass-through, otherwise we
1795 will attempt to fill in aggregate contents later in this
1796 loop. */
1797 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1798 {
1799 ipa_set_jf_simple_pass_through (jfunc, index, false);
1800 continue;
1801 }
1802 }
1803 else if (TREE_CODE (arg) == SSA_NAME)
1804 {
1805 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1806 {
1807 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1808 if (index >= 0)
1809 {
1810 bool agg_p;
1811 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1812 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1813 }
1814 }
1815 else
1816 {
1817 gimple *stmt = SSA_NAME_DEF_STMT (arg);
1818 if (is_gimple_assign (stmt))
1819 compute_complex_assign_jump_func (fbi, info, jfunc,
1820 call, stmt, arg, param_type);
1821 else if (gimple_code (stmt) == GIMPLE_PHI)
1822 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1823 call,
1824 as_a <gphi *> (stmt));
1825 }
1826 }
1827
1828 /* If ARG is a pointer, we cannot use its type to determine the type of the
1829 aggregate passed (because type conversions are ignored in gimple). We can
1830 usually safely get the type from the function declaration, but in the case
1831 of K&R prototypes or variadic functions we can try our luck with the type
1832 of the pointer passed. TODO: Since we look for actual initialization of
1833 the memory object, we might do better to work out the type from the stores we find. */
1834 if (!param_type)
1835 param_type = TREE_TYPE (arg);
1836
1837 if ((jfunc->type != IPA_JF_PASS_THROUGH
1838 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1839 && (jfunc->type != IPA_JF_ANCESTOR
1840 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1841 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1842 || POINTER_TYPE_P (param_type)))
1843 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1844 }
1845 if (!useful_context)
1846 vec_free (args->polymorphic_call_contexts);
1847 }
1848
1849 /* Compute jump functions for all edges - both direct and indirect - outgoing
1850 from BB. */
1851
1852 static void
1853 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
1854 {
1855 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1856 int i;
1857 struct cgraph_edge *cs;
1858
1859 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1860 {
1861 struct cgraph_node *callee = cs->callee;
1862
1863 if (callee)
1864 {
1865 callee->ultimate_alias_target ();
1866 /* We do not need to bother analyzing calls to unknown functions
1867 unless they may become known during lto/whopr. */
1868 if (!callee->definition && !flag_lto)
1869 continue;
1870 }
1871 ipa_compute_jump_functions_for_edge (fbi, cs);
1872 }
1873 }
1874
1875 /* If STMT looks like a statement loading a value from a member pointer formal
1876 parameter, return that parameter and store the offset of the field to
1877 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1878 might be clobbered). If USE_DELTA, then we look for a use of the delta
1879 field rather than the pfn. */
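
/* A typical statement matched here is one of

     f$__pfn_24 = f.__pfn;
     f$__pfn_24 = MEM[(struct *)&f];

   as in the gimple dump in the comment before ipa_analyze_indirect_call_uses
   below.  */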
1880
1881 static tree
1882 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
1883 HOST_WIDE_INT *offset_p)
1884 {
1885 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1886
1887 if (!gimple_assign_single_p (stmt))
1888 return NULL_TREE;
1889
1890 rhs = gimple_assign_rhs1 (stmt);
1891 if (TREE_CODE (rhs) == COMPONENT_REF)
1892 {
1893 ref_field = TREE_OPERAND (rhs, 1);
1894 rhs = TREE_OPERAND (rhs, 0);
1895 }
1896 else
1897 ref_field = NULL_TREE;
1898 if (TREE_CODE (rhs) != MEM_REF)
1899 return NULL_TREE;
1900 rec = TREE_OPERAND (rhs, 0);
1901 if (TREE_CODE (rec) != ADDR_EXPR)
1902 return NULL_TREE;
1903 rec = TREE_OPERAND (rec, 0);
1904 if (TREE_CODE (rec) != PARM_DECL
1905 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1906 return NULL_TREE;
1907 ref_offset = TREE_OPERAND (rhs, 1);
1908
1909 if (use_delta)
1910 fld = delta_field;
1911 else
1912 fld = ptr_field;
1913 if (offset_p)
1914 *offset_p = int_bit_position (fld);
1915
1916 if (ref_field)
1917 {
1918 if (integer_nonzerop (ref_offset))
1919 return NULL_TREE;
1920 return ref_field == fld ? rec : NULL_TREE;
1921 }
1922 else
1923 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1924 : NULL_TREE;
1925 }
1926
1927 /* Returns true iff T is an SSA_NAME defined by a statement. */
1928
1929 static bool
1930 ipa_is_ssa_with_stmt_def (tree t)
1931 {
1932 if (TREE_CODE (t) == SSA_NAME
1933 && !SSA_NAME_IS_DEFAULT_DEF (t))
1934 return true;
1935 else
1936 return false;
1937 }
1938
1939 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1940 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1941 indirect call graph edge. */
1942
1943 static struct cgraph_edge *
1944 ipa_note_param_call (struct cgraph_node *node, int param_index,
1945 gcall *stmt)
1946 {
1947 struct cgraph_edge *cs;
1948
1949 cs = node->get_edge (stmt);
1950 cs->indirect_info->param_index = param_index;
1951 cs->indirect_info->agg_contents = 0;
1952 cs->indirect_info->member_ptr = 0;
1953 cs->indirect_info->guaranteed_unmodified = 0;
1954 return cs;
1955 }
1956
1957 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1958 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1959 intermediate information about each formal parameter. Currently it checks
1960 whether the call calls a pointer that is a formal parameter and if so, the
1961 parameter is marked with the called flag and an indirect call graph edge
1962 describing the call is created. This is very simple for ordinary pointers
1963 represented in SSA but not-so-nice when it comes to member pointers. The
1964 ugly part of this function does nothing more than trying to match the
1965 pattern of such a call. An example of such a pattern is the gimple dump
1966 below; the call is on the last line:
1967
1968 <bb 2>:
1969 f$__delta_5 = f.__delta;
1970 f$__pfn_24 = f.__pfn;
1971
1972 or
1973 <bb 2>:
1974 f$__delta_5 = MEM[(struct *)&f];
1975 f$__pfn_24 = MEM[(struct *)&f + 4B];
1976
1977 and a few lines below:
1978
1979 <bb 5>
1980 D.2496_3 = (int) f$__pfn_24;
1981 D.2497_4 = D.2496_3 & 1;
1982 if (D.2497_4 != 0)
1983 goto <bb 3>;
1984 else
1985 goto <bb 4>;
1986
1987 <bb 6>:
1988 D.2500_7 = (unsigned int) f$__delta_5;
1989 D.2501_8 = &S + D.2500_7;
1990 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1991 D.2503_10 = *D.2502_9;
1992 D.2504_12 = f$__pfn_24 + -1;
1993 D.2505_13 = (unsigned int) D.2504_12;
1994 D.2506_14 = D.2503_10 + D.2505_13;
1995 D.2507_15 = *D.2506_14;
1996 iftmp.11_16 = (String:: *) D.2507_15;
1997
1998 <bb 7>:
1999 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2000 D.2500_19 = (unsigned int) f$__delta_5;
2001 D.2508_20 = &S + D.2500_19;
2002 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2003
2004 Such patterns are the result of simple calls through a member pointer:
2005
2006 int doprinting (int (MyString::* f)(int) const)
2007 {
2008 MyString S ("somestring");
2009
2010 return (S.*f)(4);
2011 }
2012
2013 Moreover, the function also looks for called pointers loaded from aggregates
2014 passed by value or reference. */
2015
2016 static void
2017 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2018 tree target)
2019 {
2020 struct ipa_node_params *info = fbi->info;
2021 HOST_WIDE_INT offset;
2022 bool by_ref;
2023
2024 if (SSA_NAME_IS_DEFAULT_DEF (target))
2025 {
2026 tree var = SSA_NAME_VAR (target);
2027 int index = ipa_get_param_decl_index (info, var);
2028 if (index >= 0)
2029 ipa_note_param_call (fbi->node, index, call);
2030 return;
2031 }
2032
2033 int index;
2034 gimple *def = SSA_NAME_DEF_STMT (target);
2035 bool guaranteed_unmodified;
2036 if (gimple_assign_single_p (def)
2037 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2038 gimple_assign_rhs1 (def), &index, &offset,
2039 NULL, &by_ref, &guaranteed_unmodified))
2040 {
2041 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2042 cs->indirect_info->offset = offset;
2043 cs->indirect_info->agg_contents = 1;
2044 cs->indirect_info->by_ref = by_ref;
2045 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2046 return;
2047 }
2048
2049 /* Now we need to try to match the complex pattern of calling a member
2050 pointer. */
2051 if (gimple_code (def) != GIMPLE_PHI
2052 || gimple_phi_num_args (def) != 2
2053 || !POINTER_TYPE_P (TREE_TYPE (target))
2054 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2055 return;
2056
2057 /* First, we need to check whether one of these is a load from a member
2058 pointer that is a parameter to this function. */
2059 tree n1 = PHI_ARG_DEF (def, 0);
2060 tree n2 = PHI_ARG_DEF (def, 1);
2061 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2062 return;
2063 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2064 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2065
2066 tree rec;
2067 basic_block bb, virt_bb;
2068 basic_block join = gimple_bb (def);
2069 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2070 {
2071 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2072 return;
2073
2074 bb = EDGE_PRED (join, 0)->src;
2075 virt_bb = gimple_bb (d2);
2076 }
2077 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2078 {
2079 bb = EDGE_PRED (join, 1)->src;
2080 virt_bb = gimple_bb (d1);
2081 }
2082 else
2083 return;
2084
2085 /* Second, we need to check that the basic blocks are laid out in the way
2086 corresponding to the pattern. */
2087
2088 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2089 || single_pred (virt_bb) != bb
2090 || single_succ (virt_bb) != join)
2091 return;
2092
2093 /* Third, let's see that the branching is done depending on the least
2094 significant bit of the pfn. */
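/* (With the usual ptrmemfunc-vbit-in-pfn convention, a set least significant
   bit marks a virtual member function; some targets keep the bit in the
   delta field instead, which is why TARGET_PTRMEMFUNC_VBIT_LOCATION is
   consulted below.)  */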
2095
2096 gimple *branch = last_stmt (bb);
2097 if (!branch || gimple_code (branch) != GIMPLE_COND)
2098 return;
2099
2100 if ((gimple_cond_code (branch) != NE_EXPR
2101 && gimple_cond_code (branch) != EQ_EXPR)
2102 || !integer_zerop (gimple_cond_rhs (branch)))
2103 return;
2104
2105 tree cond = gimple_cond_lhs (branch);
2106 if (!ipa_is_ssa_with_stmt_def (cond))
2107 return;
2108
2109 def = SSA_NAME_DEF_STMT (cond);
2110 if (!is_gimple_assign (def)
2111 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2112 || !integer_onep (gimple_assign_rhs2 (def)))
2113 return;
2114
2115 cond = gimple_assign_rhs1 (def);
2116 if (!ipa_is_ssa_with_stmt_def (cond))
2117 return;
2118
2119 def = SSA_NAME_DEF_STMT (cond);
2120
2121 if (is_gimple_assign (def)
2122 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2123 {
2124 cond = gimple_assign_rhs1 (def);
2125 if (!ipa_is_ssa_with_stmt_def (cond))
2126 return;
2127 def = SSA_NAME_DEF_STMT (cond);
2128 }
2129
2130 tree rec2;
2131 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2132 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2133 == ptrmemfunc_vbit_in_delta),
2134 NULL);
2135 if (rec != rec2)
2136 return;
2137
2138 index = ipa_get_param_decl_index (info, rec);
2139 if (index >= 0
2140 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2141 {
2142 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2143 cs->indirect_info->offset = offset;
2144 cs->indirect_info->agg_contents = 1;
2145 cs->indirect_info->member_ptr = 1;
2146 cs->indirect_info->guaranteed_unmodified = 1;
2147 }
2148
2149 return;
2150 }
2151
2152 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2153 object referenced in the expression is a formal parameter of the caller
2154 FBI->node (described by FBI->info), create a call note for the
2155 statement. */
2156
2157 static void
2158 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2159 gcall *call, tree target)
2160 {
2161 tree obj = OBJ_TYPE_REF_OBJECT (target);
2162 int index;
2163 HOST_WIDE_INT anc_offset;
2164
2165 if (!flag_devirtualize)
2166 return;
2167
2168 if (TREE_CODE (obj) != SSA_NAME)
2169 return;
2170
2171 struct ipa_node_params *info = fbi->info;
2172 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2173 {
2174 struct ipa_jump_func jfunc;
2175 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2176 return;
2177
2178 anc_offset = 0;
2179 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2180 gcc_assert (index >= 0);
2181 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2182 call, &jfunc))
2183 return;
2184 }
2185 else
2186 {
2187 struct ipa_jump_func jfunc;
2188 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2189 tree expr;
2190
2191 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2192 if (!expr)
2193 return;
2194 index = ipa_get_param_decl_index (info,
2195 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2196 gcc_assert (index >= 0);
2197 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2198 call, &jfunc, anc_offset))
2199 return;
2200 }
2201
2202 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2203 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2204 ii->offset = anc_offset;
2205 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2206 ii->otr_type = obj_type_ref_class (target);
2207 ii->polymorphic = 1;
2208 }
2209
2210 /* Analyze the call statement CALL to see whether and how it uses formal
2211 parameters of the caller (described by INFO). PARMS_AINFO is a pointer to
2212 a vector containing intermediate information about each formal parameter. */
2213
2214 static void
2215 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2216 {
2217 tree target = gimple_call_fn (call);
2218
2219 if (!target
2220 || (TREE_CODE (target) != SSA_NAME
2221 && !virtual_method_call_p (target)))
2222 return;
2223
2224 struct cgraph_edge *cs = fbi->node->get_edge (call);
2225 /* If we previously turned the call into a direct call, there is
2226 no need to analyze. */
2227 if (cs && !cs->indirect_unknown_callee)
2228 return;
2229
2230 if (cs->indirect_info->polymorphic && flag_devirtualize)
2231 {
2232 tree instance;
2233 tree target = gimple_call_fn (call);
2234 ipa_polymorphic_call_context context (current_function_decl,
2235 target, call, &instance);
2236
2237 gcc_checking_assert (cs->indirect_info->otr_type
2238 == obj_type_ref_class (target));
2239 gcc_checking_assert (cs->indirect_info->otr_token
2240 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2241
2242 cs->indirect_info->vptr_changed
2243 = !context.get_dynamic_type (instance,
2244 OBJ_TYPE_REF_OBJECT (target),
2245 obj_type_ref_class (target), call);
2246 cs->indirect_info->context = context;
2247 }
2248
2249 if (TREE_CODE (target) == SSA_NAME)
2250 ipa_analyze_indirect_call_uses (fbi, call, target);
2251 else if (virtual_method_call_p (target))
2252 ipa_analyze_virtual_call_uses (fbi, call, target);
2253 }
2254
2255
2256 /* Analyze the call statement STMT with respect to formal parameters (described
2257 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2258 formal parameters are called. */
2259
2260 static void
2261 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2262 {
2263 if (is_gimple_call (stmt))
2264 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2265 }
2266
2267 /* Callback of walk_stmt_load_store_addr_ops, used as visit_load, visit_store
2268 and visit_addr. If OP is a parameter declaration, mark it as used in the
2269 info structure passed in DATA. */
2270
2271 static bool
2272 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2273 {
2274 struct ipa_node_params *info = (struct ipa_node_params *) data;
2275
2276 op = get_base_address (op);
2277 if (op
2278 && TREE_CODE (op) == PARM_DECL)
2279 {
2280 int index = ipa_get_param_decl_index (info, op);
2281 gcc_assert (index >= 0);
2282 ipa_set_param_used (info, index, true);
2283 }
2284
2285 return false;
2286 }
2287
2288 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2289 the findings in various fields of the associated ipa_node_params
2290 structure, such as parameter flags, notes, etc. FBI holds various data about
2291 the function being analyzed. */
2292
2293 static void
2294 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2295 {
2296 gimple_stmt_iterator gsi;
2297 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2298 {
2299 gimple *stmt = gsi_stmt (gsi);
2300
2301 if (is_gimple_debug (stmt))
2302 continue;
2303
2304 ipa_analyze_stmt_uses (fbi, stmt);
2305 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2306 visit_ref_for_mod_analysis,
2307 visit_ref_for_mod_analysis,
2308 visit_ref_for_mod_analysis);
2309 }
2310 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2311 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2312 visit_ref_for_mod_analysis,
2313 visit_ref_for_mod_analysis,
2314 visit_ref_for_mod_analysis);
2315 }
2316
2317 /* Calculate controlled uses of parameters of NODE. */
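/* A count of uses is "controlled" when IPA structures fully describe it,
   i.e. the default definition of the parameter appears only in call
   statements (and possibly debug statements).  Any other use makes the
   count IPA_UNDESCRIBED_USE.  */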
2318
2319 static void
2320 ipa_analyze_controlled_uses (struct cgraph_node *node)
2321 {
2322 struct ipa_node_params *info = IPA_NODE_REF (node);
2323
2324 for (int i = 0; i < ipa_get_param_count (info); i++)
2325 {
2326 tree parm = ipa_get_param (info, i);
2327 int controlled_uses = 0;
2328
2329 /* For SSA regs see if parameter is used. For non-SSA we compute
2330 the flag during modification analysis. */
2331 if (is_gimple_reg (parm))
2332 {
2333 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2334 parm);
2335 if (ddef && !has_zero_uses (ddef))
2336 {
2337 imm_use_iterator imm_iter;
2338 use_operand_p use_p;
2339
2340 ipa_set_param_used (info, i, true);
2341 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2342 if (!is_gimple_call (USE_STMT (use_p)))
2343 {
2344 if (!is_gimple_debug (USE_STMT (use_p)))
2345 {
2346 controlled_uses = IPA_UNDESCRIBED_USE;
2347 break;
2348 }
2349 }
2350 else
2351 controlled_uses++;
2352 }
2353 else
2354 controlled_uses = 0;
2355 }
2356 else
2357 controlled_uses = IPA_UNDESCRIBED_USE;
2358 ipa_set_controlled_uses (info, i, controlled_uses);
2359 }
2360 }
2361
2362 /* Free the dynamically allocated data in BI. */
2363
2364 static void
2365 free_ipa_bb_info (struct ipa_bb_info *bi)
2366 {
2367 bi->cg_edges.release ();
2368 bi->param_aa_statuses.release ();
2369 }
2370
2371 /* Dominator walker driving the analysis. */
2372
2373 class analysis_dom_walker : public dom_walker
2374 {
2375 public:
2376 analysis_dom_walker (struct ipa_func_body_info *fbi)
2377 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2378
2379 virtual edge before_dom_children (basic_block);
2380
2381 private:
2382 struct ipa_func_body_info *m_fbi;
2383 };
2384
2385 edge
2386 analysis_dom_walker::before_dom_children (basic_block bb)
2387 {
2388 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2389 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2390 return NULL;
2391 }
2392
2393 /* Release body info FBI. */
2394
2395 void
2396 ipa_release_body_info (struct ipa_func_body_info *fbi)
2397 {
2398 int i;
2399 struct ipa_bb_info *bi;
2400
2401 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2402 free_ipa_bb_info (bi);
2403 fbi->bb_infos.release ();
2404 }
2405
2406 /* Initialize the array describing properties of formal parameters
2407 of NODE, analyze their uses and compute jump functions associated
2408 with actual arguments of calls from within NODE. */
2409
2410 void
2411 ipa_analyze_node (struct cgraph_node *node)
2412 {
2413 struct ipa_func_body_info fbi;
2414 struct ipa_node_params *info;
2415
2416 ipa_check_create_node_params ();
2417 ipa_check_create_edge_args ();
2418 info = IPA_NODE_REF (node);
2419
2420 if (info->analysis_done)
2421 return;
2422 info->analysis_done = 1;
2423
2424 if (ipa_func_spec_opts_forbid_analysis_p (node))
2425 {
2426 for (int i = 0; i < ipa_get_param_count (info); i++)
2427 {
2428 ipa_set_param_used (info, i, true);
2429 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2430 }
2431 return;
2432 }
2433
2434 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2435 push_cfun (func);
2436 calculate_dominance_info (CDI_DOMINATORS);
2437 ipa_initialize_node_params (node);
2438 ipa_analyze_controlled_uses (node);
2439
2440 fbi.node = node;
2441 fbi.info = IPA_NODE_REF (node);
2442 fbi.bb_infos = vNULL;
2443 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2444 fbi.param_count = ipa_get_param_count (info);
2445 fbi.aa_walked = 0;
2446
2447 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2448 {
2449 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2450 bi->cg_edges.safe_push (cs);
2451 }
2452
2453 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2454 {
2455 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2456 bi->cg_edges.safe_push (cs);
2457 }
2458
2459 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2460
2461 ipa_release_body_info (&fbi);
2462 free_dominance_info (CDI_DOMINATORS);
2463 pop_cfun ();
2464 }
2465
2466 /* Update the jump functions associated with call graph edge E when the call
2467 graph edge CS is being inlined, assuming that E->caller is already (possibly
2468 indirectly) inlined into CS->callee and that E has not been inlined. */
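/* For example, if CS passed its caller's parameter 0 unchanged as the
   argument that became the callee's parameter 2, and E passes the callee's
   parameter 2 through, then after inlining the jump function of E is
   composed into a simple pass-through of the new root's parameter 0.  */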
2469
2470 static void
2471 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2472 struct cgraph_edge *e)
2473 {
2474 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2475 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2476 int count = ipa_get_cs_argument_count (args);
2477 int i;
2478
2479 for (i = 0; i < count; i++)
2480 {
2481 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2482 struct ipa_polymorphic_call_context *dst_ctx
2483 = ipa_get_ith_polymorhic_call_context (args, i);
2484
2485 if (dst->type == IPA_JF_ANCESTOR)
2486 {
2487 struct ipa_jump_func *src;
2488 int dst_fid = dst->value.ancestor.formal_id;
2489 struct ipa_polymorphic_call_context *src_ctx
2490 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2491
2492 /* A variable number of arguments can cause havoc if we try to access
2493 an argument that does not exist on the inlined edge. So make sure we
2494 don't. */
2495 if (dst_fid >= ipa_get_cs_argument_count (top))
2496 {
2497 ipa_set_jf_unknown (dst);
2498 continue;
2499 }
2500
2501 src = ipa_get_ith_jump_func (top, dst_fid);
2502
2503 if (src_ctx && !src_ctx->useless_p ())
2504 {
2505 struct ipa_polymorphic_call_context ctx = *src_ctx;
2506
2507 /* TODO: Make type preserved safe WRT contexts. */
2508 if (!ipa_get_jf_ancestor_type_preserved (dst))
2509 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2510 ctx.offset_by (dst->value.ancestor.offset);
2511 if (!ctx.useless_p ())
2512 {
2513 if (!dst_ctx)
2514 {
2515 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2516 count);
2517 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2518 }
2519
2520 dst_ctx->combine_with (ctx);
2521 }
2522 }
2523
2524 if (src->agg.items
2525 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2526 {
2527 struct ipa_agg_jf_item *item;
2528 int j;
2529
2530 /* Currently we do not produce clobber aggregate jump functions,
2531 replace with merging when we do. */
2532 gcc_assert (!dst->agg.items);
2533
2534 dst->agg.items = vec_safe_copy (src->agg.items);
2535 dst->agg.by_ref = src->agg.by_ref;
2536 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2537 item->offset -= dst->value.ancestor.offset;
2538 }
2539
2540 if (src->type == IPA_JF_PASS_THROUGH
2541 && src->value.pass_through.operation == NOP_EXPR)
2542 {
2543 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2544 dst->value.ancestor.agg_preserved &=
2545 src->value.pass_through.agg_preserved;
2546 }
2547 else if (src->type == IPA_JF_PASS_THROUGH
2548 && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
2549 {
2550 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2551 dst->value.ancestor.agg_preserved = false;
2552 }
2553 else if (src->type == IPA_JF_ANCESTOR)
2554 {
2555 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2556 dst->value.ancestor.offset += src->value.ancestor.offset;
2557 dst->value.ancestor.agg_preserved &=
2558 src->value.ancestor.agg_preserved;
2559 }
2560 else
2561 ipa_set_jf_unknown (dst);
2562 }
2563 else if (dst->type == IPA_JF_PASS_THROUGH)
2564 {
2565 struct ipa_jump_func *src;
2566 /* We must check the range due to calls with a variable number of arguments
2567 and we cannot combine jump functions with operations. */
2568 if (dst->value.pass_through.operation == NOP_EXPR
2569 && (dst->value.pass_through.formal_id
2570 < ipa_get_cs_argument_count (top)))
2571 {
2572 int dst_fid = dst->value.pass_through.formal_id;
2573 src = ipa_get_ith_jump_func (top, dst_fid);
2574 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2575 struct ipa_polymorphic_call_context *src_ctx
2576 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2577
2578 if (src_ctx && !src_ctx->useless_p ())
2579 {
2580 struct ipa_polymorphic_call_context ctx = *src_ctx;
2581
2582 /* TODO: Make type preserved safe WRT contexts. */
2583 if (!ipa_get_jf_pass_through_type_preserved (dst))
2584 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2585 if (!ctx.useless_p ())
2586 {
2587 if (!dst_ctx)
2588 {
2589 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2590 count);
2591 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2592 }
2593 dst_ctx->combine_with (ctx);
2594 }
2595 }
2596 switch (src->type)
2597 {
2598 case IPA_JF_UNKNOWN:
2599 ipa_set_jf_unknown (dst);
2600 break;
2601 case IPA_JF_CONST:
2602 ipa_set_jf_cst_copy (dst, src);
2603 break;
2604
2605 case IPA_JF_PASS_THROUGH:
2606 {
2607 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2608 enum tree_code operation;
2609 operation = ipa_get_jf_pass_through_operation (src);
2610
2611 if (operation == NOP_EXPR)
2612 {
2613 bool agg_p;
2614 agg_p = dst_agg_p
2615 && ipa_get_jf_pass_through_agg_preserved (src);
2616 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2617 }
2618 else if (TREE_CODE_CLASS (operation) == tcc_unary)
2619 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
2620 else
2621 {
2622 tree operand = ipa_get_jf_pass_through_operand (src);
2623 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2624 operation);
2625 }
2626 break;
2627 }
2628 case IPA_JF_ANCESTOR:
2629 {
2630 bool agg_p;
2631 agg_p = dst_agg_p
2632 && ipa_get_jf_ancestor_agg_preserved (src);
2633 ipa_set_ancestor_jf (dst,
2634 ipa_get_jf_ancestor_offset (src),
2635 ipa_get_jf_ancestor_formal_id (src),
2636 agg_p);
2637 break;
2638 }
2639 default:
2640 gcc_unreachable ();
2641 }
2642
2643 if (src->agg.items
2644 && (dst_agg_p || !src->agg.by_ref))
2645 {
2646 /* Currently we do not produce clobber aggregate jump
2647 functions, replace with merging when we do. */
2648 gcc_assert (!dst->agg.items);
2649
2650 dst->agg.by_ref = src->agg.by_ref;
2651 dst->agg.items = vec_safe_copy (src->agg.items);
2652 }
2653 }
2654 else
2655 ipa_set_jf_unknown (dst);
2656 }
2657 }
2658 }
2659
2660 /* If TARGET is an addr_expr of a function declaration, make it the
2661 (SPECULATIVE) destination of an indirect edge IE and return the edge.
2662 Otherwise, return NULL. */
2663
2664 struct cgraph_edge *
2665 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2666 bool speculative)
2667 {
2668 struct cgraph_node *callee;
2669 struct inline_edge_summary *es = inline_edge_summary (ie);
2670 bool unreachable = false;
2671
2672 if (TREE_CODE (target) == ADDR_EXPR)
2673 target = TREE_OPERAND (target, 0);
2674 if (TREE_CODE (target) != FUNCTION_DECL)
2675 {
2676 target = canonicalize_constructor_val (target, NULL);
2677 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2678 {
2679 /* Member pointer call that goes through a VMT lookup. */
2680 if (ie->indirect_info->member_ptr
2681 /* Or if target is not an invariant expression and we do not
2682 know if it will evaluate to a function at runtime.
2683 This can happen when folding through &VAR, where &VAR
2684 is IP invariant, but VAR itself is not.
2685
2686 TODO: Revisit this when GCC 5 is branched. It seems that
2687 member_ptr check is not needed and that we may try to fold
2688 the expression and see if VAR is readonly. */
2689 || !is_gimple_ip_invariant (target))
2690 {
2691 if (dump_enabled_p ())
2692 {
2693 location_t loc = gimple_location_safe (ie->call_stmt);
2694 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2695 "discovered direct call non-invariant "
2696 "%s/%i\n",
2697 ie->caller->name (), ie->caller->order);
2698 }
2699 return NULL;
2700 }
2701
2703 if (dump_enabled_p ())
2704 {
2705 location_t loc = gimple_location_safe (ie->call_stmt);
2706 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2707 "discovered direct call to non-function in %s/%i, "
2708 "making it __builtin_unreachable\n",
2709 ie->caller->name (), ie->caller->order);
2710 }
2711
2712 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2713 callee = cgraph_node::get_create (target);
2714 unreachable = true;
2715 }
2716 else
2717 callee = cgraph_node::get (target);
2718 }
2719 else
2720 callee = cgraph_node::get (target);
2721
2722 /* Because may-edges are not explicitly represented and the vtable may be
2723 external, we may create the first reference to the object in the unit. */
2724 if (!callee || callee->global.inlined_to)
2725 {
2726
2727 /* We had better ensure we can refer to it.
2728 In the case of static functions we are out of luck, since we have already
2729 removed their bodies. In the case of public functions we may or may
2730 not introduce the reference. */
2731 if (!canonicalize_constructor_val (target, NULL)
2732 || !TREE_PUBLIC (target))
2733 {
2734 if (dump_file)
2735 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2736 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2737 xstrdup_for_dump (ie->caller->name ()),
2738 ie->caller->order,
2739 xstrdup_for_dump (ie->callee->name ()),
2740 ie->callee->order);
2741 return NULL;
2742 }
2743 callee = cgraph_node::get_create (target);
2744 }
2745
2746 /* If the edge is already speculated, check whether the speculation agrees. */
2747 if (speculative && ie->speculative)
2748 {
2749 struct cgraph_edge *e2;
2750 struct ipa_ref *ref;
2751 ie->speculative_call_info (e2, ie, ref);
2752 if (e2->callee->ultimate_alias_target ()
2753 != callee->ultimate_alias_target ())
2754 {
2755 if (dump_file)
2756 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2757 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2758 xstrdup_for_dump (ie->caller->name ()),
2759 ie->caller->order,
2760 xstrdup_for_dump (callee->name ()),
2761 callee->order,
2762 xstrdup_for_dump (e2->callee->name ()),
2763 e2->callee->order);
2764 }
2765 else
2766 {
2767 if (dump_file)
2768 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2769 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2770 xstrdup_for_dump (ie->caller->name ()),
2771 ie->caller->order,
2772 xstrdup_for_dump (callee->name ()),
2773 callee->order);
2774 }
2775 return NULL;
2776 }
2777
2778 if (!dbg_cnt (devirt))
2779 return NULL;
2780
2781 ipa_check_create_node_params ();
2782
2783 /* We cannot make edges to inline clones. It is a bug if someone removed
2784 the cgraph node too early. */
2785 gcc_assert (!callee->global.inlined_to);
2786
2787 if (dump_file && !unreachable)
2788 {
2789 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2790 "(%s/%i -> %s/%i), for stmt ",
2791 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2792 speculative ? "speculative" : "known",
2793 xstrdup_for_dump (ie->caller->name ()),
2794 ie->caller->order,
2795 xstrdup_for_dump (callee->name ()),
2796 callee->order);
2797 if (ie->call_stmt)
2798 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2799 else
2800 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2801 }
2802 if (dump_enabled_p ())
2803 {
2804 location_t loc = gimple_location_safe (ie->call_stmt);
2805
2806 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2807 "converting indirect call in %s to direct call to %s\n",
2808 ie->caller->name (), callee->name ());
2809 }
2810 if (!speculative)
2811 {
2812 struct cgraph_edge *orig = ie;
2813 ie = ie->make_direct (callee);
2814 /* If we resolved a speculative edge, the cost is already up to date for
2815 the direct call (adjusted by inline_edge_duplication_hook). */
2816 if (ie == orig)
2817 {
2818 es = inline_edge_summary (ie);
2819 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2820 - eni_size_weights.call_cost);
2821 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2822 - eni_time_weights.call_cost);
2823 }
2824 }
2825 else
2826 {
2827 if (!callee->can_be_discarded_p ())
2828 {
2829 cgraph_node *alias;
2830 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2831 if (alias)
2832 callee = alias;
2833 }
2834 /* make_speculative will update ie's cost to direct call cost. */
2835 ie = ie->make_speculative
2836 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2837 }
2838
2839 return ie;
2840 }
2841
2842 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
2843 CONSTRUCTOR and return it. Return NULL if the search fails for some
2844 reason. */
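
/* For example, on a target with 32-bit int and 8-bit units, a REQ_OFFSET of
   64 bits into the (hypothetical) initializer

     static const int table[4] = { 1, 2, 3, 4 };

   finds the element with index 2 and yields the constant 3.  */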
2845
2846 static tree
2847 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
2848 {
2849 tree type = TREE_TYPE (constructor);
2850 if (TREE_CODE (type) != ARRAY_TYPE
2851 && TREE_CODE (type) != RECORD_TYPE)
2852 return NULL;
2853
2854 unsigned ix;
2855 tree index, val;
2856 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
2857 {
2858 HOST_WIDE_INT elt_offset;
2859 if (TREE_CODE (type) == ARRAY_TYPE)
2860 {
2861 offset_int off;
2862 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
2863 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
2864
2865 if (index)
2866 {
2867 off = wi::to_offset (index);
2868 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
2869 {
2870 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
2871 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
2872 off = wi::sext (off - wi::to_offset (low_bound),
2873 TYPE_PRECISION (TREE_TYPE (index)));
2874 }
2875 off *= wi::to_offset (unit_size);
2876 }
2877 else
2878 off = wi::to_offset (unit_size) * ix;
2879
2880 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
2881 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
2882 continue;
2883 elt_offset = off.to_shwi ();
2884 }
2885 else if (TREE_CODE (type) == RECORD_TYPE)
2886 {
2887 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
2888 if (DECL_BIT_FIELD (index))
2889 continue;
2890 elt_offset = int_bit_position (index);
2891 }
2892 else
2893 gcc_unreachable ();
2894
2895 if (elt_offset > req_offset)
2896 return NULL;
2897
2898 if (TREE_CODE (val) == CONSTRUCTOR)
2899 return find_constructor_constant_at_offset (val,
2900 req_offset - elt_offset);
2901
2902 if (elt_offset == req_offset
2903 && is_gimple_reg_type (TREE_TYPE (val))
2904 && is_gimple_ip_invariant (val))
2905 return val;
2906 }
2907 return NULL;
2908 }
2909
2910 /* Check whether SCALAR could be used to look up an aggregate interprocedural
2911 invariant from a static constructor and if so, return it. Otherwise return
2912 NULL. */
2913
2914 static tree
2915 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
2916 {
2917 if (by_ref)
2918 {
2919 if (TREE_CODE (scalar) != ADDR_EXPR)
2920 return NULL;
2921 scalar = TREE_OPERAND (scalar, 0);
2922 }
2923
2924 if (!VAR_P (scalar)
2925 || !is_global_var (scalar)
2926 || !TREE_READONLY (scalar)
2927 || !DECL_INITIAL (scalar)
2928 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
2929 return NULL;
2930
2931 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
2932 }
2933
2934 /* Retrieve value from aggregate jump function AGG or static initializer of
2935 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
2936 none. BY_REF specifies whether the value has to be passed by reference or
2937 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
2938 to is set to true if the value comes from an initializer of a constant. */
2939
2940 tree
2941 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
2942 HOST_WIDE_INT offset, bool by_ref,
2943 bool *from_global_constant)
2944 {
2945 struct ipa_agg_jf_item *item;
2946 int i;
2947
2948 if (scalar)
2949 {
2950 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
2951 if (res)
2952 {
2953 if (from_global_constant)
2954 *from_global_constant = true;
2955 return res;
2956 }
2957 }
2958
2959 if (!agg
2960 || by_ref != agg->by_ref)
2961 return NULL;
2962
2963 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2964 if (item->offset == offset)
2965 {
2966 /* Currently we do not have clobber values, return NULL for them once
2967 we do. */
2968 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2969 if (from_global_constant)
2970 *from_global_constant = false;
2971 return item->value;
2972 }
2973 return NULL;
2974 }
2975
2976 /* Remove a reference to SYMBOL from the list of references of a node given by
2977 reference description RDESC. Return true if the reference has been
2978 successfully found and removed. */
2979
2980 static bool
2981 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2982 {
2983 struct ipa_ref *to_del;
2984 struct cgraph_edge *origin;
2985
2986 origin = rdesc->cs;
2987 if (!origin)
2988 return false;
2989 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2990 origin->lto_stmt_uid);
2991 if (!to_del)
2992 return false;
2993
2994 to_del->remove_reference ();
2995 if (dump_file)
2996 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2997 xstrdup_for_dump (origin->caller->name ()),
2998 origin->caller->order, xstrdup_for_dump (symbol->name ()));
2999 return true;
3000 }
3001
3002 /* If JFUNC has a reference description with refcount different from
3003 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3004 NULL. JFUNC must be a constant jump function. */
3005
3006 static struct ipa_cst_ref_desc *
3007 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3008 {
3009 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3010 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3011 return rdesc;
3012 else
3013 return NULL;
3014 }
3015
3016 /* If the value of constant jump function JFUNC is an address of a function
3017 declaration, return the associated call graph node. Otherwise return
3018 NULL. */
3019
3020 static cgraph_node *
3021 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3022 {
3023 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3024 tree cst = ipa_get_jf_constant (jfunc);
3025 if (TREE_CODE (cst) != ADDR_EXPR
3026 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3027 return NULL;
3028
3029 return cgraph_node::get (TREE_OPERAND (cst, 0));
3030 }
3031
3032
3033 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3034 refcount and if it hits zero, remove the reference to the described symbol
3035 from the caller of the edge specified in the rdesc. Return false if either
3036 the symbol or the reference could not be found, otherwise return true. */
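/* The refcount mirrors the number of remaining described uses of the
   constant: once it drops to zero, no described use remains and the
   corresponding IPA reference can be removed.  */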
3037
3038 static bool
3039 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3040 {
3041 struct ipa_cst_ref_desc *rdesc;
3042 if (jfunc->type == IPA_JF_CONST
3043 && (rdesc = jfunc_rdesc_usable (jfunc))
3044 && --rdesc->refcount == 0)
3045 {
3046 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3047 if (!symbol)
3048 return false;
3049
3050 return remove_described_reference (symbol, rdesc);
3051 }
3052 return true;
3053 }
3054
3055 /* Try to find a destination for indirect edge IE that corresponds to a simple
3056 call or a call of a member function pointer and where the destination is a
3057 pointer formal parameter described by jump function JFUNC. If it can be
3058 determined, return the newly direct edge, otherwise return NULL.
3059 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3060
3061 static struct cgraph_edge *
3062 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3063 struct ipa_jump_func *jfunc,
3064 struct ipa_node_params *new_root_info)
3065 {
3066 struct cgraph_edge *cs;
3067 tree target;
3068 bool agg_contents = ie->indirect_info->agg_contents;
3069 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc);
3070 if (agg_contents)
3071 {
3072 bool from_global_constant;
3073 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3074 ie->indirect_info->offset,
3075 ie->indirect_info->by_ref,
3076 &from_global_constant);
3077 if (target
3078 && !from_global_constant
3079 && !ie->indirect_info->guaranteed_unmodified)
3080 return NULL;
3081 }
3082 else
3083 target = scalar;
3084 if (!target)
3085 return NULL;
3086 cs = ipa_make_edge_direct_to_target (ie, target);
3087
3088 if (cs && !agg_contents)
3089 {
3090 bool ok;
3091 gcc_checking_assert (cs->callee
3092 && (cs != ie
3093 || jfunc->type != IPA_JF_CONST
3094 || !cgraph_node_for_jfunc (jfunc)
3095 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3096 ok = try_decrement_rdesc_refcount (jfunc);
3097 gcc_checking_assert (ok);
3098 }
3099
3100 return cs;
3101 }
3102
3103 /* Return the target to be used in cases of impossible devirtualization. IE
3104 and target (the latter can be NULL) are dumped when dumping is enabled. */
3105
3106 tree
3107 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3108 {
3109 if (dump_file)
3110 {
3111 if (target)
3112 fprintf (dump_file,
3113 "Type inconsistent devirtualization: %s/%i->%s\n",
3114 ie->caller->name (), ie->caller->order,
3115 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3116 else
3117 fprintf (dump_file,
3118 "No devirtualization target in %s/%i\n",
3119 ie->caller->name (), ie->caller->order);
3120 }
3121 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3122 cgraph_node::get_create (new_target);
3123 return new_target;
3124 }
3125
3126 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3127 call based on a formal parameter which is described by jump function JFUNC
3128 and if it can be determined, make it direct and return the direct edge.
3129 Otherwise, return NULL. CTX describes the polymorphic context that the
3130 parameter on which the call is based brings along with it. */
3131
3132 static struct cgraph_edge *
3133 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3134 struct ipa_jump_func *jfunc,
3135 struct ipa_polymorphic_call_context ctx)
3136 {
3137 tree target = NULL;
3138 bool speculative = false;
3139
3140 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3141 return NULL;
3142
3143 gcc_assert (!ie->indirect_info->by_ref);
3144
3145 /* Try to do lookup via known virtual table pointer value. */
3146 if (!ie->indirect_info->vptr_changed
3147 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3148 {
3149 tree vtable;
3150 unsigned HOST_WIDE_INT offset;
3151 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3152 : NULL;
3153 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3154 ie->indirect_info->offset,
3155 true);
3156 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3157 {
3158 bool can_refer;
3159 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3160 vtable, offset, &can_refer);
3161 if (can_refer)
3162 {
3163 if (!t
3164 || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3165 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3166 || !possible_polymorphic_call_target_p
3167 (ie, cgraph_node::get (t)))
3168 {
3169 /* Do not speculate builtin_unreachable, it is stupid! */
3170 if (!ie->indirect_info->vptr_changed)
3171 target = ipa_impossible_devirt_target (ie, target);
3172 else
3173 target = NULL;
3174 }
3175 else
3176 {
3177 target = t;
3178 speculative = ie->indirect_info->vptr_changed;
3179 }
3180 }
3181 }
3182 }
3183
3184 ipa_polymorphic_call_context ie_context (ie);
3185 vec <cgraph_node *>targets;
3186 bool final;
3187
3188 ctx.offset_by (ie->indirect_info->offset);
3189 if (ie->indirect_info->vptr_changed)
3190 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3191 ie->indirect_info->otr_type);
3192 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3193 targets = possible_polymorphic_call_targets
3194 (ie->indirect_info->otr_type,
3195 ie->indirect_info->otr_token,
3196 ctx, &final);
3197 if (final && targets.length () <= 1)
3198 {
3199 speculative = false;
3200 if (targets.length () == 1)
3201 target = targets[0]->decl;
3202 else
3203 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3204 }
3205 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3206 && !ie->speculative && ie->maybe_hot_p ())
3207 {
3208 cgraph_node *n;
3209 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3210 ie->indirect_info->otr_token,
3211 ie->indirect_info->context);
3212 if (n)
3213 {
3214 target = n->decl;
3215 speculative = true;
3216 }
3217 }
3218
3219 if (target)
3220 {
3221 if (!possible_polymorphic_call_target_p
3222 (ie, cgraph_node::get_create (target)))
3223 {
3224 if (speculative)
3225 return NULL;
3226 target = ipa_impossible_devirt_target (ie, target);
3227 }
3228 return ipa_make_edge_direct_to_target (ie, target, speculative);
3229 }
3230 else
3231 return NULL;
3232 }
3233
3234 /* Update the param called notes associated with NODE when CS is being inlined,
3235 assuming NODE is (potentially indirectly) inlined into CS->callee.
3236 Moreover, if the callee is discovered to be constant, create a new cgraph
3237 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3238 unless NEW_EDGES is NULL. Return true iff any new edges were created. */
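/* For example, if the inlined body calls through its parameter 1 and the
   jump function for the corresponding argument is a simple pass-through of
   the caller's parameter 3, the indirect edge is re-described as a call
   through the new root's parameter 3.  */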
3239
3240 static bool
3241 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3242 struct cgraph_node *node,
3243 vec<cgraph_edge *> *new_edges)
3244 {
3245 struct ipa_edge_args *top;
3246 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3247 struct ipa_node_params *new_root_info;
3248 bool res = false;
3249
3250 ipa_check_create_edge_args ();
3251 top = IPA_EDGE_REF (cs);
3252 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3253 ? cs->caller->global.inlined_to
3254 : cs->caller);
3255
3256 for (ie = node->indirect_calls; ie; ie = next_ie)
3257 {
3258 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3259 struct ipa_jump_func *jfunc;
3260 int param_index;
3261 cgraph_node *spec_target = NULL;
3262
3263 next_ie = ie->next_callee;
3264
3265 if (ici->param_index == -1)
3266 continue;
3267
3268 /* We must check the range due to calls with a variable number of arguments. */
3269 if (ici->param_index >= ipa_get_cs_argument_count (top))
3270 {
3271 ici->param_index = -1;
3272 continue;
3273 }
3274
3275 param_index = ici->param_index;
3276 jfunc = ipa_get_ith_jump_func (top, param_index);
3277
3278 if (ie->speculative)
3279 {
3280 struct cgraph_edge *de;
3281 struct ipa_ref *ref;
3282 ie->speculative_call_info (de, ie, ref);
3283 spec_target = de->callee;
3284 }
3285
3286 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3287 new_direct_edge = NULL;
3288 else if (ici->polymorphic)
3289 {
3290 ipa_polymorphic_call_context ctx;
3291 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3292 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3293 }
3294 else
3295 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3296 new_root_info);
3297 /* If speculation was removed, then we need to do nothing. */
3298 if (new_direct_edge && new_direct_edge != ie
3299 && new_direct_edge->callee == spec_target)
3300 {
3301 new_direct_edge->indirect_inlining_edge = 1;
3302 top = IPA_EDGE_REF (cs);
3303 res = true;
3304 if (!new_direct_edge->speculative)
3305 continue;
3306 }
3307 else if (new_direct_edge)
3308 {
3309 new_direct_edge->indirect_inlining_edge = 1;
3310 if (new_direct_edge->call_stmt)
3311 new_direct_edge->call_stmt_cannot_inline_p
3312 = !gimple_check_call_matching_types (
3313 new_direct_edge->call_stmt,
3314 new_direct_edge->callee->decl, false);
3315 if (new_edges)
3316 {
3317 new_edges->safe_push (new_direct_edge);
3318 res = true;
3319 }
3320 top = IPA_EDGE_REF (cs);
3321 /* If a speculative edge was introduced, we still need to update
3322 the call info of the indirect edge. */
3323 if (!new_direct_edge->speculative)
3324 continue;
3325 }
3326 if (jfunc->type == IPA_JF_PASS_THROUGH
3327 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3328 {
3329 if (ici->agg_contents
3330 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3331 && !ici->polymorphic)
3332 ici->param_index = -1;
3333 else
3334 {
3335 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3336 if (ici->polymorphic
3337 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3338 ici->vptr_changed = true;
3339 }
3340 }
3341 else if (jfunc->type == IPA_JF_ANCESTOR)
3342 {
3343 if (ici->agg_contents
3344 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3345 && !ici->polymorphic)
3346 ici->param_index = -1;
3347 else
3348 {
3349 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3350 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3351 if (ici->polymorphic
3352 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3353 ici->vptr_changed = true;
3354 }
3355 }
3356 else
3357 /* Either we can find a destination for this edge now or never. */
3358 ici->param_index = -1;
3359 }
3360
3361 return res;
3362 }
3363
3364 /* Recursively traverse subtree of NODE (including node) made of inlined
3365 cgraph_edges when CS has been inlined and invoke
3366 update_indirect_edges_after_inlining on all nodes and
3367 update_jump_functions_after_inlining on all non-inlined edges that lead out
3368 of this subtree. Newly discovered indirect edges will be added to
3369 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
3370 created. */
3371
3372 static bool
3373 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3374 struct cgraph_node *node,
3375 vec<cgraph_edge *> *new_edges)
3376 {
3377 struct cgraph_edge *e;
3378 bool res;
3379
3380 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3381
3382 for (e = node->callees; e; e = e->next_callee)
3383 if (!e->inline_failed)
3384 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3385 else
3386 update_jump_functions_after_inlining (cs, e);
3387 for (e = node->indirect_calls; e; e = e->next_callee)
3388 update_jump_functions_after_inlining (cs, e);
3389
3390 return res;
3391 }
3392
3393 /* Combine two controlled uses counts as done during inlining. */
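/* For instance, combining a caller-side count of 2 with a callee-side count
   of 3 yields 2 + 3 - 1 == 4: the use at the inlined call site itself
   disappears and is replaced by the uses within the inlined body.  */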
3394
3395 static int
3396 combine_controlled_uses_counters (int c, int d)
3397 {
3398 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3399 return IPA_UNDESCRIBED_USE;
3400 else
3401 return c + d - 1;
3402 }
3403
3404 /* Propagate the number of controlled uses from CS->callee to the new root
3405 of the tree of inlined nodes. */
3406
3407 static void
3408 propagate_controlled_uses (struct cgraph_edge *cs)
3409 {
3410 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3411 struct cgraph_node *new_root = cs->caller->global.inlined_to
3412 ? cs->caller->global.inlined_to : cs->caller;
3413 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3414 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3415 int count, i;
3416
3417 count = MIN (ipa_get_cs_argument_count (args),
3418 ipa_get_param_count (old_root_info));
3419 for (i = 0; i < count; i++)
3420 {
3421 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3422 struct ipa_cst_ref_desc *rdesc;
3423
3424 if (jf->type == IPA_JF_PASS_THROUGH)
3425 {
3426 int src_idx, c, d;
3427 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3428 c = ipa_get_controlled_uses (new_root_info, src_idx);
3429 d = ipa_get_controlled_uses (old_root_info, i);
3430
3431 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3432 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3433 c = combine_controlled_uses_counters (c, d);
3434 ipa_set_controlled_uses (new_root_info, src_idx, c);
3435 if (c == 0 && new_root_info->ipcp_orig_node)
3436 {
3437 struct cgraph_node *n;
3438 struct ipa_ref *ref;
3439 tree t = new_root_info->known_csts[src_idx];
3440
3441 if (t && TREE_CODE (t) == ADDR_EXPR
3442 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3443 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3444 && (ref = new_root->find_reference (n, NULL, 0)))
3445 {
3446 if (dump_file)
3447 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3448 "reference from %s/%i to %s/%i.\n",
3449 xstrdup_for_dump (new_root->name ()),
3450 new_root->order,
3451 xstrdup_for_dump (n->name ()), n->order);
3452 ref->remove_reference ();
3453 }
3454 }
3455 }
3456 else if (jf->type == IPA_JF_CONST
3457 && (rdesc = jfunc_rdesc_usable (jf)))
3458 {
3459 int d = ipa_get_controlled_uses (old_root_info, i);
3460 int c = rdesc->refcount;
3461 rdesc->refcount = combine_controlled_uses_counters (c, d);
3462 if (rdesc->refcount == 0)
3463 {
3464 tree cst = ipa_get_jf_constant (jf);
3465 struct cgraph_node *n;
3466 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3467 && TREE_CODE (TREE_OPERAND (cst, 0))
3468 == FUNCTION_DECL);
3469 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3470 if (n)
3471 {
3472 struct cgraph_node *clone;
3473 bool ok;
3474 ok = remove_described_reference (n, rdesc);
3475 gcc_checking_assert (ok);
3476
3477 clone = cs->caller;
3478 while (clone->global.inlined_to
3479 && clone != rdesc->cs->caller
3480 && IPA_NODE_REF (clone)->ipcp_orig_node)
3481 {
3482 struct ipa_ref *ref;
3483 ref = clone->find_reference (n, NULL, 0);
3484 if (ref)
3485 {
3486 if (dump_file)
3487 fprintf (dump_file, "ipa-prop: Removing "
3488 "cloning-created reference "
3489 "from %s/%i to %s/%i.\n",
3490 xstrdup_for_dump (clone->name ()),
3491 clone->order,
3492 xstrdup_for_dump (n->name ()),
3493 n->order);
3494 ref->remove_reference ();
3495 }
3496 clone = clone->callers->caller;
3497 }
3498 }
3499 }
3500 }
3501 }
3502
3503 for (i = ipa_get_param_count (old_root_info);
3504 i < ipa_get_cs_argument_count (args);
3505 i++)
3506 {
3507 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3508
3509 if (jf->type == IPA_JF_CONST)
3510 {
3511 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3512 if (rdesc)
3513 rdesc->refcount = IPA_UNDESCRIBED_USE;
3514 }
3515 else if (jf->type == IPA_JF_PASS_THROUGH)
3516 ipa_set_controlled_uses (new_root_info,
3517 jf->value.pass_through.formal_id,
3518 IPA_UNDESCRIBED_USE);
3519 }
3520 }
3521
3522 /* Update jump functions and call note functions on inlining the call site CS.
3523 CS is expected to lead to a node already cloned by
3524 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3525 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
3526 created. */
3527
3528 bool
3529 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3530 vec<cgraph_edge *> *new_edges)
3531 {
3532 bool changed;
3533 /* Do nothing if the preparation phase has not been carried out yet
3534 (i.e. during early inlining). */
3535 if (!ipa_node_params_sum)
3536 return false;
3537 gcc_assert (ipa_edge_args_vector);
3538
3539 propagate_controlled_uses (cs);
3540 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3541
3542 return changed;
3543 }
3544
3545 /* Frees all dynamically allocated structures that the argument info points
3546 to. */
3547
3548 void
3549 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3550 {
3551 vec_free (args->jump_functions);
3552 memset (args, 0, sizeof (*args));
3553 }
3554
3555 /* Free all ipa_edge structures. */
3556
3557 void
3558 ipa_free_all_edge_args (void)
3559 {
3560 int i;
3561 struct ipa_edge_args *args;
3562
3563 if (!ipa_edge_args_vector)
3564 return;
3565
3566 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3567 ipa_free_edge_args_substructures (args);
3568
3569 vec_free (ipa_edge_args_vector);
3570 }
3571
3572 /* Free all ipa_node_params structures. */
3573
3574 void
3575 ipa_free_all_node_params (void)
3576 {
3577 ipa_node_params_sum->release ();
3578 ipa_node_params_sum = NULL;
3579 }
3580
3581 /* Grow ipcp_transformations if necessary. */
3582
3583 void
3584 ipcp_grow_transformations_if_necessary (void)
3585 {
3586 if (vec_safe_length (ipcp_transformations)
3587 <= (unsigned) symtab->cgraph_max_uid)
3588 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3589 }
3590
3591 /* Set the aggregate replacements of NODE to be AGGVALS. */
3592
3593 void
3594 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3595 struct ipa_agg_replacement_value *aggvals)
3596 {
3597 ipcp_grow_transformations_if_necessary ();
3598 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3599 }
3600
3601 /* Hook that is called by cgraph.c when an edge is removed. */
3602
3603 static void
3604 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3605 {
3606 struct ipa_edge_args *args;
3607
3608 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3609 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3610 return;
3611
3612 args = IPA_EDGE_REF (cs);
3613 if (args->jump_functions)
3614 {
3615 struct ipa_jump_func *jf;
3616 int i;
3617 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3618 {
3619 struct ipa_cst_ref_desc *rdesc;
3620 try_decrement_rdesc_refcount (jf);
3621 if (jf->type == IPA_JF_CONST
3622 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3623 && rdesc->cs == cs)
3624 rdesc->cs = NULL;
3625 }
3626 }
3627
3628 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3629 }
3630
3631 /* Hook that is called by cgraph.c when an edge is duplicated. */
3632
3633 static void
3634 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3635 void *)
3636 {
3637 struct ipa_edge_args *old_args, *new_args;
3638 unsigned int i;
3639
3640 ipa_check_create_edge_args ();
3641
3642 old_args = IPA_EDGE_REF (src);
3643 new_args = IPA_EDGE_REF (dst);
3644
3645 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3646 if (old_args->polymorphic_call_contexts)
3647 new_args->polymorphic_call_contexts
3648 = vec_safe_copy (old_args->polymorphic_call_contexts);
3649
3650 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3651 {
3652 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3653 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3654
3655 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3656
3657 if (src_jf->type == IPA_JF_CONST)
3658 {
3659 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3660
3661 if (!src_rdesc)
3662 dst_jf->value.constant.rdesc = NULL;
3663 else if (src->caller == dst->caller)
3664 {
3665 struct ipa_ref *ref;
3666 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3667 gcc_checking_assert (n);
3668 ref = src->caller->find_reference (n, src->call_stmt,
3669 src->lto_stmt_uid);
3670 gcc_checking_assert (ref);
3671 dst->caller->clone_reference (ref, ref->stmt);
3672
3673 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3674 dst_rdesc->cs = dst;
3675 dst_rdesc->refcount = src_rdesc->refcount;
3676 dst_rdesc->next_duplicate = NULL;
3677 dst_jf->value.constant.rdesc = dst_rdesc;
3678 }
3679 else if (src_rdesc->cs == src)
3680 {
3681 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3682 dst_rdesc->cs = dst;
3683 dst_rdesc->refcount = src_rdesc->refcount;
3684 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3685 src_rdesc->next_duplicate = dst_rdesc;
3686 dst_jf->value.constant.rdesc = dst_rdesc;
3687 }
3688 else
3689 {
3690 struct ipa_cst_ref_desc *dst_rdesc;
3691 /* This can happen during inlining, when a JFUNC can refer to a
3692 reference taken in a function up in the tree of inline clones.
3693 We need to find the duplicate that refers to our tree of
3694 inline clones. */
3695
3696 gcc_assert (dst->caller->global.inlined_to);
3697 for (dst_rdesc = src_rdesc->next_duplicate;
3698 dst_rdesc;
3699 dst_rdesc = dst_rdesc->next_duplicate)
3700 {
3701 struct cgraph_node *top;
3702 top = dst_rdesc->cs->caller->global.inlined_to
3703 ? dst_rdesc->cs->caller->global.inlined_to
3704 : dst_rdesc->cs->caller;
3705 if (dst->caller->global.inlined_to == top)
3706 break;
3707 }
3708 gcc_assert (dst_rdesc);
3709 dst_jf->value.constant.rdesc = dst_rdesc;
3710 }
3711 }
3712 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3713 && src->caller == dst->caller)
3714 {
3715 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3716 ? dst->caller->global.inlined_to : dst->caller;
3717 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3718 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3719
3720 int c = ipa_get_controlled_uses (root_info, idx);
3721 if (c != IPA_UNDESCRIBED_USE)
3722 {
3723 c++;
3724 ipa_set_controlled_uses (root_info, idx, c);
3725 }
3726 }
3727 }
3728 }
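/* An illustrative note (not in the original sources): the next_duplicate
   walk above matters when inlining nests clones. Suppose function A passes
   &f through edge E and A is inlined into two different functions B1 and
   B2; each inlining duplicates E and chains a fresh ipa_cst_ref_desc onto
   src_rdesc->next_duplicate. When an edge inside B1's tree of inline
   clones is duplicated again, the loop finds the chain entry whose
   cs->caller's topmost inline host (global.inlined_to, or the caller
   itself) equals dst->caller->global.inlined_to, i.e. the duplicate that
   belongs to the same tree of inline clones. */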
3729
3730 /* Analyze a function newly added to the callgraph. */
3731
3732 static void
3733 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3734 {
3735 if (node->has_gimple_body_p ())
3736 ipa_analyze_node (node);
3737 }
3738
3739 /* Hook that is called by summary when a node is duplicated. */
3740
3741 void
3742 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3743 ipa_node_params *old_info,
3744 ipa_node_params *new_info)
3745 {
3746 ipa_agg_replacement_value *old_av, *new_av;
3747
3748 new_info->descriptors = vec_safe_copy (old_info->descriptors);
3749 new_info->lattices = NULL;
3750 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3751 new_info->known_csts = old_info->known_csts.copy ();
3752 new_info->known_contexts = old_info->known_contexts.copy ();
3753
3754 new_info->analysis_done = old_info->analysis_done;
3755 new_info->node_enqueued = old_info->node_enqueued;
3756 new_info->versionable = old_info->versionable;
3757
3758 old_av = ipa_get_agg_replacements_for_node (src);
3759 if (old_av)
3760 {
3761 new_av = NULL;
3762 while (old_av)
3763 {
3764 struct ipa_agg_replacement_value *v;
3765
3766 v = ggc_alloc<ipa_agg_replacement_value> ();
3767 memcpy (v, old_av, sizeof (*v));
3768 v->next = new_av;
3769 new_av = v;
3770 old_av = old_av->next;
3771 }
3772 ipa_set_node_agg_value_chain (dst, new_av);
3773 }
3774
3775 ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
3776
3777 if (src_trans)
3778 {
3779 ipcp_grow_transformations_if_necessary ();
3780 src_trans = ipcp_get_transformation_summary (src);
3781 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
3782 vec<ipa_vr, va_gc> *&dst_vr
3783 = ipcp_get_transformation_summary (dst)->m_vr;
3784 if (vec_safe_length (src_trans->m_vr) > 0)
3785 {
3786 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3787 for (unsigned i = 0; i < src_vr->length (); ++i)
3788 dst_vr->quick_push ((*src_vr)[i]);
3789 }
3790 }
3791
3792 if (src_trans && vec_safe_length (src_trans->bits) > 0)
3793 {
3794 ipcp_grow_transformations_if_necessary ();
3795 src_trans = ipcp_get_transformation_summary (src);
3796 const vec<ipa_bits, va_gc> *src_bits = src_trans->bits;
3797 vec<ipa_bits, va_gc> *&dst_bits
3798 = ipcp_get_transformation_summary (dst)->bits;
3799 vec_safe_reserve_exact (dst_bits, src_bits->length ());
3800 for (unsigned i = 0; i < src_bits->length (); ++i)
3801 dst_bits->quick_push ((*src_bits)[i]);
3802 }
3803 }
3804
3805 /* Register our cgraph hooks if they are not already there. */
3806
3807 void
3808 ipa_register_cgraph_hooks (void)
3809 {
3810 ipa_check_create_node_params ();
3811
3812 if (!edge_removal_hook_holder)
3813 edge_removal_hook_holder =
3814 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3815 if (!edge_duplication_hook_holder)
3816 edge_duplication_hook_holder =
3817 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3818 function_insertion_hook_holder =
3819 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3820 }
3821
3822 /* Unregister our cgraph hooks if they have been registered. */
3823
3824 static void
3825 ipa_unregister_cgraph_hooks (void)
3826 {
3827 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3828 edge_removal_hook_holder = NULL;
3829 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3830 edge_duplication_hook_holder = NULL;
3831 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3832 function_insertion_hook_holder = NULL;
3833 }
3834
3835 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3836 longer needed after ipa-cp. */
3837
3838 void
3839 ipa_free_all_structures_after_ipa_cp (void)
3840 {
3841 if (!optimize && !in_lto_p)
3842 {
3843 ipa_free_all_edge_args ();
3844 ipa_free_all_node_params ();
3845 ipcp_sources_pool.release ();
3846 ipcp_cst_values_pool.release ();
3847 ipcp_poly_ctx_values_pool.release ();
3848 ipcp_agg_lattice_pool.release ();
3849 ipa_unregister_cgraph_hooks ();
3850 ipa_refdesc_pool.release ();
3851 }
3852 }
3853
3854 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3855 longer needed after indirect inlining. */
3856
3857 void
3858 ipa_free_all_structures_after_iinln (void)
3859 {
3860 ipa_free_all_edge_args ();
3861 ipa_free_all_node_params ();
3862 ipa_unregister_cgraph_hooks ();
3863 ipcp_sources_pool.release ();
3864 ipcp_cst_values_pool.release ();
3865 ipcp_poly_ctx_values_pool.release ();
3866 ipcp_agg_lattice_pool.release ();
3867 ipa_refdesc_pool.release ();
3868 }
3869
3870 /* Print ipa_tree_map data structures of function NODE to F. */
3872
3873 void
3874 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3875 {
3876 int i, count;
3877 struct ipa_node_params *info;
3878
3879 if (!node->definition)
3880 return;
3881 info = IPA_NODE_REF (node);
3882 fprintf (f, " function %s/%i parameter descriptors:\n",
3883 node->name (), node->order);
3884 count = ipa_get_param_count (info);
3885 for (i = 0; i < count; i++)
3886 {
3887 int c;
3888
3889 fprintf (f, " ");
3890 ipa_dump_param (f, info, i);
3891 if (ipa_is_param_used (info, i))
3892 fprintf (f, " used");
3893 c = ipa_get_controlled_uses (info, i);
3894 if (c == IPA_UNDESCRIBED_USE)
3895 fprintf (f, " undescribed_use");
3896 else
3897 fprintf (f, " controlled_uses=%i", c);
3898 fprintf (f, "\n");
3899 }
3900 }
3901
3902 /* Print ipa_tree_map data structures of all functions in the
3903 callgraph to F. */
3904
3905 void
3906 ipa_print_all_params (FILE * f)
3907 {
3908 struct cgraph_node *node;
3909
3910 fprintf (f, "\nFunction parameters:\n");
3911 FOR_EACH_FUNCTION (node)
3912 ipa_print_node_params (f, node);
3913 }
3914
3915 /* Return a heap-allocated vector containing the formal parameters of FNDECL. */
3916
3917 vec<tree>
3918 ipa_get_vector_of_formal_parms (tree fndecl)
3919 {
3920 vec<tree> args;
3921 int count;
3922 tree parm;
3923
3924 gcc_assert (!flag_wpa);
3925 count = count_formal_params (fndecl);
3926 args.create (count);
3927 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3928 args.quick_push (parm);
3929
3930 return args;
3931 }
3932
3933 /* Return a heap-allocated vector containing the types of the formal
3934 parameters of function type FNTYPE. */
3935
3936 vec<tree>
3937 ipa_get_vector_of_formal_parm_types (tree fntype)
3938 {
3939 vec<tree> types;
3940 int count = 0;
3941 tree t;
3942
3943 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3944 count++;
3945
3946 types.create (count);
3947 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3948 types.quick_push (TREE_VALUE (t));
3949
3950 return types;
3951 }
3952
3953 /* Modify the function declaration FNDECL and its type according to the plan in
3954 ADJUSTMENTS. It also sets the base fields of individual adjustment
3955 structures to the actual parameters being modified, which are determined
3956 by their base_index fields. */
3957
3958 void
3959 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3960 {
3961 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3962 tree orig_type = TREE_TYPE (fndecl);
3963 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3964
3965 /* The following test is an ugly hack; some functions simply don't have any
3966 arguments in their type. This is probably a bug but well... */
3967 bool care_for_types = (old_arg_types != NULL_TREE);
3968 bool last_parm_void;
3969 vec<tree> otypes;
3970 if (care_for_types)
3971 {
3972 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3973 == void_type_node);
3974 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3975 if (last_parm_void)
3976 gcc_assert (oparms.length () + 1 == otypes.length ());
3977 else
3978 gcc_assert (oparms.length () == otypes.length ());
3979 }
3980 else
3981 {
3982 last_parm_void = false;
3983 otypes.create (0);
3984 }
3985
3986 int len = adjustments.length ();
3987 tree *link = &DECL_ARGUMENTS (fndecl);
3988 tree new_arg_types = NULL;
3989 for (int i = 0; i < len; i++)
3990 {
3991 struct ipa_parm_adjustment *adj;
3992 gcc_assert (link);
3993
3994 adj = &adjustments[i];
3995 tree parm;
3996 if (adj->op == IPA_PARM_OP_NEW)
3997 parm = NULL;
3998 else
3999 parm = oparms[adj->base_index];
4000 adj->base = parm;
4001
4002 if (adj->op == IPA_PARM_OP_COPY)
4003 {
4004 if (care_for_types)
4005 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
4006 new_arg_types);
4007 *link = parm;
4008 link = &DECL_CHAIN (parm);
4009 }
4010 else if (adj->op != IPA_PARM_OP_REMOVE)
4011 {
4012 tree new_parm;
4013 tree ptype;
4014
4015 if (adj->by_ref)
4016 ptype = build_pointer_type (adj->type);
4017 else
4018 {
4019 ptype = adj->type;
4020 if (is_gimple_reg_type (ptype))
4021 {
4022 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
4023 if (TYPE_ALIGN (ptype) != malign)
4024 ptype = build_aligned_type (ptype, malign);
4025 }
4026 }
4027
4028 if (care_for_types)
4029 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
4030
4031 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
4032 ptype);
4033 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
4034 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
4035 DECL_ARTIFICIAL (new_parm) = 1;
4036 DECL_ARG_TYPE (new_parm) = ptype;
4037 DECL_CONTEXT (new_parm) = fndecl;
4038 TREE_USED (new_parm) = 1;
4039 DECL_IGNORED_P (new_parm) = 1;
4040 layout_decl (new_parm, 0);
4041
4042 if (adj->op == IPA_PARM_OP_NEW)
4043 adj->base = NULL;
4044 else
4045 adj->base = parm;
4046 adj->new_decl = new_parm;
4047
4048 *link = new_parm;
4049 link = &DECL_CHAIN (new_parm);
4050 }
4051 }
4052
4053 *link = NULL_TREE;
4054
4055 tree new_reversed = NULL;
4056 if (care_for_types)
4057 {
4058 new_reversed = nreverse (new_arg_types);
4059 if (last_parm_void)
4060 {
4061 if (new_reversed)
4062 TREE_CHAIN (new_arg_types) = void_list_node;
4063 else
4064 new_reversed = void_list_node;
4065 }
4066 }
4067
4068 /* Use copy_node to preserve as much as possible from the original type
4069 (debug info, attribute lists etc.). The exception is METHOD_TYPEs,
4070 which must have a THIS argument; when we are asked to remove it, we
4071 need to build a new FUNCTION_TYPE instead. */
4073 tree new_type = NULL;
4074 if (TREE_CODE (orig_type) != METHOD_TYPE
4075 || (adjustments[0].op == IPA_PARM_OP_COPY
4076 && adjustments[0].base_index == 0))
4077 {
4078 new_type = build_distinct_type_copy (orig_type);
4079 TYPE_ARG_TYPES (new_type) = new_reversed;
4080 }
4081 else
4082 {
4083 new_type
4084 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
4085 new_reversed));
4086 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
4087 DECL_VINDEX (fndecl) = NULL_TREE;
4088 }
4089
4090 /* When signature changes, we need to clear builtin info. */
4091 if (DECL_BUILT_IN (fndecl))
4092 {
4093 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
4094 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
4095 }
4096
4097 TREE_TYPE (fndecl) = new_type;
4098 DECL_VIRTUAL_P (fndecl) = 0;
4099 DECL_LANG_SPECIFIC (fndecl) = NULL;
4100 otypes.release ();
4101 oparms.release ();
4102 }
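/* A worked example (an illustrative sketch, not from the original
   sources): given

     int foo (int a, struct S *p);

   an adjustment vector whose first element is an IPA_PARM_OP_COPY with
   base_index 0 and whose second element has an op other than
   IPA_PARM_OP_COPY/REMOVE/NEW, base_index 1, type int and by_ref false
   rewrites the declaration to the equivalent of

     int foo (int a, int ISRA_2);

   where ISRA_2 stands for the synthesized PARM_DECL; its name really
   comes from create_tmp_var_name (adj->arg_prefix), so the "ISRA_2"
   spelling here is hypothetical. */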
4103
4104 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
4105 If this is a directly recursive call, CS must be NULL. Otherwise it must
4106 contain the corresponding call graph edge. */
4107
4108 void
4109 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
4110 ipa_parm_adjustment_vec adjustments)
4111 {
4112 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
4113 vec<tree> vargs;
4114 vec<tree, va_gc> **debug_args = NULL;
4115 gcall *new_stmt;
4116 gimple_stmt_iterator gsi, prev_gsi;
4117 tree callee_decl;
4118 int i, len;
4119
4120 len = adjustments.length ();
4121 vargs.create (len);
4122 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4123 current_node->remove_stmt_references (stmt);
4124
4125 gsi = gsi_for_stmt (stmt);
4126 prev_gsi = gsi;
4127 gsi_prev (&prev_gsi);
4128 for (i = 0; i < len; i++)
4129 {
4130 struct ipa_parm_adjustment *adj;
4131
4132 adj = &adjustments[i];
4133
4134 if (adj->op == IPA_PARM_OP_COPY)
4135 {
4136 tree arg = gimple_call_arg (stmt, adj->base_index);
4137
4138 vargs.quick_push (arg);
4139 }
4140 else if (adj->op != IPA_PARM_OP_REMOVE)
4141 {
4142 tree expr, base, off;
4143 location_t loc;
4144 unsigned int deref_align = 0;
4145 bool deref_base = false;
4146
4147 /* We create a new parameter out of the value of the old one; we can
4148 do the following kinds of transformations:
4149
4150 - A scalar passed by reference is converted to a scalar passed by
4151 value. (adj->by_ref is false and the type of the original
4152 actual argument is a pointer to a scalar).
4153
4154 - A part of an aggregate is passed instead of the whole aggregate.
4155 The part can be passed either by value or by reference, this is
4156 determined by value of adj->by_ref. Moreover, the code below
4157 handles both situations when the original aggregate is passed by
4158 value (its type is not a pointer) and when it is passed by
4159 reference (it is a pointer to an aggregate).
4160
4161 When the new argument is passed by reference (adj->by_ref is true)
4162 it must be a part of an aggregate and therefore we form it by
4163 simply taking the address of a reference inside the original
4164 aggregate. */
4165
4166 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4167 base = gimple_call_arg (stmt, adj->base_index);
4168 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4169 : EXPR_LOCATION (base);
4170
4171 if (TREE_CODE (base) != ADDR_EXPR
4172 && POINTER_TYPE_P (TREE_TYPE (base)))
4173 off = build_int_cst (adj->alias_ptr_type,
4174 adj->offset / BITS_PER_UNIT);
4175 else
4176 {
4177 HOST_WIDE_INT base_offset;
4178 tree prev_base;
4179 bool addrof;
4180
4181 if (TREE_CODE (base) == ADDR_EXPR)
4182 {
4183 base = TREE_OPERAND (base, 0);
4184 addrof = true;
4185 }
4186 else
4187 addrof = false;
4188 prev_base = base;
4189 base = get_addr_base_and_unit_offset (base, &base_offset);
4190 /* Aggregate arguments can have non-invariant addresses. */
4191 if (!base)
4192 {
4193 base = build_fold_addr_expr (prev_base);
4194 off = build_int_cst (adj->alias_ptr_type,
4195 adj->offset / BITS_PER_UNIT);
4196 }
4197 else if (TREE_CODE (base) == MEM_REF)
4198 {
4199 if (!addrof)
4200 {
4201 deref_base = true;
4202 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4203 }
4204 off = build_int_cst (adj->alias_ptr_type,
4205 base_offset
4206 + adj->offset / BITS_PER_UNIT);
4207 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4208 off);
4209 base = TREE_OPERAND (base, 0);
4210 }
4211 else
4212 {
4213 off = build_int_cst (adj->alias_ptr_type,
4214 base_offset
4215 + adj->offset / BITS_PER_UNIT);
4216 base = build_fold_addr_expr (base);
4217 }
4218 }
4219
4220 if (!adj->by_ref)
4221 {
4222 tree type = adj->type;
4223 unsigned int align;
4224 unsigned HOST_WIDE_INT misalign;
4225
4226 if (deref_base)
4227 {
4228 align = deref_align;
4229 misalign = 0;
4230 }
4231 else
4232 {
4233 get_pointer_alignment_1 (base, &align, &misalign);
4234 if (TYPE_ALIGN (type) > align)
4235 align = TYPE_ALIGN (type);
4236 }
4237 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4238 * BITS_PER_UNIT);
4239 misalign = misalign & (align - 1);
4240 if (misalign != 0)
4241 align = least_bit_hwi (misalign);
4242 if (align < TYPE_ALIGN (type))
4243 type = build_aligned_type (type, align);
4244 base = force_gimple_operand_gsi (&gsi, base,
4245 true, NULL, true, GSI_SAME_STMT);
4246 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4247 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4248 /* If expr is not a valid gimple call argument, emit
4249 a load into a temporary. */
4250 if (is_gimple_reg_type (TREE_TYPE (expr)))
4251 {
4252 gimple *tem = gimple_build_assign (NULL_TREE, expr);
4253 if (gimple_in_ssa_p (cfun))
4254 {
4255 gimple_set_vuse (tem, gimple_vuse (stmt));
4256 expr = make_ssa_name (TREE_TYPE (expr), tem);
4257 }
4258 else
4259 expr = create_tmp_reg (TREE_TYPE (expr));
4260 gimple_assign_set_lhs (tem, expr);
4261 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4262 }
4263 }
4264 else
4265 {
4266 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4267 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4268 expr = build_fold_addr_expr (expr);
4269 expr = force_gimple_operand_gsi (&gsi, expr,
4270 true, NULL, true, GSI_SAME_STMT);
4271 }
4272 vargs.quick_push (expr);
4273 }
4274 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4275 {
4276 unsigned int ix;
4277 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4278 gimple *def_temp;
4279
4280 arg = gimple_call_arg (stmt, adj->base_index);
4281 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4282 {
4283 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4284 continue;
4285 arg = fold_convert_loc (gimple_location (stmt),
4286 TREE_TYPE (origin), arg);
4287 }
4288 if (debug_args == NULL)
4289 debug_args = decl_debug_args_insert (callee_decl);
4290 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4291 if (ddecl == origin)
4292 {
4293 ddecl = (**debug_args)[ix + 1];
4294 break;
4295 }
4296 if (ddecl == NULL)
4297 {
4298 ddecl = make_node (DEBUG_EXPR_DECL);
4299 DECL_ARTIFICIAL (ddecl) = 1;
4300 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4301 SET_DECL_MODE (ddecl, DECL_MODE (origin));
4302
4303 vec_safe_push (*debug_args, origin);
4304 vec_safe_push (*debug_args, ddecl);
4305 }
4306 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4307 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4308 }
4309 }
4310
4311 if (dump_file && (dump_flags & TDF_DETAILS))
4312 {
4313 fprintf (dump_file, "replacing stmt:");
4314 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4315 }
4316
4317 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4318 vargs.release ();
4319 if (gimple_call_lhs (stmt))
4320 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4321
4322 gimple_set_block (new_stmt, gimple_block (stmt));
4323 if (gimple_has_location (stmt))
4324 gimple_set_location (new_stmt, gimple_location (stmt));
4325 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4326 gimple_call_copy_flags (new_stmt, stmt);
4327 if (gimple_in_ssa_p (cfun))
4328 {
4329 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4330 if (gimple_vdef (stmt))
4331 {
4332 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4333 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4334 }
4335 }
4336
4337 if (dump_file && (dump_flags & TDF_DETAILS))
4338 {
4339 fprintf (dump_file, "with stmt:");
4340 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4341 fprintf (dump_file, "\n");
4342 }
4343 gsi_replace (&gsi, new_stmt, true);
4344 if (cs)
4345 cs->set_call_stmt (new_stmt);
4346 do
4347 {
4348 current_node->record_stmt_references (gsi_stmt (gsi));
4349 gsi_prev (&gsi);
4350 }
4351 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4352 }
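/* Continuing the sketch above (illustrative only): at a call site

     res_1 = foo (i_2, q_3);

   the non-copy adjustment makes the code above materialize a load from
   the aggregate, so the call becomes the equivalent of

     tmp_4 = MEM[(int *)q_3 + 4B];
     res_1 = foo (i_2, tmp_4);

   with the load inserted before the call; the 4B offset is made up for
   the example and would really be adj->offset / BITS_PER_UNIT. When
   debug info is enabled, the removed argument is additionally bound to a
   DEBUG_EXPR_DECL so its value stays visible in the debugger. */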
4353
4354 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4355 so. ADJUSTMENTS is a vector of adjustments. CONVERT
4356 specifies whether the function should care about type incompatibility between
4357 the current and new expressions. If it is false, the function will leave
4358 incompatibility issues to the caller. Return true iff the expression
4359 was modified. */
4360
4361 bool
4362 ipa_modify_expr (tree *expr, bool convert,
4363 ipa_parm_adjustment_vec adjustments)
4364 {
4365 struct ipa_parm_adjustment *cand
4366 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4367 if (!cand)
4368 return false;
4369
4370 tree src;
4371 if (cand->by_ref)
4372 {
4373 src = build_simple_mem_ref (cand->new_decl);
4374 REF_REVERSE_STORAGE_ORDER (src) = cand->reverse;
4375 }
4376 else
4377 src = cand->new_decl;
4378
4379 if (dump_file && (dump_flags & TDF_DETAILS))
4380 {
4381 fprintf (dump_file, "About to replace expr ");
4382 print_generic_expr (dump_file, *expr, 0);
4383 fprintf (dump_file, " with ");
4384 print_generic_expr (dump_file, src, 0);
4385 fprintf (dump_file, "\n");
4386 }
4387
4388 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4389 {
4390 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4391 *expr = vce;
4392 }
4393 else
4394 *expr = src;
4395 return true;
4396 }
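/* For instance (a hedged sketch): if a by_ref parameter was reduced to
   cand->new_decl, an occurrence of the old expression is replaced by
   *new_decl, and when CONVERT is set and the types merely differ in
   representation the result is wrapped as
   VIEW_CONVERT_EXPR<old_type>(*new_decl). */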
4397
4398 /* If T is an SSA_NAME, return NULL if it is not a default def or
4399 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4400 the base variable is always returned, regardless of whether it is a
4401 default def. Return T if it is not an SSA_NAME. */
4402
4403 static tree
4404 get_ssa_base_param (tree t, bool ignore_default_def)
4405 {
4406 if (TREE_CODE (t) == SSA_NAME)
4407 {
4408 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4409 return SSA_NAME_VAR (t);
4410 else
4411 return NULL_TREE;
4412 }
4413 return t;
4414 }
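/* Examples: for a default definition a_1(D) of PARM_DECL a this returns
   a; for a non-default SSA name a_5 it returns NULL_TREE unless
   IGNORE_DEFAULT_DEF is true; for a non-SSA operand it returns the
   operand itself. */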
4415
4416 /* Given an expression, return an adjustment entry specifying the
4417 transformation to be done on EXPR. If no suitable adjustment entry
4418 was found, returns NULL.
4419
4420 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4421 default def, otherwise bail on them.
4422
4423 If CONVERT is non-NULL, this function will set *CONVERT if the
4424 expression provided is a component reference. ADJUSTMENTS is the
4425 adjustments vector. */
4426
4427 ipa_parm_adjustment *
4428 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4429 ipa_parm_adjustment_vec adjustments,
4430 bool ignore_default_def)
4431 {
4432 if (TREE_CODE (**expr) == BIT_FIELD_REF
4433 || TREE_CODE (**expr) == IMAGPART_EXPR
4434 || TREE_CODE (**expr) == REALPART_EXPR)
4435 {
4436 *expr = &TREE_OPERAND (**expr, 0);
4437 if (convert)
4438 *convert = true;
4439 }
4440
4441 HOST_WIDE_INT offset, size, max_size;
4442 bool reverse;
4443 tree base
4444 = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
4445 if (!base || size == -1 || max_size == -1)
4446 return NULL;
4447
4448 if (TREE_CODE (base) == MEM_REF)
4449 {
4450 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4451 base = TREE_OPERAND (base, 0);
4452 }
4453
4454 base = get_ssa_base_param (base, ignore_default_def);
4455 if (!base || TREE_CODE (base) != PARM_DECL)
4456 return NULL;
4457
4458 struct ipa_parm_adjustment *cand = NULL;
4459 unsigned int len = adjustments.length ();
4460 for (unsigned i = 0; i < len; i++)
4461 {
4462 struct ipa_parm_adjustment *adj = &adjustments[i];
4463
4464 if (adj->base == base
4465 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4466 {
4467 cand = adj;
4468 break;
4469 }
4470 }
4471
4472 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4473 return NULL;
4474 return cand;
4475 }
4476
4477 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4478
4479 static bool
4480 index_in_adjustments_multiple_times_p (int base_index,
4481 ipa_parm_adjustment_vec adjustments)
4482 {
4483 int i, len = adjustments.length ();
4484 bool one = false;
4485
4486 for (i = 0; i < len; i++)
4487 {
4488 struct ipa_parm_adjustment *adj;
4489 adj = &adjustments[i];
4490
4491 if (adj->base_index == base_index)
4492 {
4493 if (one)
4494 return true;
4495 else
4496 one = true;
4497 }
4498 }
4499 return false;
4500 }
4501
4502
4503 /* Return adjustments that should have the same effect on function parameters
4504 and call arguments as if they were first changed according to adjustments in
4505 INNER and then by adjustments in OUTER. */
4506
4507 ipa_parm_adjustment_vec
4508 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4509 ipa_parm_adjustment_vec outer)
4510 {
4511 int i, outlen = outer.length ();
4512 int inlen = inner.length ();
4513 int removals = 0;
4514 ipa_parm_adjustment_vec adjustments, tmp;
4515
4516 tmp.create (inlen);
4517 for (i = 0; i < inlen; i++)
4518 {
4519 struct ipa_parm_adjustment *n;
4520 n = &inner[i];
4521
4522 if (n->op == IPA_PARM_OP_REMOVE)
4523 removals++;
4524 else
4525 {
4526 /* FIXME: Handling of new arguments is not implemented yet. */
4527 gcc_assert (n->op != IPA_PARM_OP_NEW);
4528 tmp.quick_push (*n);
4529 }
4530 }
4531
4532 adjustments.create (outlen + removals);
4533 for (i = 0; i < outlen; i++)
4534 {
4535 struct ipa_parm_adjustment r;
4536 struct ipa_parm_adjustment *out = &outer[i];
4537 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4538
4539 memset (&r, 0, sizeof (r));
4540 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4541 if (out->op == IPA_PARM_OP_REMOVE)
4542 {
4543 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4544 {
4545 r.op = IPA_PARM_OP_REMOVE;
4546 adjustments.quick_push (r);
4547 }
4548 continue;
4549 }
4550 else
4551 {
4552 /* FIXME: Handling of new arguments is not implemented yet. */
4553 gcc_assert (out->op != IPA_PARM_OP_NEW);
4554 }
4555
4556 r.base_index = in->base_index;
4557 r.type = out->type;
4558
4559 /* FIXME: Create nonlocal value too. */
4560
4561 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4562 r.op = IPA_PARM_OP_COPY;
4563 else if (in->op == IPA_PARM_OP_COPY)
4564 r.offset = out->offset;
4565 else if (out->op == IPA_PARM_OP_COPY)
4566 r.offset = in->offset;
4567 else
4568 r.offset = in->offset + out->offset;
4569 adjustments.quick_push (r);
4570 }
4571
4572 for (i = 0; i < inlen; i++)
4573 {
4574 struct ipa_parm_adjustment *n = &inner[i];
4575
4576 if (n->op == IPA_PARM_OP_REMOVE)
4577 adjustments.quick_push (*n);
4578 }
4579
4580 tmp.release ();
4581 return adjustments;
4582 }
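/* An illustrative composition (not from the original sources): if INNER
   rewrites an original signature (a, b, c) to (a, c) by removing b, and
   OUTER, expressed in terms of (a, c), removes its second parameter,
   then the combined vector expressed against (a, b, c) contains a copy
   of a plus removals of both b and c. */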
4583
4584 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4585 human-friendly way, assuming they are meant to be applied to FNDECL. */
4586
4587 void
4588 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4589 tree fndecl)
4590 {
4591 int i, len = adjustments.length ();
4592 bool first = true;
4593 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4594
4595 fprintf (file, "IPA param adjustments: ");
4596 for (i = 0; i < len; i++)
4597 {
4598 struct ipa_parm_adjustment *adj;
4599 adj = &adjustments[i];
4600
4601 if (!first)
4602 fprintf (file, " ");
4603 else
4604 first = false;
4605
4606 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4607 print_generic_expr (file, parms[adj->base_index], 0);
4608 if (adj->base)
4609 {
4610 fprintf (file, ", base: ");
4611 print_generic_expr (file, adj->base, 0);
4612 }
4613 if (adj->new_decl)
4614 {
4615 fprintf (file, ", new_decl: ");
4616 print_generic_expr (file, adj->new_decl, 0);
4617 }
4618 if (adj->new_ssa_base)
4619 {
4620 fprintf (file, ", new_ssa_base: ");
4621 print_generic_expr (file, adj->new_ssa_base, 0);
4622 }
4623
4624 if (adj->op == IPA_PARM_OP_COPY)
4625 fprintf (file, ", copy_param");
4626 else if (adj->op == IPA_PARM_OP_REMOVE)
4627 fprintf (file, ", remove_param");
4628 else
4629 fprintf (file, ", offset %li", (long) adj->offset);
4630 if (adj->by_ref)
4631 fprintf (file, ", by_ref");
4632 print_node_brief (file, ", type: ", adj->type, 0);
4633 fprintf (file, "\n");
4634 }
4635 parms.release ();
4636 }
4637
4638 /* Dump the AV linked list. */
4639
4640 void
4641 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4642 {
4643 bool comma = false;
4644 fprintf (f, " Aggregate replacements:");
4645 for (; av; av = av->next)
4646 {
4647 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4648 av->index, av->offset);
4649 print_generic_expr (f, av->value, 0);
4650 comma = true;
4651 }
4652 fprintf (f, "\n");
4653 }
4654
4655 /* Stream out jump function JUMP_FUNC to OB. */
4656
4657 static void
4658 ipa_write_jump_function (struct output_block *ob,
4659 struct ipa_jump_func *jump_func)
4660 {
4661 struct ipa_agg_jf_item *item;
4662 struct bitpack_d bp;
4663 int i, count;
4664
4665 streamer_write_uhwi (ob, jump_func->type);
4666 switch (jump_func->type)
4667 {
4668 case IPA_JF_UNKNOWN:
4669 break;
4670 case IPA_JF_CONST:
4671 gcc_assert (
4672 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4673 stream_write_tree (ob, jump_func->value.constant.value, true);
4674 break;
4675 case IPA_JF_PASS_THROUGH:
4676 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4677 if (jump_func->value.pass_through.operation == NOP_EXPR)
4678 {
4679 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4680 bp = bitpack_create (ob->main_stream);
4681 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4682 streamer_write_bitpack (&bp);
4683 }
4684 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4685 == tcc_unary)
4686 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4687 else
4688 {
4689 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4690 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4691 }
4692 break;
4693 case IPA_JF_ANCESTOR:
4694 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4695 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4696 bp = bitpack_create (ob->main_stream);
4697 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4698 streamer_write_bitpack (&bp);
4699 break;
4700 }
4701
4702 count = vec_safe_length (jump_func->agg.items);
4703 streamer_write_uhwi (ob, count);
4704 if (count)
4705 {
4706 bp = bitpack_create (ob->main_stream);
4707 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4708 streamer_write_bitpack (&bp);
4709 }
4710
4711 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4712 {
4713 streamer_write_uhwi (ob, item->offset);
4714 stream_write_tree (ob, item->value, true);
4715 }
4716
4717 bp = bitpack_create (ob->main_stream);
4718 bp_pack_value (&bp, jump_func->bits.known, 1);
4719 streamer_write_bitpack (&bp);
4720 if (jump_func->bits.known)
4721 {
4722 streamer_write_widest_int (ob, jump_func->bits.value);
4723 streamer_write_widest_int (ob, jump_func->bits.mask);
4724 }
4725 bp_pack_value (&bp, jump_func->vr_known, 1);
4726 streamer_write_bitpack (&bp);
4727 if (jump_func->vr_known)
4728 {
4729 streamer_write_enum (ob->main_stream, value_range_type,
4730 VR_LAST, jump_func->m_vr.type);
4731 stream_write_tree (ob, jump_func->m_vr.min, true);
4732 stream_write_tree (ob, jump_func->m_vr.max, true);
4733 }
4734 }
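/* Informally, the stream layout produced above is:

     uhwi     jump function type
     ...      type-specific payload (constant tree; pass-through
              operation, formal id and possibly operand; or ancestor
              offset, formal id and agg_preserved bit)
     uhwi     number of aggregate items, then a by_ref bitpack if the
              count is nonzero, followed by (offset, value) pairs
     bitpack  bits.known, plus value and mask when set
     bitpack  vr_known, plus range type, min and max when set

   ipa_read_jump_function below must mirror this layout exactly. */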
4735
4736 /* Read in jump function JUMP_FUNC from IB. */
4737
4738 static void
4739 ipa_read_jump_function (struct lto_input_block *ib,
4740 struct ipa_jump_func *jump_func,
4741 struct cgraph_edge *cs,
4742 struct data_in *data_in)
4743 {
4744 enum jump_func_type jftype;
4745 enum tree_code operation;
4746 int i, count;
4747
4748 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4749 switch (jftype)
4750 {
4751 case IPA_JF_UNKNOWN:
4752 ipa_set_jf_unknown (jump_func);
4753 break;
4754 case IPA_JF_CONST:
4755 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4756 break;
4757 case IPA_JF_PASS_THROUGH:
4758 operation = (enum tree_code) streamer_read_uhwi (ib);
4759 if (operation == NOP_EXPR)
4760 {
4761 int formal_id = streamer_read_uhwi (ib);
4762 struct bitpack_d bp = streamer_read_bitpack (ib);
4763 bool agg_preserved = bp_unpack_value (&bp, 1);
4764 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4765 }
4766 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4767 {
4768 int formal_id = streamer_read_uhwi (ib);
4769 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4770 }
4771 else
4772 {
4773 tree operand = stream_read_tree (ib, data_in);
4774 int formal_id = streamer_read_uhwi (ib);
4775 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4776 operation);
4777 }
4778 break;
4779 case IPA_JF_ANCESTOR:
4780 {
4781 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4782 int formal_id = streamer_read_uhwi (ib);
4783 struct bitpack_d bp = streamer_read_bitpack (ib);
4784 bool agg_preserved = bp_unpack_value (&bp, 1);
4785 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4786 break;
4787 }
4788 }
4789
4790 count = streamer_read_uhwi (ib);
4791 vec_alloc (jump_func->agg.items, count);
4792 if (count)
4793 {
4794 struct bitpack_d bp = streamer_read_bitpack (ib);
4795 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4796 }
4797 for (i = 0; i < count; i++)
4798 {
4799 struct ipa_agg_jf_item item;
4800 item.offset = streamer_read_uhwi (ib);
4801 item.value = stream_read_tree (ib, data_in);
4802 jump_func->agg.items->quick_push (item);
4803 }
4804
4805 struct bitpack_d bp = streamer_read_bitpack (ib);
4806 bool bits_known = bp_unpack_value (&bp, 1);
4807 if (bits_known)
4808 {
4809 jump_func->bits.known = true;
4810 jump_func->bits.value = streamer_read_widest_int (ib);
4811 jump_func->bits.mask = streamer_read_widest_int (ib);
4812 }
4813 else
4814 jump_func->bits.known = false;
4815
4816 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4817 bool vr_known = bp_unpack_value (&vr_bp, 1);
4818 if (vr_known)
4819 {
4820 jump_func->vr_known = true;
4821 jump_func->m_vr.type = streamer_read_enum (ib,
4822 value_range_type,
4823 VR_LAST);
4824 jump_func->m_vr.min = stream_read_tree (ib, data_in);
4825 jump_func->m_vr.max = stream_read_tree (ib, data_in);
4826 }
4827 else
4828 jump_func->vr_known = false;
4829 }
4830
4831 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4832 relevant to indirect inlining to OB. */
4833
4834 static void
4835 ipa_write_indirect_edge_info (struct output_block *ob,
4836 struct cgraph_edge *cs)
4837 {
4838 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4839 struct bitpack_d bp;
4840
4841 streamer_write_hwi (ob, ii->param_index);
4842 bp = bitpack_create (ob->main_stream);
4843 bp_pack_value (&bp, ii->polymorphic, 1);
4844 bp_pack_value (&bp, ii->agg_contents, 1);
4845 bp_pack_value (&bp, ii->member_ptr, 1);
4846 bp_pack_value (&bp, ii->by_ref, 1);
4847 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4848 bp_pack_value (&bp, ii->vptr_changed, 1);
4849 streamer_write_bitpack (&bp);
4850 if (ii->agg_contents || ii->polymorphic)
4851 streamer_write_hwi (ob, ii->offset);
4852 else
4853 gcc_assert (ii->offset == 0);
4854
4855 if (ii->polymorphic)
4856 {
4857 streamer_write_hwi (ob, ii->otr_token);
4858 stream_write_tree (ob, ii->otr_type, true);
4859 ii->context.stream_out (ob);
4860 }
4861 }
4862
4863 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4864 relevant to indirect inlining from IB. */
4865
4866 static void
4867 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4868 struct data_in *data_in,
4869 struct cgraph_edge *cs)
4870 {
4871 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4872 struct bitpack_d bp;
4873
4874 ii->param_index = (int) streamer_read_hwi (ib);
4875 bp = streamer_read_bitpack (ib);
4876 ii->polymorphic = bp_unpack_value (&bp, 1);
4877 ii->agg_contents = bp_unpack_value (&bp, 1);
4878 ii->member_ptr = bp_unpack_value (&bp, 1);
4879 ii->by_ref = bp_unpack_value (&bp, 1);
4880 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4881 ii->vptr_changed = bp_unpack_value (&bp, 1);
4882 if (ii->agg_contents || ii->polymorphic)
4883 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4884 else
4885 ii->offset = 0;
4886 if (ii->polymorphic)
4887 {
4888 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4889 ii->otr_type = stream_read_tree (ib, data_in);
4890 ii->context.stream_in (ib, data_in);
4891 }
4892 }
4893
4894 /* Stream out NODE info to OB. */
4895
4896 static void
4897 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4898 {
4899 int node_ref;
4900 lto_symtab_encoder_t encoder;
4901 struct ipa_node_params *info = IPA_NODE_REF (node);
4902 int j;
4903 struct cgraph_edge *e;
4904 struct bitpack_d bp;
4905
4906 encoder = ob->decl_state->symtab_node_encoder;
4907 node_ref = lto_symtab_encoder_encode (encoder, node);
4908 streamer_write_uhwi (ob, node_ref);
4909
4910 streamer_write_uhwi (ob, ipa_get_param_count (info));
4911 for (j = 0; j < ipa_get_param_count (info); j++)
4912 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4913 bp = bitpack_create (ob->main_stream);
4914 gcc_assert (info->analysis_done
4915 || ipa_get_param_count (info) == 0);
4916 gcc_assert (!info->node_enqueued);
4917 gcc_assert (!info->ipcp_orig_node);
4918 for (j = 0; j < ipa_get_param_count (info); j++)
4919 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4920 streamer_write_bitpack (&bp);
4921 for (j = 0; j < ipa_get_param_count (info); j++)
4922 {
4923 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4924 stream_write_tree (ob, ipa_get_type (info, j), true);
4925 }
4926 for (e = node->callees; e; e = e->next_callee)
4927 {
4928 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4929
4930 streamer_write_uhwi (ob,
4931 ipa_get_cs_argument_count (args) * 2
4932 + (args->polymorphic_call_contexts != NULL));
4933 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4934 {
4935 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4936 if (args->polymorphic_call_contexts != NULL)
4937 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4938 }
4939 }
4940 for (e = node->indirect_calls; e; e = e->next_callee)
4941 {
4942 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4943
4944 streamer_write_uhwi (ob,
4945 ipa_get_cs_argument_count (args) * 2
4946 + (args->polymorphic_call_contexts != NULL));
4947 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4948 {
4949 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4950 if (args->polymorphic_call_contexts != NULL)
4951 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4952 }
4953 ipa_write_indirect_edge_info (ob, e);
4954 }
4955 }
4956
4957 /* Stream in NODE info from IB. */
4958
4959 static void
4960 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4961 struct data_in *data_in)
4962 {
4963 struct ipa_node_params *info = IPA_NODE_REF (node);
4964 int k;
4965 struct cgraph_edge *e;
4966 struct bitpack_d bp;
4967
4968 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4969
4970 for (k = 0; k < ipa_get_param_count (info); k++)
4971 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
4972
4973 bp = streamer_read_bitpack (ib);
4974 if (ipa_get_param_count (info) != 0)
4975 info->analysis_done = true;
4976 info->node_enqueued = false;
4977 for (k = 0; k < ipa_get_param_count (info); k++)
4978 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4979 for (k = 0; k < ipa_get_param_count (info); k++)
4980 {
4981 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4982 (*info->descriptors)[k].decl_or_type = stream_read_tree (ib, data_in);
4983 }
4984 for (e = node->callees; e; e = e->next_callee)
4985 {
4986 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4987 int count = streamer_read_uhwi (ib);
4988 bool contexts_computed = count & 1;
4989 count /= 2;
4990
4991 if (!count)
4992 continue;
4993 vec_safe_grow_cleared (args->jump_functions, count);
4994 if (contexts_computed)
4995 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4996
4997 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4998 {
4999 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5000 data_in);
5001 if (contexts_computed)
5002 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5003 }
5004 }
5005 for (e = node->indirect_calls; e; e = e->next_callee)
5006 {
5007 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5008 int count = streamer_read_uhwi (ib);
5009 bool contexts_computed = count & 1;
5010 count /= 2;
5011
5012 if (count)
5013 {
5014 vec_safe_grow_cleared (args->jump_functions, count);
5015 if (contexts_computed)
5016 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
5017 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5018 {
5019 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5020 data_in);
5021 if (contexts_computed)
5022 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5023 }
5024 }
5025 ipa_read_indirect_edge_info (ib, data_in, e);
5026 }
5027 }
5028
5029 /* Write jump functions for all analyzed functions in the current partition. */
5030
5031 void
5032 ipa_prop_write_jump_functions (void)
5033 {
5034 struct cgraph_node *node;
5035 struct output_block *ob;
5036 unsigned int count = 0;
5037 lto_symtab_encoder_iterator lsei;
5038 lto_symtab_encoder_t encoder;
5039
5040 if (!ipa_node_params_sum)
5041 return;
5042
5043 ob = create_output_block (LTO_section_jump_functions);
5044 encoder = ob->decl_state->symtab_node_encoder;
5045 ob->symbol = NULL;
5046 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5047 lsei_next_function_in_partition (&lsei))
5048 {
5049 node = lsei_cgraph_node (lsei);
5050 if (node->has_gimple_body_p ()
5051 && IPA_NODE_REF (node) != NULL)
5052 count++;
5053 }
5054
5055 streamer_write_uhwi (ob, count);
5056
5057 /* Process all of the functions. */
5058 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5059 lsei_next_function_in_partition (&lsei))
5060 {
5061 node = lsei_cgraph_node (lsei);
5062 if (node->has_gimple_body_p ()
5063 && IPA_NODE_REF (node) != NULL)
5064 ipa_write_node_info (ob, node);
5065 }
5066 streamer_write_char_stream (ob->main_stream, 0);
5067 produce_asm (ob, NULL);
5068 destroy_output_block (ob);
5069 }
5070
5071 /* Read section in file FILE_DATA of length LEN with data DATA. */
5072
5073 static void
5074 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5075 size_t len)
5076 {
5077 const struct lto_function_header *header =
5078 (const struct lto_function_header *) data;
5079 const int cfg_offset = sizeof (struct lto_function_header);
5080 const int main_offset = cfg_offset + header->cfg_size;
5081 const int string_offset = main_offset + header->main_size;
5082 struct data_in *data_in;
5083 unsigned int i;
5084 unsigned int count;
5085
5086 lto_input_block ib_main ((const char *) data + main_offset,
5087 header->main_size, file_data->mode_table);
5088
5089 data_in =
5090 lto_data_in_create (file_data, (const char *) data + string_offset,
5091 header->string_size, vNULL);
5092 count = streamer_read_uhwi (&ib_main);
5093
5094 for (i = 0; i < count; i++)
5095 {
5096 unsigned int index;
5097 struct cgraph_node *node;
5098 lto_symtab_encoder_t encoder;
5099
5100 index = streamer_read_uhwi (&ib_main);
5101 encoder = file_data->symtab_node_encoder;
5102 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5103 index));
5104 gcc_assert (node->definition);
5105 ipa_read_node_info (&ib_main, node, data_in);
5106 }
5107 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5108 len);
5109 lto_data_in_delete (data_in);
5110 }
5111
5112 /* Read ipcp jump functions. */
5113
5114 void
5115 ipa_prop_read_jump_functions (void)
5116 {
5117 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5118 struct lto_file_decl_data *file_data;
5119 unsigned int j = 0;
5120
5121 ipa_check_create_node_params ();
5122 ipa_check_create_edge_args ();
5123 ipa_register_cgraph_hooks ();
5124
5125 while ((file_data = file_data_vec[j++]))
5126 {
5127 size_t len;
5128 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
5129
5130 if (data)
5131 ipa_prop_read_section (file_data, data, len);
5132 }
5133 }
5134
5135 /* After merging units, we can get a mismatch in argument counts.
5136 Decl merging might also have rendered parameter lists obsolete.
5137 Also compute called_with_variable_arg info. */
5138
5139 void
5140 ipa_update_after_lto_read (void)
5141 {
5142 ipa_check_create_node_params ();
5143 ipa_check_create_edge_args ();
5144 }
5145
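/* Stream out the IPA-CP transformation summary for NODE (its aggregate
   replacement values, known value ranges and known bits) to OB. */
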
5146 void
5147 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5148 {
5149 int node_ref;
5150 unsigned int count = 0;
5151 lto_symtab_encoder_t encoder;
5152 struct ipa_agg_replacement_value *aggvals, *av;
5153
5154 aggvals = ipa_get_agg_replacements_for_node (node);
5155 encoder = ob->decl_state->symtab_node_encoder;
5156 node_ref = lto_symtab_encoder_encode (encoder, node);
5157 streamer_write_uhwi (ob, node_ref);
5158
5159 for (av = aggvals; av; av = av->next)
5160 count++;
5161 streamer_write_uhwi (ob, count);
5162
5163 for (av = aggvals; av; av = av->next)
5164 {
5165 struct bitpack_d bp;
5166
5167 streamer_write_uhwi (ob, av->offset);
5168 streamer_write_uhwi (ob, av->index);
5169 stream_write_tree (ob, av->value, true);
5170
5171 bp = bitpack_create (ob->main_stream);
5172 bp_pack_value (&bp, av->by_ref, 1);
5173 streamer_write_bitpack (&bp);
5174 }
5175
5176 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5177 if (ts && vec_safe_length (ts->m_vr) > 0)
5178 {
5179 count = ts->m_vr->length ();
5180 streamer_write_uhwi (ob, count);
5181 for (unsigned i = 0; i < count; ++i)
5182 {
5183 struct bitpack_d bp;
5184 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5185 bp = bitpack_create (ob->main_stream);
5186 bp_pack_value (&bp, parm_vr->known, 1);
5187 streamer_write_bitpack (&bp);
5188 if (parm_vr->known)
5189 {
5190 streamer_write_enum (ob->main_stream, value_range_type,
5191 VR_LAST, parm_vr->type);
5192 streamer_write_wide_int (ob, parm_vr->min);
5193 streamer_write_wide_int (ob, parm_vr->max);
5194 }
5195 }
5196 }
5197 else
5198 streamer_write_uhwi (ob, 0);
5199
5200 if (ts && vec_safe_length (ts->bits) > 0)
5201 {
5202 count = ts->bits->length ();
5203 streamer_write_uhwi (ob, count);
5204
5205 for (unsigned i = 0; i < count; ++i)
5206 {
5207 const ipa_bits& bits_jfunc = (*ts->bits)[i];
5208 struct bitpack_d bp = bitpack_create (ob->main_stream);
5209 bp_pack_value (&bp, bits_jfunc.known, 1);
5210 streamer_write_bitpack (&bp);
5211 if (bits_jfunc.known)
5212 {
5213 streamer_write_widest_int (ob, bits_jfunc.value);
5214 streamer_write_widest_int (ob, bits_jfunc.mask);
5215 }
5216 }
5217 }
5218 else
5219 streamer_write_uhwi (ob, 0);
5220 }
5221
5222 /* Stream in the aggregate value replacement chain and other transformation data for NODE from IB. */
5223
5224 static void
5225 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5226 data_in *data_in)
5227 {
5228 struct ipa_agg_replacement_value *aggvals = NULL;
5229 unsigned int count, i;
5230
5231 count = streamer_read_uhwi (ib);
5232 for (i = 0; i < count; i++)
5233 {
5234 struct ipa_agg_replacement_value *av;
5235 struct bitpack_d bp;
5236
5237 av = ggc_alloc<ipa_agg_replacement_value> ();
5238 av->offset = streamer_read_uhwi (ib);
5239 av->index = streamer_read_uhwi (ib);
5240 av->value = stream_read_tree (ib, data_in);
5241 bp = streamer_read_bitpack (ib);
5242 av->by_ref = bp_unpack_value (&bp, 1);
5243 av->next = aggvals;
5244 aggvals = av;
5245 }
5246 ipa_set_node_agg_value_chain (node, aggvals);
5247
5248 count = streamer_read_uhwi (ib);
5249 if (count > 0)
5250 {
5251 ipcp_grow_transformations_if_necessary ();
5252
5253 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5254 vec_safe_grow_cleared (ts->m_vr, count);
5255 for (i = 0; i < count; i++)
5256 {
5257 ipa_vr *parm_vr;
5258 parm_vr = &(*ts->m_vr)[i];
5259 struct bitpack_d bp;
5260 bp = streamer_read_bitpack (ib);
5261 parm_vr->known = bp_unpack_value (&bp, 1);
5262 if (parm_vr->known)
5263 {
5264 parm_vr->type = streamer_read_enum (ib, value_range_type,
5265 VR_LAST);
5266 parm_vr->min = streamer_read_wide_int (ib);
5267 parm_vr->max = streamer_read_wide_int (ib);
5268 }
5269 }
5270 }
5271 count = streamer_read_uhwi (ib);
5272 if (count > 0)
5273 {
5274 ipcp_grow_transformations_if_necessary ();
5275
5276 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5277 vec_safe_grow_cleared (ts->bits, count);
5278
5279 for (i = 0; i < count; i++)
5280 {
5281 ipa_bits& bits_jfunc = (*ts->bits)[i];
5282 struct bitpack_d bp = streamer_read_bitpack (ib);
5283 bits_jfunc.known = bp_unpack_value (&bp, 1);
5284 if (bits_jfunc.known)
5285 {
5286 bits_jfunc.value = streamer_read_widest_int (ib);
5287 bits_jfunc.mask = streamer_read_widest_int (ib);
5288 }
5289 }
5290 }
5291 }
5292
5293 /* Write all aggregate replacements for nodes in the current partition. */
5294
5295 void
5296 ipcp_write_transformation_summaries (void)
5297 {
5298 struct cgraph_node *node;
5299 struct output_block *ob;
5300 unsigned int count = 0;
5301 lto_symtab_encoder_iterator lsei;
5302 lto_symtab_encoder_t encoder;
5303
5304 ob = create_output_block (LTO_section_ipcp_transform);
5305 encoder = ob->decl_state->symtab_node_encoder;
5306 ob->symbol = NULL;
5307 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5308 lsei_next_function_in_partition (&lsei))
5309 {
5310 node = lsei_cgraph_node (lsei);
5311 if (node->has_gimple_body_p ())
5312 count++;
5313 }
5314
5315 streamer_write_uhwi (ob, count);
5316
5317 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5318 lsei_next_function_in_partition (&lsei))
5319 {
5320 node = lsei_cgraph_node (lsei);
5321 if (node->has_gimple_body_p ())
5322 write_ipcp_transformation_info (ob, node);
5323 }
5324 streamer_write_char_stream (ob->main_stream, 0);
5325 produce_asm (ob, NULL);
5326 destroy_output_block (ob);
5327 }
5328
5329 /* Read replacements section in file FILE_DATA of length LEN with data
5330 DATA. */
5331
5332 static void
5333 read_replacements_section (struct lto_file_decl_data *file_data,
5334 const char *data,
5335 size_t len)
5336 {
5337 const struct lto_function_header *header =
5338 (const struct lto_function_header *) data;
5339 const int cfg_offset = sizeof (struct lto_function_header);
5340 const int main_offset = cfg_offset + header->cfg_size;
5341 const int string_offset = main_offset + header->main_size;
5342 struct data_in *data_in;
5343 unsigned int i;
5344 unsigned int count;
5345
5346 lto_input_block ib_main ((const char *) data + main_offset,
5347 header->main_size, file_data->mode_table);
5348
5349 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5350 header->string_size, vNULL);
5351 count = streamer_read_uhwi (&ib_main);
5352
5353 for (i = 0; i < count; i++)
5354 {
5355 unsigned int index;
5356 struct cgraph_node *node;
5357 lto_symtab_encoder_t encoder;
5358
5359 index = streamer_read_uhwi (&ib_main);
5360 encoder = file_data->symtab_node_encoder;
5361 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5362 index));
5363 gcc_assert (node->definition);
5364 read_ipcp_transformation_info (&ib_main, node, data_in);
5365 }
5366 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5367 len);
5368 lto_data_in_delete (data_in);
5369 }
5370
5371 /* Read IPA-CP transformation summaries. */
5372
5373 void
5374 ipcp_read_transformation_summaries (void)
5375 {
5376 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5377 struct lto_file_decl_data *file_data;
5378 unsigned int j = 0;
5379
5380 while ((file_data = file_data_vec[j++]))
5381 {
5382 size_t len;
5383 const char *data = lto_get_section_data (file_data,
5384 LTO_section_ipcp_transform,
5385 NULL, &len);
5386 if (data)
5387 read_replacements_section (file_data, data, len);
5388 }
5389 }
5390
5391 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5392 NODE. */
5393
5394 static void
5395 adjust_agg_replacement_values (struct cgraph_node *node,
5396 struct ipa_agg_replacement_value *aggval)
5397 {
5398 struct ipa_agg_replacement_value *v;
5399 int i, c = 0, d = 0, *adj;
5400
5401 if (!node->clone.combined_args_to_skip)
5402 return;
5403
5404 for (v = aggval; v; v = v->next)
5405 {
5406 gcc_assert (v->index >= 0);
5407 if (c < v->index)
5408 c = v->index;
5409 }
5410 c++;
5411
5412 adj = XALLOCAVEC (int, c);
5413 for (i = 0; i < c; i++)
5414 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5415 {
5416 adj[i] = -1;
5417 d++;
5418 }
5419 else
5420 adj[i] = i - d;
5421
5422 for (v = aggval; v; v = v->next)
5423 v->index = adj[v->index];
5424 }
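/* For example: if combined_args_to_skip contains only bit 1, replacement
   values with indices 0, 2 and 3 are remapped to 0, 1 and 2
   respectively, matching the clone's shortened parameter list. */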
5425
5426 /* Dominator walker driving the ipcp modification phase. */
5427
5428 class ipcp_modif_dom_walker : public dom_walker
5429 {
5430 public:
5431 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5432 vec<ipa_param_descriptor, va_gc> *descs,
5433 struct ipa_agg_replacement_value *av,
5434 bool *sc, bool *cc)
5435 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5436 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5437
5438 virtual edge before_dom_children (basic_block);
5439
5440 private:
5441 struct ipa_func_body_info *m_fbi;
5442 vec<ipa_param_descriptor, va_gc> *m_descriptors;
5443 struct ipa_agg_replacement_value *m_aggval;
5444 bool *m_something_changed, *m_cfg_changed;
5445 };
5446
5447 edge
5448 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5449 {
5450 gimple_stmt_iterator gsi;
5451 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5452 {
5453 struct ipa_agg_replacement_value *v;
5454 gimple *stmt = gsi_stmt (gsi);
5455 tree rhs, val, t;
5456 HOST_WIDE_INT offset, size;
5457 int index;
5458 bool by_ref, vce;
5459
5460 if (!gimple_assign_load_p (stmt))
5461 continue;
5462 rhs = gimple_assign_rhs1 (stmt);
5463 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5464 continue;
5465
5466 vce = false;
5467 t = rhs;
5468 while (handled_component_p (t))
5469 {
5470 /* V_C_E can do things like convert an array of integers to one
5471 bigger integer and similar things we do not handle below. */
5472 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
				   &offset, &size, &by_ref))
	continue;
      for (v = m_aggval; v; v = v->next)
	if (v->index == index
	    && v->offset == offset)
	  break;
      if (!v
	  || v->by_ref != by_ref
	  || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	{
	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v->value)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, " const ");
		  print_generic_expr (dump_file, v->value, 0);
		  fprintf (dump_file, " can't be converted to type of ");
		  print_generic_expr (dump_file, rhs, 0);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	*m_cfg_changed = true;
    }
  return NULL;
}

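/* For illustration (hypothetical types and offsets): if the aggregate
   replacements for this function say that the memory pointed to by the first
   parameter holds the constant 7 at bit offset 32, the walk above rewrites a
   dominated load such as

     _2 = p_1(D)->f;

   into

     _2 = 7;

   provided field F sits at bit offset 32, is accessed by reference the same
   way the replacement was recorded, and its type is compatible with the
   recorded constant.  */
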
/* Update bits info of formal parameters as described in
   ipcp_transformation_summary.  */

static void
ipcp_update_bits (struct cgraph_node *node)
{
  tree parm = DECL_ARGUMENTS (node->decl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);

  if (!ts || vec_safe_length (ts->bits) == 0)
    return;

  vec<ipa_bits, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;

      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      if (!bits[i].known
	  || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
	       || POINTER_TYPE_P (TREE_TYPE (parm)))
	  || !is_gimple_reg (parm))
	continue;

      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file, "Adjusting mask for param %u to ", i);
	  print_hex (bits[i].mask, dump_file);
	  fprintf (dump_file, "\n");
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	{
	  unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
	  signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

	  wide_int nonzero_bits = wide_int::from (bits[i].mask, prec, UNSIGNED)
				  | wide_int::from (bits[i].value, prec, sgn);
	  set_nonzero_bits (ddef, nonzero_bits);
	}
      else
	{
	  unsigned tem = bits[i].mask.to_uhwi ();
	  unsigned HOST_WIDE_INT bitpos = bits[i].value.to_uhwi ();
	  unsigned align = tem & -tem;
	  unsigned misalign = bitpos & (align - 1);

	  if (align > 1)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting align: %u, misalign: %u\n",
			 align, misalign);

	      unsigned old_align, old_misalign;
	      struct ptr_info_def *pi = get_ptr_info (ddef);
	      bool old_known = get_ptr_info_alignment (pi, &old_align,
						       &old_misalign);

	      if (old_known
		  && old_align > align)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "But alignment was already %u.\n",
			       old_align);
		      if ((old_misalign & (align - 1)) != misalign)
			fprintf (dump_file,
				 "old_misalign (%u) and misalign (%u) "
				 "mismatch\n", old_misalign, misalign);
		    }
		  continue;
		}

	      if (old_known
		  && ((misalign & (old_align - 1)) != old_misalign)
		  && dump_file)
		fprintf (dump_file,
			 "old_misalign (%u) and misalign (%u) mismatch\n",
			 old_misalign, misalign);

	      set_ptr_info_alignment (pi, align, misalign);
	    }
	}
    }
}

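/* A worked numeric example for the pointer branch above (illustrative
   values): set bits in the mask are the unknown ones, so

     mask  = 0xfffffff8   (low three bits known)
     value = 0x4	  (their known contents)

   yields align = mask & -mask = 8 and misalign = value & (align - 1) = 4,
   i.e. the parameter is known to be congruent to 4 modulo 8.  */
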
/* Update value range of formal parameters as described in
   ipcp_transformation_summary.  */

static void
ipcp_update_vr (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->m_vr) == 0)
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      if (!ddef || !is_gimple_reg (parm))
	continue;

      if (vr[i].known
	  && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
	{
	  tree type = TREE_TYPE (ddef);
	  unsigned prec = TYPE_PRECISION (type);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Setting value range of param %u ", i);
		  fprintf (dump_file, "%s[",
			   (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
		  print_decs (vr[i].min, dump_file);
		  fprintf (dump_file, ", ");
		  print_decs (vr[i].max, dump_file);
		  fprintf (dump_file, "]\n");
		}
	      set_range_info (ddef, vr[i].type,
			      wide_int_storage::from (vr[i].min, prec,
						      TYPE_SIGN (type)),
			      wide_int_storage::from (vr[i].max, prec,
						      TYPE_SIGN (type)));
	    }
	  else if (POINTER_TYPE_P (TREE_TYPE (ddef))
		   && vr[i].type == VR_ANTI_RANGE
		   && wi::eq_p (vr[i].min, 0)
		   && wi::eq_p (vr[i].max, 0))
	    {
	      if (dump_file)
		fprintf (dump_file, "Setting nonnull for %u\n", i);
	      set_ptr_nonnull (ddef);
	    }
	}
    }
}

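/* Two illustrative cases of the above: an integral parameter recorded with
   VR_RANGE [1, 100] gets that range attached to its default-definition SSA
   name through set_range_info, while a pointer parameter recorded with the
   anti-range ~[0, 0] (never equal to zero) is marked via set_ptr_nonnull
   instead, since pointers carry no general range info.  */
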
/* IPCP transformation phase doing propagation of aggregate values.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s/%i\n",
	     node->name (), node->order);

  ipcp_update_bits (node);
  ipcp_update_vr (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walked = 0;

  vec_safe_grow_cleared (descriptors, param_count);
  ipa_populate_param_decls (node, *descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  (*ipcp_transformations)[node->uid].agg_values = NULL;
  (*ipcp_transformations)[node->uid].bits = NULL;
  (*ipcp_transformations)[node->uid].m_vr = NULL;

  vec_free (descriptors);

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}
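
/* ipcp_transform_function is installed as the function_transform hook of the
   IPA-CP pass (pass_ipa_cp in ipa-cp.c), so the pass manager runs it once per
   function body with CFUN set up; the returned TODO flags request a virtual
   SSA update and, when EH edges were purged above, a CFG cleanup.  */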