Add an assert and testcase for PR 68064
[gcc.git] / gcc / ipa-prop.c
1 /* Interprocedural analyses.
2 Copyright (C) 2005-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "tree-streamer.h"
31 #include "cgraph.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-fold.h"
35 #include "tree-eh.h"
36 #include "calls.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
44 #include "ipa-prop.h"
45 #include "tree-cfg.h"
46 #include "tree-dfa.h"
47 #include "tree-inline.h"
48 #include "ipa-inline.h"
49 #include "gimple-pretty-print.h"
50 #include "params.h"
51 #include "ipa-utils.h"
52 #include "dbgcnt.h"
53 #include "domwalk.h"
54 #include "builtins.h"
55
56 /* Function summary where the parameter infos are actually stored. */
57 ipa_node_params_t *ipa_node_params_sum = NULL;
58 /* Vector of IPA-CP transformation data for each clone. */
59 vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
60 /* Vector where the parameter infos are actually stored. */
61 vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
62
63 /* Holders of ipa cgraph hooks: */
64 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
65 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
66 static struct cgraph_node_hook_list *function_insertion_hook_holder;
67
68 /* Description of a reference to an IPA constant. */
69 struct ipa_cst_ref_desc
70 {
71 /* Edge that corresponds to the statement which took the reference. */
72 struct cgraph_edge *cs;
73 /* Linked list of duplicates created when call graph edges are cloned. */
74 struct ipa_cst_ref_desc *next_duplicate;
75 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
76 is out of control. */
77 int refcount;
78 };
79
80 /* Allocation pool for reference descriptions. */
81
82 static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
83 ("IPA-PROP ref descriptions");
84
85 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
86 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
87
88 static bool
89 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
90 {
91 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
92
93 if (!fs_opts)
94 return false;
95 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
96 }
97
98 /* Return index of the formal whose tree is PTREE in the function whose
99 parameters are described by DESCRIPTORS. */
100
101 static int
102 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
103 {
104 int i, count;
105
106 count = descriptors.length ();
107 for (i = 0; i < count; i++)
108 if (descriptors[i].decl == ptree)
109 return i;
110
111 return -1;
112 }
113
114 /* Return index of the formal whose tree is PTREE in the function which
115 corresponds to INFO. */
116
117 int
118 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
119 {
120 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
121 }
122
123 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
124 NODE. */
125
126 static void
127 ipa_populate_param_decls (struct cgraph_node *node,
128 vec<ipa_param_descriptor> &descriptors)
129 {
130 tree fndecl;
131 tree fnargs;
132 tree parm;
133 int param_num;
134
135 fndecl = node->decl;
136 gcc_assert (gimple_has_body_p (fndecl));
137 fnargs = DECL_ARGUMENTS (fndecl);
138 param_num = 0;
139 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
140 {
141 descriptors[param_num].decl = parm;
142 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
143 true);
144 param_num++;
145 }
146 }
147
148 /* Return how many formal parameters FNDECL has. */
149
150 int
151 count_formal_params (tree fndecl)
152 {
153 tree parm;
154 int count = 0;
155 gcc_assert (gimple_has_body_p (fndecl));
156
157 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
158 count++;
159
160 return count;
161 }
162
163 /* Dump information about the Ith formal parameter of the function
164 corresponding to INFO to FILE: its index and, if available, its
165 declaration. */
166
167 void
168 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
169 {
170 fprintf (file, "param #%i", i);
171 if (info->descriptors[i].decl)
172 {
173 fprintf (file, " ");
174 print_generic_expr (file, info->descriptors[i].decl, 0);
175 }
176 }
177
178 /* Initialize the ipa_node_params structure associated with NODE
179 to hold PARAM_COUNT parameters. */
180
181 void
182 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
183 {
184 struct ipa_node_params *info = IPA_NODE_REF (node);
185
186 if (!info->descriptors.exists () && param_count)
187 info->descriptors.safe_grow_cleared (param_count);
188 }
189
190 /* Initialize the ipa_node_params structure associated with NODE by counting
191 the function parameters, creating the descriptors and populating their
192 param_decls. */
193
194 void
195 ipa_initialize_node_params (struct cgraph_node *node)
196 {
197 struct ipa_node_params *info = IPA_NODE_REF (node);
198
199 if (!info->descriptors.exists ())
200 {
201 ipa_alloc_node_params (node, count_formal_params (node->decl));
202 ipa_populate_param_decls (node, info->descriptors);
203 }
204 }
205
206 /* Print the jump functions associated with call graph edge CS to file F. */
207
208 static void
209 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
210 {
211 int i, count;
212
213 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
214 for (i = 0; i < count; i++)
215 {
216 struct ipa_jump_func *jump_func;
217 enum jump_func_type type;
218
219 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
220 type = jump_func->type;
221
222 fprintf (f, " param %d: ", i);
223 if (type == IPA_JF_UNKNOWN)
224 fprintf (f, "UNKNOWN\n");
225 else if (type == IPA_JF_CONST)
226 {
227 tree val = jump_func->value.constant.value;
228 fprintf (f, "CONST: ");
229 print_generic_expr (f, val, 0);
230 if (TREE_CODE (val) == ADDR_EXPR
231 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
232 {
233 fprintf (f, " -> ");
234 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
235 0);
236 }
237 fprintf (f, "\n");
238 }
239 else if (type == IPA_JF_PASS_THROUGH)
240 {
241 fprintf (f, "PASS THROUGH: ");
242 fprintf (f, "%d, op %s",
243 jump_func->value.pass_through.formal_id,
244 get_tree_code_name (jump_func->value.pass_through.operation));
245 if (jump_func->value.pass_through.operation != NOP_EXPR)
246 {
247 fprintf (f, " ");
248 print_generic_expr (f,
249 jump_func->value.pass_through.operand, 0);
250 }
251 if (jump_func->value.pass_through.agg_preserved)
252 fprintf (f, ", agg_preserved");
253 fprintf (f, "\n");
254 }
255 else if (type == IPA_JF_ANCESTOR)
256 {
257 fprintf (f, "ANCESTOR: ");
258 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
259 jump_func->value.ancestor.formal_id,
260 jump_func->value.ancestor.offset);
261 if (jump_func->value.ancestor.agg_preserved)
262 fprintf (f, ", agg_preserved");
263 fprintf (f, "\n");
264 }
265
266 if (jump_func->agg.items)
267 {
268 struct ipa_agg_jf_item *item;
269 int j;
270
271 fprintf (f, " Aggregate passed by %s:\n",
272 jump_func->agg.by_ref ? "reference" : "value");
273 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
274 {
275 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
276 item->offset);
277 if (TYPE_P (item->value))
278 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
279 tree_to_uhwi (TYPE_SIZE (item->value)));
280 else
281 {
282 fprintf (f, "cst: ");
283 print_generic_expr (f, item->value, 0);
284 }
285 fprintf (f, "\n");
286 }
287 }
288
289 struct ipa_polymorphic_call_context *ctx
290 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
291 if (ctx && !ctx->useless_p ())
292 {
293 fprintf (f, " Context: ");
294 ctx->dump (dump_file);
295 }
296
297 if (jump_func->alignment.known)
298 {
299 fprintf (f, " Alignment: %u, misalignment: %u\n",
300 jump_func->alignment.align,
301 jump_func->alignment.misalign);
302 }
303 else
304 fprintf (f, " Unknown alignment\n");
305 }
306 }
307
308
309 /* Print the jump functions of all arguments on all call graph edges going from
310 NODE to file F. */
311
312 void
313 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
314 {
315 struct cgraph_edge *cs;
316
317 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
318 node->order);
319 for (cs = node->callees; cs; cs = cs->next_callee)
320 {
321 if (!ipa_edge_args_info_available_for_edge_p (cs))
322 continue;
323
324 fprintf (f, " callsite %s/%i -> %s/%i : \n",
325 xstrdup_for_dump (node->name ()), node->order,
326 xstrdup_for_dump (cs->callee->name ()),
327 cs->callee->order);
328 ipa_print_node_jump_functions_for_edge (f, cs);
329 }
330
331 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
332 {
333 struct cgraph_indirect_call_info *ii;
334 if (!ipa_edge_args_info_available_for_edge_p (cs))
335 continue;
336
337 ii = cs->indirect_info;
338 if (ii->agg_contents)
339 fprintf (f, " indirect %s callsite, calling param %i, "
340 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
341 ii->member_ptr ? "member ptr" : "aggregate",
342 ii->param_index, ii->offset,
343 ii->by_ref ? "by reference" : "by value");
344 else
345 fprintf (f, " indirect %s callsite, calling param %i, "
346 "offset " HOST_WIDE_INT_PRINT_DEC,
347 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
348 ii->offset);
349
350 if (cs->call_stmt)
351 {
352 fprintf (f, ", for stmt ");
353 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
354 }
355 else
356 fprintf (f, "\n");
357 if (ii->polymorphic)
358 ii->context.dump (f);
359 ipa_print_node_jump_functions_for_edge (f, cs);
360 }
361 }
362
363 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
364
365 void
366 ipa_print_all_jump_functions (FILE *f)
367 {
368 struct cgraph_node *node;
369
370 fprintf (f, "\nJump functions:\n");
371 FOR_EACH_FUNCTION (node)
372 {
373 ipa_print_node_jump_functions (f, node);
374 }
375 }
376
377 /* Set JFUNC to be a jump function carrying no knowledge about the argument. */
378
379 static void
380 ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
381 {
382 jfunc->type = IPA_JF_UNKNOWN;
383 jfunc->alignment.known = false;
384 }
385
386 /* Set DST to be a copy of another jump function SRC (to be used by the jump
387 function combination code). The two functions will share their rdesc. */
388
389 static void
390 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
391 struct ipa_jump_func *src)
392
393 {
394 gcc_checking_assert (src->type == IPA_JF_CONST);
395 dst->type = IPA_JF_CONST;
396 dst->value.constant = src->value.constant;
397 }
398
399 /* Set JFUNC to be a constant jump function. */
400
401 static void
402 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
403 struct cgraph_edge *cs)
404 {
405 constant = unshare_expr (constant);
406 if (constant && EXPR_P (constant))
407 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
408 jfunc->type = IPA_JF_CONST;
409 jfunc->value.constant.value = unshare_expr_without_location (constant);
410
411 if (TREE_CODE (constant) == ADDR_EXPR
412 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
413 {
414 struct ipa_cst_ref_desc *rdesc;
415
416 rdesc = ipa_refdesc_pool.allocate ();
417 rdesc->cs = cs;
418 rdesc->next_duplicate = NULL;
419 rdesc->refcount = 1;
420 jfunc->value.constant.rdesc = rdesc;
421 }
422 else
423 jfunc->value.constant.rdesc = NULL;
424 }
425
426 /* Set JFUNC to be a simple pass-through jump function. */
427 static void
428 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
429 bool agg_preserved)
430 {
431 jfunc->type = IPA_JF_PASS_THROUGH;
432 jfunc->value.pass_through.operand = NULL_TREE;
433 jfunc->value.pass_through.formal_id = formal_id;
434 jfunc->value.pass_through.operation = NOP_EXPR;
435 jfunc->value.pass_through.agg_preserved = agg_preserved;
436 }
437
438 /* Set JFUNC to be an arithmetic pass-through jump function. */
439
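/* For illustration (hypothetical gimple, mirroring the example given further
   below in compute_complex_assign_jump_func): for

     D.2064_4 = a_1(D) + 4;
     bar (D.2064_4);

   the jump function for the argument records the index of formal A, the
   operand 4 and PLUS_EXPR.  */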
440 static void
441 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
442 tree operand, enum tree_code operation)
443 {
444 jfunc->type = IPA_JF_PASS_THROUGH;
445 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
446 jfunc->value.pass_through.formal_id = formal_id;
447 jfunc->value.pass_through.operation = operation;
448 jfunc->value.pass_through.agg_preserved = false;
449 }
450
451 /* Set JFUNC to be an ancestor jump function. */
452
453 static void
454 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
455 int formal_id, bool agg_preserved)
456 {
457 jfunc->type = IPA_JF_ANCESTOR;
458 jfunc->value.ancestor.formal_id = formal_id;
459 jfunc->value.ancestor.offset = offset;
460 jfunc->value.ancestor.agg_preserved = agg_preserved;
461 }
462
463 /* Get IPA BB information about the given BB. FBI is the context of analysis
464 of this function body. */
465
466 static struct ipa_bb_info *
467 ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
468 {
469 gcc_checking_assert (fbi);
470 return &fbi->bb_infos[bb->index];
471 }
472
473 /* Structure to be passed in between detect_type_change and
474 check_stmt_for_type_change. */
475
476 struct prop_type_change_info
477 {
478 /* Offset into the object where there is the virtual method pointer we are
479 looking for. */
480 HOST_WIDE_INT offset;
481 /* The declaration or SSA_NAME pointer of the base that we are checking for
482 type change. */
483 tree object;
484 /* Set to true if dynamic type change has been detected. */
485 bool type_maybe_changed;
486 };
487
488 /* Return true if STMT can modify a virtual method table pointer.
489
490 This function makes special assumptions about both constructors and
491 destructors which are all the functions that are allowed to alter the VMT
492 pointers. It assumes that destructors begin with assignment into all VMT
493 pointers and that constructors essentially look in the following way:
494
495 1) The very first thing they do is that they call constructors of ancestor
496 sub-objects that have them.
497
498 2) Then VMT pointers of this and all its ancestors is set to new values
499 corresponding to the type corresponding to the constructor.
500
501 3) Only afterwards, other stuff such as constructor of member sub-objects
502 and the code written by the user is run. Only this may include calling
503 virtual functions, directly or indirectly.
504
505 There is no way to call a constructor of an ancestor sub-object in any
506 other way.
507
508 This means that we do not have to care whether constructors get the correct
509 type information because they will always change it (in fact, if we define
510 the type to be given by the VMT pointer, it is undefined).
511
512 The most important fact to derive from the above is that if, for some
513 statement in the section 3, we try to detect whether the dynamic type has
514 changed, we can safely ignore all calls as we examine the function body
515 backwards until we reach statements in section 2 because these calls cannot
516 be ancestor constructors or destructors (if the input is not bogus) and so
517 do not change the dynamic type (this holds true only for automatically
518 allocated objects but at the moment we devirtualize only these). We then
519 must detect that statements in section 2 change the dynamic type and can try
520 to derive the new type. That is enough and we can stop, we will never see
521 the calls into constructors of sub-objects in this code. Therefore we can
522 safely ignore all call statements that we traverse.
523 */
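
/* As a hypothetical C++ illustration of the three sections above:

     struct B : A
     {
       B () : A ()        // 1) ancestor sub-object constructors run first
       {
         // 2) the VMT pointers of this object and all its ancestors are
         //    stored here, making the dynamic type B
         f ();            // 3) member construction and user code run last;
                          //    virtual calls from here on use B's VMT
       }
       virtual void f ();
     };  */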
524
525 static bool
526 stmt_may_be_vtbl_ptr_store (gimple *stmt)
527 {
528 if (is_gimple_call (stmt))
529 return false;
530 if (gimple_clobber_p (stmt))
531 return false;
532 else if (is_gimple_assign (stmt))
533 {
534 tree lhs = gimple_assign_lhs (stmt);
535
536 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
537 {
538 if (flag_strict_aliasing
539 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
540 return false;
541
542 if (TREE_CODE (lhs) == COMPONENT_REF
543 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
544 return false;
545 /* In the future we might want to use get_base_ref_and_offset to find
546 if there is a field corresponding to the offset and if so, proceed
547 almost like if it was a component ref. */
548 }
549 }
550 return true;
551 }
552
553 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
554 to check whether a particular statement may modify the virtual table
555 pointer. It stores its result into DATA, which points to a
556 prop_type_change_info structure. */
557
558 static bool
559 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
560 {
561 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
562 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
563
564 if (stmt_may_be_vtbl_ptr_store (stmt))
565 {
566 tci->type_maybe_changed = true;
567 return true;
568 }
569 else
570 return false;
571 }
572
573 /* See if ARG is a PARM_DECL describing an instance passed by pointer
574 or reference in FUNCTION. Return true if the dynamic type may change
575 between the beginning of the function and the point where CALL is invoked.
576
577 Generally functions are not allowed to change the type of such instances,
578 but they may call destructors. We assume that methods cannot destroy the
579 THIS pointer. Also, as a special case, constructors and destructors may
580 change the type of the THIS pointer. */
581
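/* For illustration (hypothetical C++): in

     void use (A *a) { a->foo (); }

   the dynamic type of *a cannot change inside use unless an inlined
   constructor or destructor is found on the block stack of the call,
   whereas for the THIS pointer of a constructor or destructor of A the
   dynamic type does change and we must return true.  */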
582 static bool
583 param_type_may_change_p (tree function, tree arg, gimple *call)
584 {
585 /* Pure functions cannot change the dynamic type;
586 that requires writing to memory. */
587 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
588 return false;
589 /* We need to check if we are within an inlined constructor
590 or destructor (ideally we would have a way to check that the
591 inlined cdtor is actually working on ARG, but we don't have
592 an easy tie on this, so punt on all non-pure cdtors.
593 We may also record the types of cdtors and, once we know the type
594 of the instance, match them.
595
596 Also, code unification optimizations may merge calls from
597 different blocks, making return values unreliable. So
598 do nothing during late optimization. */
599 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
600 return true;
601 if (TREE_CODE (arg) == SSA_NAME
602 && SSA_NAME_IS_DEFAULT_DEF (arg)
603 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
604 {
605 /* Normal (non-THIS) argument. */
606 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
607 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
608 /* THIS pointer of a method - here we want to watch constructors
609 and destructors as those definitely may change the dynamic
610 type. */
611 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
612 && !DECL_CXX_CONSTRUCTOR_P (function)
613 && !DECL_CXX_DESTRUCTOR_P (function)
614 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
615 {
616 /* Walk the inline stack and watch out for ctors/dtors. */
617 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
618 block = BLOCK_SUPERCONTEXT (block))
619 if (inlined_polymorphic_ctor_dtor_block_p (block, false))
620 return true;
621 return false;
622 }
623 }
624 return true;
625 }
626
627 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
628 callsite CALL) by looking for assignments to its virtual table pointer. If
629 it is, return true and fill in the jump function JFUNC with relevant type
630 information or set it to unknown. ARG is the object itself (not a pointer
631 to it, unless dereferenced). BASE is the base of the memory access as
632 returned by get_ref_base_and_extent, as is the offset.
633
634 This is a helper function for detect_type_change and detect_type_change_ssa
635 that does the heavy work, which is usually unnecessary. */
636
637 static bool
638 detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
639 gcall *call, struct ipa_jump_func *jfunc,
640 HOST_WIDE_INT offset)
641 {
642 struct prop_type_change_info tci;
643 ao_ref ao;
644 bool entry_reached = false;
645
646 gcc_checking_assert (DECL_P (arg)
647 || TREE_CODE (arg) == MEM_REF
648 || handled_component_p (arg));
649
650 comp_type = TYPE_MAIN_VARIANT (comp_type);
651
652 /* Const calls cannot call virtual methods through VMT and so type changes do
653 not matter. */
654 if (!flag_devirtualize || !gimple_vuse (call)
655 /* Be sure expected_type is polymorphic. */
656 || !comp_type
657 || TREE_CODE (comp_type) != RECORD_TYPE
658 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
659 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
660 return true;
661
662 ao_ref_init (&ao, arg);
663 ao.base = base;
664 ao.offset = offset;
665 ao.size = POINTER_SIZE;
666 ao.max_size = ao.size;
667
668 tci.offset = offset;
669 tci.object = get_base_address (arg);
670 tci.type_maybe_changed = false;
671
672 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
673 &tci, NULL, &entry_reached);
674 if (!tci.type_maybe_changed)
675 return false;
676
677 ipa_set_jf_unknown (jfunc);
678 return true;
679 }
680
681 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
682 If it is, return true and fill in the jump function JFUNC with relevant type
683 information or set it to unknown. ARG is the object itself (not a pointer
684 to it, unless dereferenced). BASE is the base of the memory access as
685 returned by get_ref_base_and_extent, as is the offset. */
686
687 static bool
688 detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
689 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
690 {
691 if (!flag_devirtualize)
692 return false;
693
694 if (TREE_CODE (base) == MEM_REF
695 && !param_type_may_change_p (current_function_decl,
696 TREE_OPERAND (base, 0),
697 call))
698 return false;
699 return detect_type_change_from_memory_writes (arg, base, comp_type,
700 call, jfunc, offset);
701 }
702
703 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
704 SSA name (its dereference will become the base and the offset is assumed to
705 be zero). */
706
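/* E.g. for an SSA pointer argument p_1(D), we construct the hypothetical
   reference MEM[(void *)p_1 + 0B] and look for statements before CALL that
   may store to the virtual table pointer of *p_1.  */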
707 static bool
708 detect_type_change_ssa (tree arg, tree comp_type,
709 gcall *call, struct ipa_jump_func *jfunc)
710 {
711 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
712 if (!flag_devirtualize
713 || !POINTER_TYPE_P (TREE_TYPE (arg)))
714 return false;
715
716 if (!param_type_may_change_p (current_function_decl, arg, call))
717 return false;
718
719 arg = build2 (MEM_REF, ptr_type_node, arg,
720 build_int_cst (ptr_type_node, 0));
721
722 return detect_type_change_from_memory_writes (arg, arg, comp_type,
723 call, jfunc, 0);
724 }
725
726 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
727 boolean variable pointed to by DATA. */
728
729 static bool
730 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
731 void *data)
732 {
733 bool *b = (bool *) data;
734 *b = true;
735 return true;
736 }
737
738 /* Return true if we have already walked so many statements in AA that we
739 should really just start giving up. */
740
741 static bool
742 aa_overwalked (struct ipa_func_body_info *fbi)
743 {
744 gcc_checking_assert (fbi);
745 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
746 }
747
748 /* Find the nearest valid aa status for parameter specified by INDEX that
749 dominates BB. */
750
751 static struct ipa_param_aa_status *
752 find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
753 int index)
754 {
755 while (true)
756 {
757 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
758 if (!bb)
759 return NULL;
760 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
761 if (!bi->param_aa_statuses.is_empty ()
762 && bi->param_aa_statuses[index].valid)
763 return &bi->param_aa_statuses[index];
764 }
765 }
766
767 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
768 structures and/or initialize the result with a dominating description as
769 necessary. */
770
771 static struct ipa_param_aa_status *
772 parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
773 int index)
774 {
775 gcc_checking_assert (fbi);
776 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
777 if (bi->param_aa_statuses.is_empty ())
778 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
779 struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
780 if (!paa->valid)
781 {
782 gcc_checking_assert (!paa->parm_modified
783 && !paa->ref_modified
784 && !paa->pt_modified);
785 struct ipa_param_aa_status *dom_paa;
786 dom_paa = find_dominating_aa_status (fbi, bb, index);
787 if (dom_paa)
788 *paa = *dom_paa;
789 else
790 paa->valid = true;
791 }
792
793 return paa;
794 }
795
796 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
797 a value known not to be modified in this function before reaching the
798 statement STMT. FBI holds information about the function gathered so far
799 but which does not survive the summary building stage. */
800
801 static bool
802 parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
803 gimple *stmt, tree parm_load)
804 {
805 struct ipa_param_aa_status *paa;
806 bool modified = false;
807 ao_ref refd;
808
809 /* FIXME: FBI can be NULL if we are being called from outside
810 ipa_node_analysis or ipcp_transform_function, which currently happens
811 during inlining analysis. It would be great to extend fbi's lifetime and
812 always have it. Currently, we are just not afraid of too much walking in
813 that case. */
814 if (fbi)
815 {
816 if (aa_overwalked (fbi))
817 return false;
818 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
819 if (paa->parm_modified)
820 return false;
821 }
822 else
823 paa = NULL;
824
825 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
826 ao_ref_init (&refd, parm_load);
827 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
828 &modified, NULL);
829 if (fbi)
830 fbi->aa_walked += walked;
831 if (paa && modified)
832 paa->parm_modified = true;
833 return !modified;
834 }
835
836 /* If STMT is an assignment that loads a value from a parameter declaration,
837 return the index of the parameter in ipa_node_params which has not been
838 modified. Otherwise return -1. */
839
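/* E.g. (illustrative gimple), for a formal parameter A that is not a gimple
   register:

     a.0_2 = a;

   return the index of A, provided nothing that may modify A is walked over
   between the beginning of the function and STMT.  */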
840 static int
841 load_from_unmodified_param (struct ipa_func_body_info *fbi,
842 vec<ipa_param_descriptor> descriptors,
843 gimple *stmt)
844 {
845 int index;
846 tree op1;
847
848 if (!gimple_assign_single_p (stmt))
849 return -1;
850
851 op1 = gimple_assign_rhs1 (stmt);
852 if (TREE_CODE (op1) != PARM_DECL)
853 return -1;
854
855 index = ipa_get_param_decl_index_1 (descriptors, op1);
856 if (index < 0
857 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
858 return -1;
859
860 return index;
861 }
862
863 /* Return true if memory reference REF (which must be a load through parameter
864 with INDEX) loads data that are known to be unmodified in this function
865 before reaching statement STMT. */
866
867 static bool
868 parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
869 int index, gimple *stmt, tree ref)
870 {
871 struct ipa_param_aa_status *paa;
872 bool modified = false;
873 ao_ref refd;
874
875 /* FIXME: FBI can be NULL if we are being called from outside
876 ipa_node_analysis or ipcp_transform_function, which currently happens
877 during inlining analysis. It would be great to extend fbi's lifetime and
878 always have it. Currently, we are just not afraid of too much walking in
879 that case. */
880 if (fbi)
881 {
882 if (aa_overwalked (fbi))
883 return false;
884 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
885 if (paa->ref_modified)
886 return false;
887 }
888 else
889 paa = NULL;
890
891 gcc_checking_assert (gimple_vuse (stmt));
892 ao_ref_init (&refd, ref);
893 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
894 &modified, NULL);
895 if (fbi)
896 fbi->aa_walked += walked;
897 if (paa && modified)
898 paa->ref_modified = true;
899 return !modified;
900 }
901
902 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
903 is known to be unmodified in this function before reaching call statement
904 CALL into which it is passed. FBI describes the function body. */
905
906 static bool
907 parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
908 gimple *call, tree parm)
909 {
910 bool modified = false;
911 ao_ref refd;
912
913 /* It's unnecessary to calculate anything about memory contents for a const
914 function because it is not going to use it. But do not cache the result
915 either. Also, no such calculations for non-pointers. */
916 if (!gimple_vuse (call)
917 || !POINTER_TYPE_P (TREE_TYPE (parm))
918 || aa_overwalked (fbi))
919 return false;
920
921 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
922 gimple_bb (call),
923 index);
924 if (paa->pt_modified)
925 return false;
926
927 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
928 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
929 &modified, NULL);
930 fbi->aa_walked += walked;
931 if (modified)
932 paa->pt_modified = true;
933 return !modified;
934 }
935
936 /* Return true if we can prove that OP is a memory reference loading unmodified
937 data from an aggregate passed as a parameter and if the aggregate is passed
938 by reference, that the alias type of the load corresponds to the type of the
939 formal parameter (so that we can rely on this type for TBAA in callers).
940 INFO and PARMS_AINFO describe parameters of the current function (but the
941 latter can be NULL), STMT is the load statement. If function returns true,
942 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
943 within the aggregate and whether it is a load from a value passed by
944 reference respectively. */
945
946 bool
947 ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
948 vec<ipa_param_descriptor> descriptors,
949 gimple *stmt, tree op, int *index_p,
950 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
951 bool *by_ref_p)
952 {
953 int index;
954 HOST_WIDE_INT size, max_size;
955 bool reverse;
956 tree base
957 = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);
958
959 if (max_size == -1 || max_size != size || *offset_p < 0)
960 return false;
961
962 if (DECL_P (base))
963 {
964 int index = ipa_get_param_decl_index_1 (descriptors, base);
965 if (index >= 0
966 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
967 {
968 *index_p = index;
969 *by_ref_p = false;
970 if (size_p)
971 *size_p = size;
972 return true;
973 }
974 return false;
975 }
976
977 if (TREE_CODE (base) != MEM_REF
978 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
979 || !integer_zerop (TREE_OPERAND (base, 1)))
980 return false;
981
982 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
983 {
984 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
985 index = ipa_get_param_decl_index_1 (descriptors, parm);
986 }
987 else
988 {
989 /* This branch catches situations where a pointer parameter is not a
990 gimple register, for example:
991
992 void hip7(S*) (struct S * p)
993 {
994 void (*<T2e4>) (struct S *) D.1867;
995 struct S * p.1;
996
997 <bb 2>:
998 p.1_1 = p;
999 D.1867_2 = p.1_1->f;
1000 D.1867_2 ();
1001 gdp = &p;
1002 */
1003
1004 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1005 index = load_from_unmodified_param (fbi, descriptors, def);
1006 }
1007
1008 if (index >= 0
1009 && parm_ref_data_preserved_p (fbi, index, stmt, op))
1010 {
1011 *index_p = index;
1012 *by_ref_p = true;
1013 if (size_p)
1014 *size_p = size;
1015 return true;
1016 }
1017 return false;
1018 }
1019
1020 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1021 of an assignment statement STMT, try to determine whether we are actually
1022 handling any of the following cases and construct an appropriate jump
1023 function into JFUNC if so:
1024
1025 1) The passed value is loaded from a formal parameter which is not a gimple
1026 register (most probably because it is addressable, the value has to be
1027 scalar) and we can guarantee the value has not changed. This case can
1028 therefore be described by a simple pass-through jump function. For example:
1029
1030 foo (int a)
1031 {
1032 int a.0;
1033
1034 a.0_2 = a;
1035 bar (a.0_2);
1036
1037 2) The passed value can be described by a simple arithmetic pass-through
1038 jump function. E.g.
1039
1040 foo (int a)
1041 {
1042 int D.2064;
1043
1044 D.2064_4 = a.1(D) + 4;
1045 bar (D.2064_4);
1046
1047 This case can also occur in combination with the previous one, e.g.:
1048
1049 foo (int a, int z)
1050 {
1051 int a.0;
1052 int D.2064;
1053
1054 a.0_3 = a;
1055 D.2064_4 = a.0_3 + 4;
1056 foo (D.2064_4);
1057
1058 3) The passed value is an address of an object within another one (which
1059 is also passed by reference). Such situations are described by an ancestor
1060 jump function and describe situations such as:
1061
1062 B::foo() (struct B * const this)
1063 {
1064 struct A * D.1845;
1065
1066 D.1845_2 = &this_1(D)->D.1748;
1067 A::bar (D.1845_2);
1068
1069 INFO is the structure describing individual parameters, accessed at different
1070 stages of IPA optimizations. PARMS_AINFO contains the information that is
1071 only needed for intraprocedural analysis. */
1072
1073 static void
1074 compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
1075 struct ipa_node_params *info,
1076 struct ipa_jump_func *jfunc,
1077 gcall *call, gimple *stmt, tree name,
1078 tree param_type)
1079 {
1080 HOST_WIDE_INT offset, size, max_size;
1081 tree op1, tc_ssa, base, ssa;
1082 bool reverse;
1083 int index;
1084
1085 op1 = gimple_assign_rhs1 (stmt);
1086
1087 if (TREE_CODE (op1) == SSA_NAME)
1088 {
1089 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1090 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1091 else
1092 index = load_from_unmodified_param (fbi, info->descriptors,
1093 SSA_NAME_DEF_STMT (op1));
1094 tc_ssa = op1;
1095 }
1096 else
1097 {
1098 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1099 tc_ssa = gimple_assign_lhs (stmt);
1100 }
1101
1102 if (index >= 0)
1103 {
1104 tree op2 = gimple_assign_rhs2 (stmt);
1105
1106 if (op2)
1107 {
1108 if (!is_gimple_ip_invariant (op2)
1109 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1110 && !useless_type_conversion_p (TREE_TYPE (name),
1111 TREE_TYPE (op1))))
1112 return;
1113
1114 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1115 gimple_assign_rhs_code (stmt));
1116 }
1117 else if (gimple_assign_single_p (stmt))
1118 {
1119 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
1120 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1121 }
1122 return;
1123 }
1124
1125 if (TREE_CODE (op1) != ADDR_EXPR)
1126 return;
1127 op1 = TREE_OPERAND (op1, 0);
1128 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1129 return;
1130 base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
1131 if (TREE_CODE (base) != MEM_REF
1132 /* If this is a varying address, punt. */
1133 || max_size == -1
1134 || max_size != size)
1135 return;
1136 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
1137 ssa = TREE_OPERAND (base, 0);
1138 if (TREE_CODE (ssa) != SSA_NAME
1139 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1140 || offset < 0)
1141 return;
1142
1143 /* Dynamic types are changed in constructors and destructors. */
1144 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1145 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1146 ipa_set_ancestor_jf (jfunc, offset, index,
1147 parm_ref_data_pass_through_p (fbi, index, call, ssa));
1148 }
1149
1150 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1151 it looks like:
1152
1153 iftmp.1_3 = &obj_2(D)->D.1762;
1154
1155 The base of the MEM_REF must be a default definition SSA NAME of a
1156 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1157 whole MEM_REF expression is returned and the offset calculated from any
1158 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1159 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1160
1161 static tree
1162 get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
1163 {
1164 HOST_WIDE_INT size, max_size;
1165 tree expr, parm, obj;
1166 bool reverse;
1167
1168 if (!gimple_assign_single_p (assign))
1169 return NULL_TREE;
1170 expr = gimple_assign_rhs1 (assign);
1171
1172 if (TREE_CODE (expr) != ADDR_EXPR)
1173 return NULL_TREE;
1174 expr = TREE_OPERAND (expr, 0);
1175 obj = expr;
1176 expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);
1177
1178 if (TREE_CODE (expr) != MEM_REF
1179 /* If this is a varying address, punt. */
1180 || max_size == -1
1181 || max_size != size
1182 || *offset < 0)
1183 return NULL_TREE;
1184 parm = TREE_OPERAND (expr, 0);
1185 if (TREE_CODE (parm) != SSA_NAME
1186 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1187 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1188 return NULL_TREE;
1189
1190 *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
1191 *obj_p = obj;
1192 return expr;
1193 }
1194
1195
1196 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1197 statement PHI, try to find out whether NAME is in fact a
1198 multiple-inheritance typecast from a descendant into an ancestor of a formal
1199 parameter and thus can be described by an ancestor jump function and if so,
1200 write the appropriate function into JFUNC.
1201
1202 Essentially we want to match the following pattern:
1203
1204 if (obj_2(D) != 0B)
1205 goto <bb 3>;
1206 else
1207 goto <bb 4>;
1208
1209 <bb 3>:
1210 iftmp.1_3 = &obj_2(D)->D.1762;
1211
1212 <bb 4>:
1213 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1214 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1215 return D.1879_6; */
1216
1217 static void
1218 compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
1219 struct ipa_node_params *info,
1220 struct ipa_jump_func *jfunc,
1221 gcall *call, gphi *phi)
1222 {
1223 HOST_WIDE_INT offset;
1224 gimple *assign, *cond;
1225 basic_block phi_bb, assign_bb, cond_bb;
1226 tree tmp, parm, expr, obj;
1227 int index, i;
1228
1229 if (gimple_phi_num_args (phi) != 2)
1230 return;
1231
1232 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1233 tmp = PHI_ARG_DEF (phi, 0);
1234 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1235 tmp = PHI_ARG_DEF (phi, 1);
1236 else
1237 return;
1238 if (TREE_CODE (tmp) != SSA_NAME
1239 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1240 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1241 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1242 return;
1243
1244 assign = SSA_NAME_DEF_STMT (tmp);
1245 assign_bb = gimple_bb (assign);
1246 if (!single_pred_p (assign_bb))
1247 return;
1248 expr = get_ancestor_addr_info (assign, &obj, &offset);
1249 if (!expr)
1250 return;
1251 parm = TREE_OPERAND (expr, 0);
1252 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1253 if (index < 0)
1254 return;
1255
1256 cond_bb = single_pred (assign_bb);
1257 cond = last_stmt (cond_bb);
1258 if (!cond
1259 || gimple_code (cond) != GIMPLE_COND
1260 || gimple_cond_code (cond) != NE_EXPR
1261 || gimple_cond_lhs (cond) != parm
1262 || !integer_zerop (gimple_cond_rhs (cond)))
1263 return;
1264
1265 phi_bb = gimple_bb (phi);
1266 for (i = 0; i < 2; i++)
1267 {
1268 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1269 if (pred != assign_bb && pred != cond_bb)
1270 return;
1271 }
1272
1273 ipa_set_ancestor_jf (jfunc, offset, index,
1274 parm_ref_data_pass_through_p (fbi, index, call, parm));
1275 }
1276
1277 /* Inspect the given TYPE and return true iff it has the same structure (the
1278 same number of fields of the same types) as a C++ member pointer. If
1279 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1280 corresponding fields there. */
1281
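/* For illustration, the records matched here have the shape of the common
   C++ member pointer representation, roughly (hypothetically):

     struct
     {
       int (*__pfn) (void *);   // pointer to the method (or vtable index)
       long __delta;            // adjustment of the THIS pointer
     };  */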
1282 static bool
1283 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1284 {
1285 tree fld;
1286
1287 if (TREE_CODE (type) != RECORD_TYPE)
1288 return false;
1289
1290 fld = TYPE_FIELDS (type);
1291 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1292 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1293 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1294 return false;
1295
1296 if (method_ptr)
1297 *method_ptr = fld;
1298
1299 fld = DECL_CHAIN (fld);
1300 if (!fld || INTEGRAL_TYPE_P (fld)
1301 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1302 return false;
1303 if (delta)
1304 *delta = fld;
1305
1306 if (DECL_CHAIN (fld))
1307 return false;
1308
1309 return true;
1310 }
1311
1312 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1313 return the rhs of its defining statement. Otherwise return RHS as it
1314 is. */
1315
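/* E.g. given  b_2 = a_1(D);  c_3 = b_2;  looking up c_3 yields a_1(D).  */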
1316 static inline tree
1317 get_ssa_def_if_simple_copy (tree rhs)
1318 {
1319 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1320 {
1321 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
1322
1323 if (gimple_assign_single_p (def_stmt))
1324 rhs = gimple_assign_rhs1 (def_stmt);
1325 else
1326 break;
1327 }
1328 return rhs;
1329 }
1330
1331 /* Simple linked list, describing known contents of an aggregate before a
1332 call. */
1333
1334 struct ipa_known_agg_contents_list
1335 {
1336 /* Offset and size of the described part of the aggregate. */
1337 HOST_WIDE_INT offset, size;
1338 /* Known constant value or NULL if the contents is known to be unknown. */
1339 tree constant;
1340 /* Pointer to the next structure in the list. */
1341 struct ipa_known_agg_contents_list *next;
1342 };
1343
1344 /* Find the proper place in linked list of ipa_known_agg_contents_list
1345 structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
1346 unless there is a partial overlap, in which case return NULL, or such
1347 element is already there, in which case set *ALREADY_THERE to true. */
1348
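/* For instance, with a list already describing bits [0, 32) and [64, 96),
   asking for [32, 64) returns the link between the two entries, asking for
   [16, 48) (a partial overlap) returns NULL, and asking for [0, 32) again
   sets *ALREADY_THERE.  */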
1349 static struct ipa_known_agg_contents_list **
1350 get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
1351 HOST_WIDE_INT lhs_offset,
1352 HOST_WIDE_INT lhs_size,
1353 bool *already_there)
1354 {
1355 struct ipa_known_agg_contents_list **p = list;
1356 while (*p && (*p)->offset < lhs_offset)
1357 {
1358 if ((*p)->offset + (*p)->size > lhs_offset)
1359 return NULL;
1360 p = &(*p)->next;
1361 }
1362
1363 if (*p && (*p)->offset < lhs_offset + lhs_size)
1364 {
1365 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1366 /* We already know this value is subsequently overwritten with
1367 something else. */
1368 *already_there = true;
1369 else
1370 /* Otherwise this is a partial overlap which we cannot
1371 represent. */
1372 return NULL;
1373 }
1374 return p;
1375 }
1376
1377 /* Build aggregate jump function from LIST, assuming there are exactly
1378 CONST_COUNT constant entries there and that the offset of the passed argument
1379 is ARG_OFFSET and store it into JFUNC. */
1380
1381 static void
1382 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1383 int const_count, HOST_WIDE_INT arg_offset,
1384 struct ipa_jump_func *jfunc)
1385 {
1386 vec_alloc (jfunc->agg.items, const_count);
1387 while (list)
1388 {
1389 if (list->constant)
1390 {
1391 struct ipa_agg_jf_item item;
1392 item.offset = list->offset - arg_offset;
1393 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1394 item.value = unshare_expr_without_location (list->constant);
1395 jfunc->agg.items->quick_push (item);
1396 }
1397 list = list->next;
1398 }
1399 }
1400
1401 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1402 in ARG is filled in with constant values. ARG can either be an aggregate
1403 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1404 aggregate. JFUNC is the jump function into which the constants are
1405 subsequently stored. */
1406
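/* For example (hypothetical), for a caller like

     struct S s;
     s.a = 1;         // recorded as a constant at the offset of field a
     s.b = i_2(D);    // recorded, but with no known constant value
     foo (&s);        // the aggregate is passed by reference

   the resulting jump function describes the contents of field a only.  */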
1407 static void
1408 determine_locally_known_aggregate_parts (gcall *call, tree arg,
1409 tree arg_type,
1410 struct ipa_jump_func *jfunc)
1411 {
1412 struct ipa_known_agg_contents_list *list = NULL;
1413 int item_count = 0, const_count = 0;
1414 HOST_WIDE_INT arg_offset, arg_size;
1415 gimple_stmt_iterator gsi;
1416 tree arg_base;
1417 bool check_ref, by_ref;
1418 ao_ref r;
1419
1420 /* The function operates in three stages. First, we prepare check_ref, r,
1421 arg_base and arg_offset based on what is actually passed as an actual
1422 argument. */
1423
1424 if (POINTER_TYPE_P (arg_type))
1425 {
1426 by_ref = true;
1427 if (TREE_CODE (arg) == SSA_NAME)
1428 {
1429 tree type_size;
1430 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1431 return;
1432 check_ref = true;
1433 arg_base = arg;
1434 arg_offset = 0;
1435 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1436 arg_size = tree_to_uhwi (type_size);
1437 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1438 }
1439 else if (TREE_CODE (arg) == ADDR_EXPR)
1440 {
1441 HOST_WIDE_INT arg_max_size;
1442 bool reverse;
1443
1444 arg = TREE_OPERAND (arg, 0);
1445 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1446 &arg_max_size, &reverse);
1447 if (arg_max_size == -1
1448 || arg_max_size != arg_size
1449 || arg_offset < 0)
1450 return;
1451 if (DECL_P (arg_base))
1452 {
1453 check_ref = false;
1454 ao_ref_init (&r, arg_base);
1455 }
1456 else
1457 return;
1458 }
1459 else
1460 return;
1461 }
1462 else
1463 {
1464 HOST_WIDE_INT arg_max_size;
1465 bool reverse;
1466
1467 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1468
1469 by_ref = false;
1470 check_ref = false;
1471 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1472 &arg_max_size, &reverse);
1473 if (arg_max_size == -1
1474 || arg_max_size != arg_size
1475 || arg_offset < 0)
1476 return;
1477
1478 ao_ref_init (&r, arg);
1479 }
1480
1481 /* Second stage walks back the BB, looks at individual statements and, as long
1482 as it is confident of how the statements affect contents of the
1483 aggregates, builds a sorted linked list of ipa_known_agg_contents_list
1484 structures describing it. */
1485 gsi = gsi_for_stmt (call);
1486 gsi_prev (&gsi);
1487 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1488 {
1489 struct ipa_known_agg_contents_list *n, **p;
1490 gimple *stmt = gsi_stmt (gsi);
1491 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1492 tree lhs, rhs, lhs_base;
1493 bool reverse;
1494
1495 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1496 continue;
1497 if (!gimple_assign_single_p (stmt))
1498 break;
1499
1500 lhs = gimple_assign_lhs (stmt);
1501 rhs = gimple_assign_rhs1 (stmt);
1502 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1503 || TREE_CODE (lhs) == BIT_FIELD_REF
1504 || contains_bitfld_component_ref_p (lhs))
1505 break;
1506
1507 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1508 &lhs_max_size, &reverse);
1509 if (lhs_max_size == -1
1510 || lhs_max_size != lhs_size)
1511 break;
1512
1513 if (check_ref)
1514 {
1515 if (TREE_CODE (lhs_base) != MEM_REF
1516 || TREE_OPERAND (lhs_base, 0) != arg_base
1517 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1518 break;
1519 }
1520 else if (lhs_base != arg_base)
1521 {
1522 if (DECL_P (lhs_base))
1523 continue;
1524 else
1525 break;
1526 }
1527
1528 bool already_there = false;
1529 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1530 &already_there);
1531 if (!p)
1532 break;
1533 if (already_there)
1534 continue;
1535
1536 rhs = get_ssa_def_if_simple_copy (rhs);
1537 n = XALLOCA (struct ipa_known_agg_contents_list);
1538 n->size = lhs_size;
1539 n->offset = lhs_offset;
1540 if (is_gimple_ip_invariant (rhs))
1541 {
1542 n->constant = rhs;
1543 const_count++;
1544 }
1545 else
1546 n->constant = NULL_TREE;
1547 n->next = *p;
1548 *p = n;
1549
1550 item_count++;
1551 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1552 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1553 break;
1554 }
1555
1556 /* Third stage just goes over the list and creates an appropriate vector of
1557 ipa_agg_jf_item structures out of it, of course only if there are
1558 any known constants to begin with. */
1559
1560 if (const_count)
1561 {
1562 jfunc->agg.by_ref = by_ref;
1563 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1564 }
1565 }
1566
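/* Return the type of the Ith formal parameter of the callee of edge E, as
   given by the declared argument types or, failing that, by the callee's
   DECL_ARGUMENTS.  Return NULL if the type cannot be determined.  */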
1567 static tree
1568 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1569 {
1570 int n;
1571 tree type = (e->callee
1572 ? TREE_TYPE (e->callee->decl)
1573 : gimple_call_fntype (e->call_stmt));
1574 tree t = TYPE_ARG_TYPES (type);
1575
1576 for (n = 0; n < i; n++)
1577 {
1578 if (!t)
1579 break;
1580 t = TREE_CHAIN (t);
1581 }
1582 if (t)
1583 return TREE_VALUE (t);
1584 if (!e->callee)
1585 return NULL;
1586 t = DECL_ARGUMENTS (e->callee->decl);
1587 for (n = 0; n < i; n++)
1588 {
1589 if (!t)
1590 return NULL;
1591 t = TREE_CHAIN (t);
1592 }
1593 if (t)
1594 return TREE_TYPE (t);
1595 return NULL;
1596 }
1597
1598 /* Compute jump function for all arguments of callsite CS and insert the
1599 information in the jump_functions array in the ipa_edge_args corresponding
1600 to this callsite. */
1601
1602 static void
1603 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
1604 struct cgraph_edge *cs)
1605 {
1606 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1607 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1608 gcall *call = cs->call_stmt;
1609 int n, arg_num = gimple_call_num_args (call);
1610 bool useful_context = false;
1611
1612 if (arg_num == 0 || args->jump_functions)
1613 return;
1614 vec_safe_grow_cleared (args->jump_functions, arg_num);
1615 if (flag_devirtualize)
1616 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1617
1618 if (gimple_call_internal_p (call))
1619 return;
1620 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1621 return;
1622
1623 for (n = 0; n < arg_num; n++)
1624 {
1625 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1626 tree arg = gimple_call_arg (call, n);
1627 tree param_type = ipa_get_callee_param_type (cs, n);
1628 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1629 {
1630 tree instance;
1631 struct ipa_polymorphic_call_context context (cs->caller->decl,
1632 arg, cs->call_stmt,
1633 &instance);
1634 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1635 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1636 if (!context.useless_p ())
1637 useful_context = true;
1638 }
1639
1640 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1641 {
1642 unsigned HOST_WIDE_INT hwi_bitpos;
1643 unsigned align;
1644
1645 if (get_pointer_alignment_1 (arg, &align, &hwi_bitpos)
1646 && align % BITS_PER_UNIT == 0
1647 && hwi_bitpos % BITS_PER_UNIT == 0)
1648 {
1649 gcc_checking_assert (align != 0);
1650 jfunc->alignment.known = true;
1651 jfunc->alignment.align = align / BITS_PER_UNIT;
1652 jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
1653 }
1654 else
1655 gcc_assert (!jfunc->alignment.known);
1656 }
1657 else
1658 gcc_assert (!jfunc->alignment.known);
1659
1660 if (is_gimple_ip_invariant (arg))
1661 ipa_set_jf_constant (jfunc, arg, cs);
1662 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1663 && TREE_CODE (arg) == PARM_DECL)
1664 {
1665 int index = ipa_get_param_decl_index (info, arg);
1666
1667 gcc_assert (index >= 0);
1668 /* Aggregate passed by value, check for pass-through, otherwise we
1669 will attempt to fill in aggregate contents later in this
1670 loop. */
1671 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1672 {
1673 ipa_set_jf_simple_pass_through (jfunc, index, false);
1674 continue;
1675 }
1676 }
1677 else if (TREE_CODE (arg) == SSA_NAME)
1678 {
1679 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1680 {
1681 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1682 if (index >= 0)
1683 {
1684 bool agg_p;
1685 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1686 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1687 }
1688 }
1689 else
1690 {
1691 gimple *stmt = SSA_NAME_DEF_STMT (arg);
1692 if (is_gimple_assign (stmt))
1693 compute_complex_assign_jump_func (fbi, info, jfunc,
1694 call, stmt, arg, param_type);
1695 else if (gimple_code (stmt) == GIMPLE_PHI)
1696 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1697 call,
1698 as_a <gphi *> (stmt));
1699 }
1700 }
1701
1702 /* If ARG is a pointer, we cannot use its type to determine the type of the
1703 aggregate passed (because type conversions are ignored in gimple). Usually we
1704 can safely get the type from the function declaration, but in case of K&R
1705 prototypes or variadic functions we can try our luck with the type of the
1706 pointer passed. TODO: Since we look for actual initialization of the memory
1707 object, we may be better off working out the type from the stores we find. */
1708 if (!param_type)
1709 param_type = TREE_TYPE (arg);
1710
1711 if ((jfunc->type != IPA_JF_PASS_THROUGH
1712 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1713 && (jfunc->type != IPA_JF_ANCESTOR
1714 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1715 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1716 || POINTER_TYPE_P (param_type)))
1717 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1718 }
1719 if (!useful_context)
1720 vec_free (args->polymorphic_call_contexts);
1721 }
1722
1723 /* Compute jump functions for all edges - both direct and indirect - outgoing
1724 from BB. */
1725
1726 static void
1727 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
1728 {
1729 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1730 int i;
1731 struct cgraph_edge *cs;
1732
1733 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1734 {
1735 struct cgraph_node *callee = cs->callee;
1736
1737 if (callee)
1738 {
1739 callee->ultimate_alias_target ();
1740 /* We do not need to bother analyzing calls to unknown functions
1741 unless they may become known during lto/whopr. */
1742 if (!callee->definition && !flag_lto)
1743 continue;
1744 }
1745 ipa_compute_jump_functions_for_edge (fbi, cs);
1746 }
1747 }
1748
1749 /* If STMT looks like a statement loading a value from a member pointer formal
1750 parameter, return that parameter and store the offset of the field to
1751 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1752 might be clobbered). If USE_DELTA, then we look for a use of the delta
1753 field rather than the pfn. */
1754
1755 static tree
1756 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
1757 HOST_WIDE_INT *offset_p)
1758 {
1759 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1760
1761 if (!gimple_assign_single_p (stmt))
1762 return NULL_TREE;
1763
1764 rhs = gimple_assign_rhs1 (stmt);
1765 if (TREE_CODE (rhs) == COMPONENT_REF)
1766 {
1767 ref_field = TREE_OPERAND (rhs, 1);
1768 rhs = TREE_OPERAND (rhs, 0);
1769 }
1770 else
1771 ref_field = NULL_TREE;
1772 if (TREE_CODE (rhs) != MEM_REF)
1773 return NULL_TREE;
1774 rec = TREE_OPERAND (rhs, 0);
1775 if (TREE_CODE (rec) != ADDR_EXPR)
1776 return NULL_TREE;
1777 rec = TREE_OPERAND (rec, 0);
1778 if (TREE_CODE (rec) != PARM_DECL
1779 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1780 return NULL_TREE;
1781 ref_offset = TREE_OPERAND (rhs, 1);
1782
1783 if (use_delta)
1784 fld = delta_field;
1785 else
1786 fld = ptr_field;
1787 if (offset_p)
1788 *offset_p = int_bit_position (fld);
1789
1790 if (ref_field)
1791 {
1792 if (integer_nonzerop (ref_offset))
1793 return NULL_TREE;
1794 return ref_field == fld ? rec : NULL_TREE;
1795 }
1796 else
1797 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1798 : NULL_TREE;
1799 }
1800
1801 /* Returns true iff T is an SSA_NAME defined by a statement. */
1802
1803 static bool
1804 ipa_is_ssa_with_stmt_def (tree t)
1805 {
1806 if (TREE_CODE (t) == SSA_NAME
1807 && !SSA_NAME_IS_DEFAULT_DEF (t))
1808 return true;
1809 else
1810 return false;
1811 }
1812
1813 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1814 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1815 indirect call graph edge. */
1816
1817 static struct cgraph_edge *
1818 ipa_note_param_call (struct cgraph_node *node, int param_index,
1819 gcall *stmt)
1820 {
1821 struct cgraph_edge *cs;
1822
1823 cs = node->get_edge (stmt);
1824 cs->indirect_info->param_index = param_index;
1825 cs->indirect_info->agg_contents = 0;
1826 cs->indirect_info->member_ptr = 0;
1827 return cs;
1828 }
1829
1830 /* Analyze the CALL and examine uses of formal parameters of the caller
1831 FBI->node (described by FBI->info); FBI also holds intermediate data
1832 about the analyzed function body. Currently it checks
1833 whether the call calls a pointer that is a formal parameter and if so, the
1834 parameter is marked with the called flag and an indirect call graph edge
1835 describing the call is created. This is very simple for ordinary pointers
1836 represented in SSA but not-so-nice when it comes to member pointers. The
1837 ugly part of this function does nothing more than trying to match the
1838 pattern of such a call. An example of such a pattern is the gimple dump
1839 below, the call is on the last line:
1840
1841 <bb 2>:
1842 f$__delta_5 = f.__delta;
1843 f$__pfn_24 = f.__pfn;
1844
1845 or
1846 <bb 2>:
1847 f$__delta_5 = MEM[(struct *)&f];
1848 f$__pfn_24 = MEM[(struct *)&f + 4B];
1849
1850 and a few lines below:
1851
1852 <bb 5>
1853 D.2496_3 = (int) f$__pfn_24;
1854 D.2497_4 = D.2496_3 & 1;
1855 if (D.2497_4 != 0)
1856 goto <bb 3>;
1857 else
1858 goto <bb 4>;
1859
1860 <bb 6>:
1861 D.2500_7 = (unsigned int) f$__delta_5;
1862 D.2501_8 = &S + D.2500_7;
1863 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1864 D.2503_10 = *D.2502_9;
1865 D.2504_12 = f$__pfn_24 + -1;
1866 D.2505_13 = (unsigned int) D.2504_12;
1867 D.2506_14 = D.2503_10 + D.2505_13;
1868 D.2507_15 = *D.2506_14;
1869 iftmp.11_16 = (String:: *) D.2507_15;
1870
1871 <bb 7>:
1872 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1873 D.2500_19 = (unsigned int) f$__delta_5;
1874 D.2508_20 = &S + D.2500_19;
1875 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1876
1877 Such patterns are results of simple calls to a member pointer:
1878
1879 int doprinting (int (MyString::* f)(int) const)
1880 {
1881 MyString S ("somestring");
1882
1883 return (S.*f)(4);
1884 }
1885
1886 Moreover, the function also looks for called pointers loaded from aggregates
1887 passed by value or reference. */
1888
1889 static void
1890 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
1891 tree target)
1892 {
1893 struct ipa_node_params *info = fbi->info;
1894 HOST_WIDE_INT offset;
1895 bool by_ref;
1896
1897 if (SSA_NAME_IS_DEFAULT_DEF (target))
1898 {
1899 tree var = SSA_NAME_VAR (target);
1900 int index = ipa_get_param_decl_index (info, var);
1901 if (index >= 0)
1902 ipa_note_param_call (fbi->node, index, call);
1903 return;
1904 }
1905
1906 int index;
1907 gimple *def = SSA_NAME_DEF_STMT (target);
1908 if (gimple_assign_single_p (def)
1909 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
1910 gimple_assign_rhs1 (def), &index, &offset,
1911 NULL, &by_ref))
1912 {
1913 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
1914 cs->indirect_info->offset = offset;
1915 cs->indirect_info->agg_contents = 1;
1916 cs->indirect_info->by_ref = by_ref;
1917 return;
1918 }
1919
1920 /* Now we need to try to match the complex pattern of calling a member
1921 pointer. */
1922 if (gimple_code (def) != GIMPLE_PHI
1923 || gimple_phi_num_args (def) != 2
1924 || !POINTER_TYPE_P (TREE_TYPE (target))
1925 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1926 return;
1927
1928 /* First, we need to check whether one of these is a load from a member
1929 pointer that is a parameter to this function. */
1930 tree n1 = PHI_ARG_DEF (def, 0);
1931 tree n2 = PHI_ARG_DEF (def, 1);
1932 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1933 return;
1934 gimple *d1 = SSA_NAME_DEF_STMT (n1);
1935 gimple *d2 = SSA_NAME_DEF_STMT (n2);
1936
1937 tree rec;
1938 basic_block bb, virt_bb;
1939 basic_block join = gimple_bb (def);
1940 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1941 {
1942 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1943 return;
1944
1945 bb = EDGE_PRED (join, 0)->src;
1946 virt_bb = gimple_bb (d2);
1947 }
1948 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1949 {
1950 bb = EDGE_PRED (join, 1)->src;
1951 virt_bb = gimple_bb (d1);
1952 }
1953 else
1954 return;
1955
1956 /* Second, we need to check that the basic blocks are laid out in the way
1957 corresponding to the pattern. */
1958
1959 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1960 || single_pred (virt_bb) != bb
1961 || single_succ (virt_bb) != join)
1962 return;
1963
1964 /* Third, let's see that the branching is done depending on the least
1965 significant bit of the pfn. */
1966
1967 gimple *branch = last_stmt (bb);
1968 if (!branch || gimple_code (branch) != GIMPLE_COND)
1969 return;
1970
1971 if ((gimple_cond_code (branch) != NE_EXPR
1972 && gimple_cond_code (branch) != EQ_EXPR)
1973 || !integer_zerop (gimple_cond_rhs (branch)))
1974 return;
1975
1976 tree cond = gimple_cond_lhs (branch);
1977 if (!ipa_is_ssa_with_stmt_def (cond))
1978 return;
1979
1980 def = SSA_NAME_DEF_STMT (cond);
1981 if (!is_gimple_assign (def)
1982 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1983 || !integer_onep (gimple_assign_rhs2 (def)))
1984 return;
1985
1986 cond = gimple_assign_rhs1 (def);
1987 if (!ipa_is_ssa_with_stmt_def (cond))
1988 return;
1989
1990 def = SSA_NAME_DEF_STMT (cond);
1991
1992 if (is_gimple_assign (def)
1993 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1994 {
1995 cond = gimple_assign_rhs1 (def);
1996 if (!ipa_is_ssa_with_stmt_def (cond))
1997 return;
1998 def = SSA_NAME_DEF_STMT (cond);
1999 }
2000
2001 tree rec2;
2002 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2003 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2004 == ptrmemfunc_vbit_in_delta),
2005 NULL);
2006 if (rec != rec2)
2007 return;
2008
2009 index = ipa_get_param_decl_index (info, rec);
2010 if (index >= 0
2011 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2012 {
2013 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2014 cs->indirect_info->offset = offset;
2015 cs->indirect_info->agg_contents = 1;
2016 cs->indirect_info->member_ptr = 1;
2017 }
2018
2019 return;
2020 }
2021
2022 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2023 object referenced in the expression is a formal parameter of the caller
2024 FBI->node (described by FBI->info), create a call note for the
2025 statement. */
2026
2027 static void
2028 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2029 gcall *call, tree target)
2030 {
2031 tree obj = OBJ_TYPE_REF_OBJECT (target);
2032 int index;
2033 HOST_WIDE_INT anc_offset;
2034
2035 if (!flag_devirtualize)
2036 return;
2037
2038 if (TREE_CODE (obj) != SSA_NAME)
2039 return;
2040
2041 struct ipa_node_params *info = fbi->info;
2042 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2043 {
2044 struct ipa_jump_func jfunc;
2045 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2046 return;
2047
2048 anc_offset = 0;
2049 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2050 gcc_assert (index >= 0);
2051 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2052 call, &jfunc))
2053 return;
2054 }
2055 else
2056 {
2057 struct ipa_jump_func jfunc;
2058 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2059 tree expr;
2060
2061 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2062 if (!expr)
2063 return;
2064 index = ipa_get_param_decl_index (info,
2065 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2066 gcc_assert (index >= 0);
2067 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2068 call, &jfunc, anc_offset))
2069 return;
2070 }
2071
2072 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2073 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2074 ii->offset = anc_offset;
2075 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2076 ii->otr_type = obj_type_ref_class (target);
2077 ii->polymorphic = 1;
2078 }
2079
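/* For illustration (an editor's sketch; the exact dump syntax varies), a
   virtual call through the parameter THIS whose vtable slot token is 2
   appears in the gimple dumps roughly as

     OBJ_TYPE_REF (iftmp_4; (struct A) this_2(D)->2) (this_2(D));

   The function above records THIS as the parameter the call depends on,
   2 as the OTR token and struct A as the OTR type.  */
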
2080 /* Analyze a call statement CALL to determine whether and how it utilizes
2081 formal parameters of the caller (described by FBI->info). FBI also holds
2082 intermediate data about the analyzed function body. */
2083
2084 static void
2085 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2086 {
2087 tree target = gimple_call_fn (call);
2088
2089 if (!target
2090 || (TREE_CODE (target) != SSA_NAME
2091 && !virtual_method_call_p (target)))
2092 return;
2093
2094 struct cgraph_edge *cs = fbi->node->get_edge (call);
2095 /* If we previously turned the call into a direct call, there is
2096 no need to analyze. */
2097 if (cs && !cs->indirect_unknown_callee)
2098 return;
2099
2100 if (cs->indirect_info->polymorphic && flag_devirtualize)
2101 {
2102 tree instance;
2103 tree target = gimple_call_fn (call);
2104 ipa_polymorphic_call_context context (current_function_decl,
2105 target, call, &instance);
2106
2107 gcc_checking_assert (cs->indirect_info->otr_type
2108 == obj_type_ref_class (target));
2109 gcc_checking_assert (cs->indirect_info->otr_token
2110 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2111
2112 cs->indirect_info->vptr_changed
2113 = !context.get_dynamic_type (instance,
2114 OBJ_TYPE_REF_OBJECT (target),
2115 obj_type_ref_class (target), call);
2116 cs->indirect_info->context = context;
2117 }
2118
2119 if (TREE_CODE (target) == SSA_NAME)
2120 ipa_analyze_indirect_call_uses (fbi, call, target);
2121 else if (virtual_method_call_p (target))
2122 ipa_analyze_virtual_call_uses (fbi, call, target);
2123 }
2124
2125
2126 /* Analyze the statement STMT with respect to the formal parameters (described
2127 in FBI->info) of the caller given by FBI->node. Currently it only checks whether
2128 formal parameters are called. */
2129
2130 static void
2131 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2132 {
2133 if (is_gimple_call (stmt))
2134 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2135 }
2136
2137 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2138 If OP is a parameter declaration, mark it as used in the info structure
2139 passed in DATA. */
2140
2141 static bool
2142 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2143 {
2144 struct ipa_node_params *info = (struct ipa_node_params *) data;
2145
2146 op = get_base_address (op);
2147 if (op
2148 && TREE_CODE (op) == PARM_DECL)
2149 {
2150 int index = ipa_get_param_decl_index (info, op);
2151 gcc_assert (index >= 0);
2152 ipa_set_param_used (info, index, true);
2153 }
2154
2155 return false;
2156 }
2157
2158 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2159 the findings in various structures of the associated ipa_node_params
2160 structure, such as parameter flags, notes etc. FBI holds various data about
2161 the function being analyzed. */
2162
2163 static void
2164 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2165 {
2166 gimple_stmt_iterator gsi;
2167 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2168 {
2169 gimple *stmt = gsi_stmt (gsi);
2170
2171 if (is_gimple_debug (stmt))
2172 continue;
2173
2174 ipa_analyze_stmt_uses (fbi, stmt);
2175 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2176 visit_ref_for_mod_analysis,
2177 visit_ref_for_mod_analysis,
2178 visit_ref_for_mod_analysis);
2179 }
2180 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2181 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2182 visit_ref_for_mod_analysis,
2183 visit_ref_for_mod_analysis,
2184 visit_ref_for_mod_analysis);
2185 }
2186
2187 /* Calculate controlled uses of parameters of NODE. */
2188
2189 static void
2190 ipa_analyze_controlled_uses (struct cgraph_node *node)
2191 {
2192 struct ipa_node_params *info = IPA_NODE_REF (node);
2193
2194 for (int i = 0; i < ipa_get_param_count (info); i++)
2195 {
2196 tree parm = ipa_get_param (info, i);
2197 int controlled_uses = 0;
2198
2199 /* For SSA regs see if parameter is used. For non-SSA we compute
2200 the flag during modification analysis. */
2201 if (is_gimple_reg (parm))
2202 {
2203 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2204 parm);
2205 if (ddef && !has_zero_uses (ddef))
2206 {
2207 imm_use_iterator imm_iter;
2208 use_operand_p use_p;
2209
2210 ipa_set_param_used (info, i, true);
2211 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2212 if (!is_gimple_call (USE_STMT (use_p)))
2213 {
2214 if (!is_gimple_debug (USE_STMT (use_p)))
2215 {
2216 controlled_uses = IPA_UNDESCRIBED_USE;
2217 break;
2218 }
2219 }
2220 else
2221 controlled_uses++;
2222 }
2223 else
2224 controlled_uses = 0;
2225 }
2226 else
2227 controlled_uses = IPA_UNDESCRIBED_USE;
2228 ipa_set_controlled_uses (info, i, controlled_uses);
2229 }
2230 }
2231
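/* A small worked example of the counting above (an editor's illustration):
   in

     extern int g (int *);
     int f (int *p) { return g (p) + g (p); }

   the default definition of P has exactly two uses and both are arguments
   of calls, so P ends up with controlled_uses == 2. Any other non-debug
   use, e.g. a dereference "*p", degrades the count to
   IPA_UNDESCRIBED_USE.  */
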
2232 /* Free stuff in BI. */
2233
2234 static void
2235 free_ipa_bb_info (struct ipa_bb_info *bi)
2236 {
2237 bi->cg_edges.release ();
2238 bi->param_aa_statuses.release ();
2239 }
2240
2241 /* Dominator walker driving the analysis. */
2242
2243 class analysis_dom_walker : public dom_walker
2244 {
2245 public:
2246 analysis_dom_walker (struct ipa_func_body_info *fbi)
2247 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2248
2249 virtual edge before_dom_children (basic_block);
2250
2251 private:
2252 struct ipa_func_body_info *m_fbi;
2253 };
2254
2255 edge
2256 analysis_dom_walker::before_dom_children (basic_block bb)
2257 {
2258 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2259 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2260 return NULL;
2261 }
2262
2263 /* Release body info FBI. */
2264
2265 void
2266 ipa_release_body_info (struct ipa_func_body_info *fbi)
2267 {
2268 int i;
2269 struct ipa_bb_info *bi;
2270
2271 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2272 free_ipa_bb_info (bi);
2273 fbi->bb_infos.release ();
2274 }
2275
2276 /* Initialize the array describing properties of formal parameters
2277 of NODE, analyze their uses and compute jump functions associated
2278 with actual arguments of calls from within NODE. */
2279
2280 void
2281 ipa_analyze_node (struct cgraph_node *node)
2282 {
2283 struct ipa_func_body_info fbi;
2284 struct ipa_node_params *info;
2285
2286 ipa_check_create_node_params ();
2287 ipa_check_create_edge_args ();
2288 info = IPA_NODE_REF (node);
2289
2290 if (info->analysis_done)
2291 return;
2292 info->analysis_done = 1;
2293
2294 if (ipa_func_spec_opts_forbid_analysis_p (node))
2295 {
2296 for (int i = 0; i < ipa_get_param_count (info); i++)
2297 {
2298 ipa_set_param_used (info, i, true);
2299 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2300 }
2301 return;
2302 }
2303
2304 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2305 push_cfun (func);
2306 calculate_dominance_info (CDI_DOMINATORS);
2307 ipa_initialize_node_params (node);
2308 ipa_analyze_controlled_uses (node);
2309
2310 fbi.node = node;
2311 fbi.info = IPA_NODE_REF (node);
2312 fbi.bb_infos = vNULL;
2313 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2314 fbi.param_count = ipa_get_param_count (info);
2315 fbi.aa_walked = 0;
2316
2317 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2318 {
2319 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2320 bi->cg_edges.safe_push (cs);
2321 }
2322
2323 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2324 {
2325 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2326 bi->cg_edges.safe_push (cs);
2327 }
2328
2329 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2330
2331 ipa_release_body_info (&fbi);
2332 free_dominance_info (CDI_DOMINATORS);
2333 pop_cfun ();
2334 }
2335
2336 /* Update the jump functions associated with call graph edge E when the call
2337 graph edge CS is being inlined, assuming that E->caller is already (possibly
2338 indirectly) inlined into CS->callee and that E has not been inlined. */
2339
2340 static void
2341 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2342 struct cgraph_edge *e)
2343 {
2344 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2345 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2346 int count = ipa_get_cs_argument_count (args);
2347 int i;
2348
2349 for (i = 0; i < count; i++)
2350 {
2351 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2352 struct ipa_polymorphic_call_context *dst_ctx
2353 = ipa_get_ith_polymorhic_call_context (args, i);
2354
2355 if (dst->type == IPA_JF_ANCESTOR)
2356 {
2357 struct ipa_jump_func *src;
2358 int dst_fid = dst->value.ancestor.formal_id;
2359 struct ipa_polymorphic_call_context *src_ctx
2360 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2361
2362 /* A variable number of arguments can cause havoc if we try to access
2363 an argument that does not exist on the inlined edge, so make sure
2364 we don't. */
2365 if (dst_fid >= ipa_get_cs_argument_count (top))
2366 {
2367 ipa_set_jf_unknown (dst);
2368 continue;
2369 }
2370
2371 src = ipa_get_ith_jump_func (top, dst_fid);
2372
2373 if (src_ctx && !src_ctx->useless_p ())
2374 {
2375 struct ipa_polymorphic_call_context ctx = *src_ctx;
2376
2377 /* TODO: Make type preserved safe WRT contexts. */
2378 if (!ipa_get_jf_ancestor_type_preserved (dst))
2379 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2380 ctx.offset_by (dst->value.ancestor.offset);
2381 if (!ctx.useless_p ())
2382 {
2383 if (!dst_ctx)
2384 {
2385 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2386 count);
2387 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2388 }
2389
2390 dst_ctx->combine_with (ctx);
2391 }
2392 }
2393
2394 if (src->agg.items
2395 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2396 {
2397 struct ipa_agg_jf_item *item;
2398 int j;
2399
2400 /* Currently we do not produce clobber aggregate jump functions,
2401 replace with merging when we do. */
2402 gcc_assert (!dst->agg.items);
2403
2404 dst->agg.items = vec_safe_copy (src->agg.items);
2405 dst->agg.by_ref = src->agg.by_ref;
2406 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2407 item->offset -= dst->value.ancestor.offset;
2408 }
2409
2410 if (src->type == IPA_JF_PASS_THROUGH
2411 && src->value.pass_through.operation == NOP_EXPR)
2412 {
2413 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2414 dst->value.ancestor.agg_preserved &=
2415 src->value.pass_through.agg_preserved;
2416 }
2417 else if (src->type == IPA_JF_ANCESTOR)
2418 {
2419 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2420 dst->value.ancestor.offset += src->value.ancestor.offset;
2421 dst->value.ancestor.agg_preserved &=
2422 src->value.ancestor.agg_preserved;
2423 }
2424 else
2425 ipa_set_jf_unknown (dst);
2426 }
2427 else if (dst->type == IPA_JF_PASS_THROUGH)
2428 {
2429 struct ipa_jump_func *src;
2430 /* We must check the range because of calls with a variable number of
2431 arguments, and we cannot combine jump functions with operations. */
2432 if (dst->value.pass_through.operation == NOP_EXPR
2433 && (dst->value.pass_through.formal_id
2434 < ipa_get_cs_argument_count (top)))
2435 {
2436 int dst_fid = dst->value.pass_through.formal_id;
2437 src = ipa_get_ith_jump_func (top, dst_fid);
2438 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2439 struct ipa_polymorphic_call_context *src_ctx
2440 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2441
2442 if (src_ctx && !src_ctx->useless_p ())
2443 {
2444 struct ipa_polymorphic_call_context ctx = *src_ctx;
2445
2446 /* TODO: Make type preserved safe WRT contexts. */
2447 if (!ipa_get_jf_pass_through_type_preserved (dst))
2448 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2449 if (!ctx.useless_p ())
2450 {
2451 if (!dst_ctx)
2452 {
2453 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2454 count);
2455 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2456 }
2457 dst_ctx->combine_with (ctx);
2458 }
2459 }
2460 switch (src->type)
2461 {
2462 case IPA_JF_UNKNOWN:
2463 ipa_set_jf_unknown (dst);
2464 break;
2465 case IPA_JF_CONST:
2466 ipa_set_jf_cst_copy (dst, src);
2467 break;
2468
2469 case IPA_JF_PASS_THROUGH:
2470 {
2471 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2472 enum tree_code operation;
2473 operation = ipa_get_jf_pass_through_operation (src);
2474
2475 if (operation == NOP_EXPR)
2476 {
2477 bool agg_p;
2478 agg_p = dst_agg_p
2479 && ipa_get_jf_pass_through_agg_preserved (src);
2480 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2481 }
2482 else
2483 {
2484 tree operand = ipa_get_jf_pass_through_operand (src);
2485 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2486 operation);
2487 }
2488 break;
2489 }
2490 case IPA_JF_ANCESTOR:
2491 {
2492 bool agg_p;
2493 agg_p = dst_agg_p
2494 && ipa_get_jf_ancestor_agg_preserved (src);
2495 ipa_set_ancestor_jf (dst,
2496 ipa_get_jf_ancestor_offset (src),
2497 ipa_get_jf_ancestor_formal_id (src),
2498 agg_p);
2499 break;
2500 }
2501 default:
2502 gcc_unreachable ();
2503 }
2504
2505 if (src->agg.items
2506 && (dst_agg_p || !src->agg.by_ref))
2507 {
2508 /* Currently we do not produce clobber aggregate jump
2509 functions, replace with merging when we do. */
2510 gcc_assert (!dst->agg.items);
2511
2512 dst->agg.by_ref = src->agg.by_ref;
2513 dst->agg.items = vec_safe_copy (src->agg.items);
2514 }
2515 }
2516 else
2517 ipa_set_jf_unknown (dst);
2518 }
2519 }
2520 }
2521
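/* To illustrate the composition performed above (an editor's sketch): let C
   call M (edge CS) passing its own parameter X as the argument, described by
   a simple pass-through jump function, and let M call F (edge E) forwarding
   the same argument, again as a pass-through of M's formal. When M is
   inlined into C, E's jump function is rewritten to be a pass-through of C's
   formal for X, i.e. the two jump functions are composed. Likewise, if CS's
   jump function were a constant such as &foo, E's jump function would become
   that constant (ipa_set_jf_cst_copy).  */
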
2522 /* If TARGET is an addr_expr of a function declaration, make it the
2523 (SPECULATIVE) destination of the indirect edge IE and return the edge.
2524 Otherwise, return NULL. */
2525
2526 struct cgraph_edge *
2527 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2528 bool speculative)
2529 {
2530 struct cgraph_node *callee;
2531 struct inline_edge_summary *es = inline_edge_summary (ie);
2532 bool unreachable = false;
2533
2534 if (TREE_CODE (target) == ADDR_EXPR)
2535 target = TREE_OPERAND (target, 0);
2536 if (TREE_CODE (target) != FUNCTION_DECL)
2537 {
2538 target = canonicalize_constructor_val (target, NULL);
2539 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2540 {
2541 /* Member pointer call that goes through a VMT lookup. */
2542 if (ie->indirect_info->member_ptr
2543 /* Or if the target is not an invariant expression and we do not
2544 know whether it will evaluate to a function at runtime.
2545 This can happen when folding through &VAR, where &VAR
2546 is IP invariant, but VAR itself is not.
2547
2548 TODO: Revisit this when GCC 5 is branched. It seems that
2549 member_ptr check is not needed and that we may try to fold
2550 the expression and see if VAR is readonly. */
2551 || !is_gimple_ip_invariant (target))
2552 {
2553 if (dump_enabled_p ())
2554 {
2555 location_t loc = gimple_location_safe (ie->call_stmt);
2556 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2557 "discovered direct call non-invariant "
2558 "%s/%i\n",
2559 ie->caller->name (), ie->caller->order);
2560 }
2561 return NULL;
2562 }
2563
2564
2565 if (dump_enabled_p ())
2566 {
2567 location_t loc = gimple_location_safe (ie->call_stmt);
2568 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2569 "discovered direct call to non-function in %s/%i, "
2570 "making it __builtin_unreachable\n",
2571 ie->caller->name (), ie->caller->order);
2572 }
2573
2574 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2575 callee = cgraph_node::get_create (target);
2576 unreachable = true;
2577 }
2578 else
2579 callee = cgraph_node::get (target);
2580 }
2581 else
2582 callee = cgraph_node::get (target);
2583
2584 /* Because may-edges are not explicitly represented and the vtable may be
2585 external, we may create the first reference to the object in this unit. */
2586 if (!callee || callee->global.inlined_to)
2587 {
2588
2589 /* We had better make sure we can refer to it.
2590 In the case of static functions we are out of luck, since we have
2591 already removed the body. In the case of public functions we may or
2592 may not introduce the reference. */
2593 if (!canonicalize_constructor_val (target, NULL)
2594 || !TREE_PUBLIC (target))
2595 {
2596 if (dump_file)
2597 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2598 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2599 xstrdup_for_dump (ie->caller->name ()),
2600 ie->caller->order,
2601 xstrdup_for_dump (ie->callee->name ()),
2602 ie->callee->order);
2603 return NULL;
2604 }
2605 callee = cgraph_node::get_create (target);
2606 }
2607
2608 /* If the edge is already speculated. */
2609 if (speculative && ie->speculative)
2610 {
2611 struct cgraph_edge *e2;
2612 struct ipa_ref *ref;
2613 ie->speculative_call_info (e2, ie, ref);
2614 if (e2->callee->ultimate_alias_target ()
2615 != callee->ultimate_alias_target ())
2616 {
2617 if (dump_file)
2618 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2619 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2620 xstrdup_for_dump (ie->caller->name ()),
2621 ie->caller->order,
2622 xstrdup_for_dump (callee->name ()),
2623 callee->order,
2624 xstrdup_for_dump (e2->callee->name ()),
2625 e2->callee->order);
2626 }
2627 else
2628 {
2629 if (dump_file)
2630 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2631 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2632 xstrdup_for_dump (ie->caller->name ()),
2633 ie->caller->order,
2634 xstrdup_for_dump (callee->name ()),
2635 callee->order);
2636 }
2637 return NULL;
2638 }
2639
2640 if (!dbg_cnt (devirt))
2641 return NULL;
2642
2643 ipa_check_create_node_params ();
2644
2645 /* We cannot make edges to inline clones. It is a bug that someone removed
2646 the cgraph node too early. */
2647 gcc_assert (!callee->global.inlined_to);
2648
2649 if (dump_file && !unreachable)
2650 {
2651 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2652 "(%s/%i -> %s/%i), for stmt ",
2653 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2654 speculative ? "speculative" : "known",
2655 xstrdup_for_dump (ie->caller->name ()),
2656 ie->caller->order,
2657 xstrdup_for_dump (callee->name ()),
2658 callee->order);
2659 if (ie->call_stmt)
2660 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2661 else
2662 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2663 }
2664 if (dump_enabled_p ())
2665 {
2666 location_t loc = gimple_location_safe (ie->call_stmt);
2667
2668 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2669 "converting indirect call in %s to direct call to %s\n",
2670 ie->caller->name (), callee->name ());
2671 }
2672 if (!speculative)
2673 {
2674 struct cgraph_edge *orig = ie;
2675 ie = ie->make_direct (callee);
2676 /* If we resolved a speculative edge, the cost is already up to date
2677 for the direct call (adjusted by inline_edge_duplication_hook). */
2678 if (ie == orig)
2679 {
2680 es = inline_edge_summary (ie);
2681 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2682 - eni_size_weights.call_cost);
2683 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2684 - eni_time_weights.call_cost);
2685 }
2686 }
2687 else
2688 {
2689 if (!callee->can_be_discarded_p ())
2690 {
2691 cgraph_node *alias;
2692 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2693 if (alias)
2694 callee = alias;
2695 }
2696 /* make_speculative will update ie's cost to direct call cost. */
2697 ie = ie->make_speculative
2698 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2699 }
2700
2701 return ie;
2702 }
2703
2704 /* Retrieve the value from the aggregate jump function AGG at the given OFFSET,
2705 or return NULL if there is none. BY_REF specifies whether the value has to
2706 be passed by reference or by value. */
2707
2708 tree
2709 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2710 HOST_WIDE_INT offset, bool by_ref)
2711 {
2712 struct ipa_agg_jf_item *item;
2713 int i;
2714
2715 if (by_ref != agg->by_ref)
2716 return NULL;
2717
2718 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2719 if (item->offset == offset)
2720 {
2721 /* Currently we do not have clobber values, return NULL for them once
2722 we do. */
2723 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2724 return item->value;
2725 }
2726 return NULL;
2727 }
2728
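/* Example of a lookup (an editor's illustration; the identifiers are made
   up): suppose a caller does

     struct S s;
     s.hook = my_hook;
     process (&s);

   and the field hook sits at byte offset 8. The aggregate jump function of
   the argument (passed by reference) then contains an item with offset 64
   (offsets are in bits) and value &my_hook, so a query with OFFSET == 64
   and BY_REF == true returns the ADDR_EXPR of my_hook, whereas a query with
   BY_REF == false returns NULL because agg->by_ref does not match.  */
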
2729 /* Remove a reference to SYMBOL from the list of references of a node given by
2730 reference description RDESC. Return true if the reference has been
2731 successfully found and removed. */
2732
2733 static bool
2734 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2735 {
2736 struct ipa_ref *to_del;
2737 struct cgraph_edge *origin;
2738
2739 origin = rdesc->cs;
2740 if (!origin)
2741 return false;
2742 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2743 origin->lto_stmt_uid);
2744 if (!to_del)
2745 return false;
2746
2747 to_del->remove_reference ();
2748 if (dump_file)
2749 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2750 xstrdup_for_dump (origin->caller->name ()),
2751 origin->caller->order, xstrdup_for_dump (symbol->name ()));
2752 return true;
2753 }
2754
2755 /* If JFUNC has a reference description with refcount different from
2756 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2757 NULL. JFUNC must be a constant jump function. */
2758
2759 static struct ipa_cst_ref_desc *
2760 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2761 {
2762 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2763 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2764 return rdesc;
2765 else
2766 return NULL;
2767 }
2768
2769 /* If the value of constant jump function JFUNC is an address of a function
2770 declaration, return the associated call graph node. Otherwise return
2771 NULL. */
2772
2773 static cgraph_node *
2774 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2775 {
2776 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2777 tree cst = ipa_get_jf_constant (jfunc);
2778 if (TREE_CODE (cst) != ADDR_EXPR
2779 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2780 return NULL;
2781
2782 return cgraph_node::get (TREE_OPERAND (cst, 0));
2783 }
2784
2785
2786 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2787 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2788 the edge specified in the rdesc. Return false if either the symbol or the
2789 reference could not be found, otherwise return true. */
2790
2791 static bool
2792 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2793 {
2794 struct ipa_cst_ref_desc *rdesc;
2795 if (jfunc->type == IPA_JF_CONST
2796 && (rdesc = jfunc_rdesc_usable (jfunc))
2797 && --rdesc->refcount == 0)
2798 {
2799 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2800 if (!symbol)
2801 return false;
2802
2803 return remove_described_reference (symbol, rdesc);
2804 }
2805 return true;
2806 }
2807
2808 /* Try to find a destination for indirect edge IE that corresponds to a simple
2809 call or a call of a member function pointer and where the destination is a
2810 pointer formal parameter described by jump function JFUNC. If it can be
2811 determined, return the newly direct edge, otherwise return NULL.
2812 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2813
2814 static struct cgraph_edge *
2815 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2816 struct ipa_jump_func *jfunc,
2817 struct ipa_node_params *new_root_info)
2818 {
2819 struct cgraph_edge *cs;
2820 tree target;
2821 bool agg_contents = ie->indirect_info->agg_contents;
2822
2823 if (ie->indirect_info->agg_contents)
2824 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2825 ie->indirect_info->offset,
2826 ie->indirect_info->by_ref);
2827 else
2828 target = ipa_value_from_jfunc (new_root_info, jfunc);
2829 if (!target)
2830 return NULL;
2831 cs = ipa_make_edge_direct_to_target (ie, target);
2832
2833 if (cs && !agg_contents)
2834 {
2835 bool ok;
2836 gcc_checking_assert (cs->callee
2837 && (cs != ie
2838 || jfunc->type != IPA_JF_CONST
2839 || !cgraph_node_for_jfunc (jfunc)
2840 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2841 ok = try_decrement_rdesc_refcount (jfunc);
2842 gcc_checking_assert (ok);
2843 }
2844
2845 return cs;
2846 }
2847
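/* A concrete case handled above (an editor's illustration; the names are
   made up): if the callee contains an indirect call "p (x)" through its
   pointer parameter P and the jump function computed for P at the inlined
   call site is the constant &foo, the indirect edge is turned into a direct
   edge to foo. The agg_contents variant handles call targets loaded from
   memory, e.g. "s->callback (x)" where the caller stored a known function
   into the aggregate.  */
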
2848 /* Return the target to be used in cases of impossible devirtualization. IE
2849 and target (the latter can be NULL) are dumped when dumping is enabled. */
2850
2851 tree
2852 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
2853 {
2854 if (dump_file)
2855 {
2856 if (target)
2857 fprintf (dump_file,
2858 "Type inconsistent devirtualization: %s/%i->%s\n",
2859 ie->caller->name (), ie->caller->order,
2860 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2861 else
2862 fprintf (dump_file,
2863 "No devirtualization target in %s/%i\n",
2864 ie->caller->name (), ie->caller->order);
2865 }
2866 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2867 cgraph_node::get_create (new_target);
2868 return new_target;
2869 }
2870
2871 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2872 call based on a formal parameter which is described by jump function JFUNC
2873 and if it can be determined, make it direct and return the direct edge.
2874 Otherwise, return NULL. CTX describes the polymorphic context that the
2875 parameter the call is based on brings along with it. */
2876
2877 static struct cgraph_edge *
2878 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2879 struct ipa_jump_func *jfunc,
2880 struct ipa_polymorphic_call_context ctx)
2881 {
2882 tree target = NULL;
2883 bool speculative = false;
2884
2885 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
2886 return NULL;
2887
2888 gcc_assert (!ie->indirect_info->by_ref);
2889
2890 /* Try to do lookup via known virtual table pointer value. */
2891 if (!ie->indirect_info->vptr_changed
2892 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
2893 {
2894 tree vtable;
2895 unsigned HOST_WIDE_INT offset;
2896 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2897 ie->indirect_info->offset,
2898 true);
2899 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2900 {
2901 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2902 vtable, offset);
2903 if (t)
2904 {
2905 if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
2906 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
2907 || !possible_polymorphic_call_target_p
2908 (ie, cgraph_node::get (t)))
2909 {
2910 /* Do not speculate builtin_unreachable, it is stupid! */
2911 if (!ie->indirect_info->vptr_changed)
2912 target = ipa_impossible_devirt_target (ie, target);
2913 }
2914 else
2915 {
2916 target = t;
2917 speculative = ie->indirect_info->vptr_changed;
2918 }
2919 }
2920 }
2921 }
2922
2923 ipa_polymorphic_call_context ie_context (ie);
2924 vec <cgraph_node *>targets;
2925 bool final;
2926
2927 ctx.offset_by (ie->indirect_info->offset);
2928 if (ie->indirect_info->vptr_changed)
2929 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
2930 ie->indirect_info->otr_type);
2931 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
2932 targets = possible_polymorphic_call_targets
2933 (ie->indirect_info->otr_type,
2934 ie->indirect_info->otr_token,
2935 ctx, &final);
2936 if (final && targets.length () <= 1)
2937 {
2938 speculative = false;
2939 if (targets.length () == 1)
2940 target = targets[0]->decl;
2941 else
2942 target = ipa_impossible_devirt_target (ie, NULL_TREE);
2943 }
2944 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
2945 && !ie->speculative && ie->maybe_hot_p ())
2946 {
2947 cgraph_node *n;
2948 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
2949 ie->indirect_info->otr_token,
2950 ie->indirect_info->context);
2951 if (n)
2952 {
2953 target = n->decl;
2954 speculative = true;
2955 }
2956 }
2957
2958 if (target)
2959 {
2960 if (!possible_polymorphic_call_target_p
2961 (ie, cgraph_node::get_create (target)))
2962 {
2963 if (speculative)
2964 return NULL;
2965 target = ipa_impossible_devirt_target (ie, target);
2966 }
2967 return ipa_make_edge_direct_to_target (ie, target, speculative);
2968 }
2969 else
2970 return NULL;
2971 }
2972
2973 /* Update the param called notes associated with NODE when CS is being inlined,
2974 assuming NODE is (potentially indirectly) inlined into CS->callee.
2975 Moreover, if the callee is discovered to be constant, create a new cgraph
2976 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2977 unless NEW_EDGES is NULL. Return true iff new edges were created. */
2978
2979 static bool
2980 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2981 struct cgraph_node *node,
2982 vec<cgraph_edge *> *new_edges)
2983 {
2984 struct ipa_edge_args *top;
2985 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2986 struct ipa_node_params *new_root_info;
2987 bool res = false;
2988
2989 ipa_check_create_edge_args ();
2990 top = IPA_EDGE_REF (cs);
2991 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2992 ? cs->caller->global.inlined_to
2993 : cs->caller);
2994
2995 for (ie = node->indirect_calls; ie; ie = next_ie)
2996 {
2997 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2998 struct ipa_jump_func *jfunc;
2999 int param_index;
3000 cgraph_node *spec_target = NULL;
3001
3002 next_ie = ie->next_callee;
3003
3004 if (ici->param_index == -1)
3005 continue;
3006
3007 /* We must check the range because of calls with a variable number of arguments: */
3008 if (ici->param_index >= ipa_get_cs_argument_count (top))
3009 {
3010 ici->param_index = -1;
3011 continue;
3012 }
3013
3014 param_index = ici->param_index;
3015 jfunc = ipa_get_ith_jump_func (top, param_index);
3016
3017 if (ie->speculative)
3018 {
3019 struct cgraph_edge *de;
3020 struct ipa_ref *ref;
3021 ie->speculative_call_info (de, ie, ref);
3022 spec_target = de->callee;
3023 }
3024
3025 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3026 new_direct_edge = NULL;
3027 else if (ici->polymorphic)
3028 {
3029 ipa_polymorphic_call_context ctx;
3030 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3031 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3032 }
3033 else
3034 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3035 new_root_info);
3036 /* If speculation was removed, then we need to do nothing. */
3037 if (new_direct_edge && new_direct_edge != ie
3038 && new_direct_edge->callee == spec_target)
3039 {
3040 new_direct_edge->indirect_inlining_edge = 1;
3041 top = IPA_EDGE_REF (cs);
3042 res = true;
3043 if (!new_direct_edge->speculative)
3044 continue;
3045 }
3046 else if (new_direct_edge)
3047 {
3048 new_direct_edge->indirect_inlining_edge = 1;
3049 if (new_direct_edge->call_stmt)
3050 new_direct_edge->call_stmt_cannot_inline_p
3051 = !gimple_check_call_matching_types (
3052 new_direct_edge->call_stmt,
3053 new_direct_edge->callee->decl, false);
3054 if (new_edges)
3055 {
3056 new_edges->safe_push (new_direct_edge);
3057 res = true;
3058 }
3059 top = IPA_EDGE_REF (cs);
3060 /* If a speculative edge was introduced, we still need to update
3061 the call info of the indirect edge. */
3062 if (!new_direct_edge->speculative)
3063 continue;
3064 }
3065 if (jfunc->type == IPA_JF_PASS_THROUGH
3066 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3067 {
3068 if (ici->agg_contents
3069 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3070 && !ici->polymorphic)
3071 ici->param_index = -1;
3072 else
3073 {
3074 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3075 if (ici->polymorphic
3076 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3077 ici->vptr_changed = true;
3078 }
3079 }
3080 else if (jfunc->type == IPA_JF_ANCESTOR)
3081 {
3082 if (ici->agg_contents
3083 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3084 && !ici->polymorphic)
3085 ici->param_index = -1;
3086 else
3087 {
3088 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3089 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3090 if (ici->polymorphic
3091 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3092 ici->vptr_changed = true;
3093 }
3094 }
3095 else
3096 /* Either we can find a destination for this edge now or never. */
3097 ici->param_index = -1;
3098 }
3099
3100 return res;
3101 }
3102
3103 /* Recursively traverse subtree of NODE (including node) made of inlined
3104 cgraph_edges when CS has been inlined and invoke
3105 update_indirect_edges_after_inlining on all nodes and
3106 update_jump_functions_after_inlining on all non-inlined edges that lead out
3107 of this subtree. Newly discovered indirect edges will be added to
3108 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3109 created. */
3110
3111 static bool
3112 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3113 struct cgraph_node *node,
3114 vec<cgraph_edge *> *new_edges)
3115 {
3116 struct cgraph_edge *e;
3117 bool res;
3118
3119 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3120
3121 for (e = node->callees; e; e = e->next_callee)
3122 if (!e->inline_failed)
3123 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3124 else
3125 update_jump_functions_after_inlining (cs, e);
3126 for (e = node->indirect_calls; e; e = e->next_callee)
3127 update_jump_functions_after_inlining (cs, e);
3128
3129 return res;
3130 }
3131
3132 /* Combine two controlled uses counts as done during inlining. */
3133
3134 static int
3135 combine_controlled_uses_counters (int c, int d)
3136 {
3137 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3138 return IPA_UNDESCRIBED_USE;
3139 else
3140 return c + d - 1;
3141 }
3142
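/* A worked example for the combination above (an editor's illustration): if
   the value had c == 2 described uses in the caller, one of which was the
   call that is now being inlined, and the corresponding formal had d == 3
   described uses in the callee, the combined count is 2 + 3 - 1 == 4: the
   inlined call site itself disappears as a use and is replaced by the uses
   in the inlined body. IPA_UNDESCRIBED_USE on either side poisons the
   result.  */
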
3143 /* Propagate the number of controlled uses from CS->callee to the new root
3144 of the tree of inlined nodes. */
3145
3146 static void
3147 propagate_controlled_uses (struct cgraph_edge *cs)
3148 {
3149 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3150 struct cgraph_node *new_root = cs->caller->global.inlined_to
3151 ? cs->caller->global.inlined_to : cs->caller;
3152 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3153 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3154 int count, i;
3155
3156 count = MIN (ipa_get_cs_argument_count (args),
3157 ipa_get_param_count (old_root_info));
3158 for (i = 0; i < count; i++)
3159 {
3160 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3161 struct ipa_cst_ref_desc *rdesc;
3162
3163 if (jf->type == IPA_JF_PASS_THROUGH)
3164 {
3165 int src_idx, c, d;
3166 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3167 c = ipa_get_controlled_uses (new_root_info, src_idx);
3168 d = ipa_get_controlled_uses (old_root_info, i);
3169
3170 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3171 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3172 c = combine_controlled_uses_counters (c, d);
3173 ipa_set_controlled_uses (new_root_info, src_idx, c);
3174 if (c == 0 && new_root_info->ipcp_orig_node)
3175 {
3176 struct cgraph_node *n;
3177 struct ipa_ref *ref;
3178 tree t = new_root_info->known_csts[src_idx];
3179
3180 if (t && TREE_CODE (t) == ADDR_EXPR
3181 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3182 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3183 && (ref = new_root->find_reference (n, NULL, 0)))
3184 {
3185 if (dump_file)
3186 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3187 "reference from %s/%i to %s/%i.\n",
3188 xstrdup_for_dump (new_root->name ()),
3189 new_root->order,
3190 xstrdup_for_dump (n->name ()), n->order);
3191 ref->remove_reference ();
3192 }
3193 }
3194 }
3195 else if (jf->type == IPA_JF_CONST
3196 && (rdesc = jfunc_rdesc_usable (jf)))
3197 {
3198 int d = ipa_get_controlled_uses (old_root_info, i);
3199 int c = rdesc->refcount;
3200 rdesc->refcount = combine_controlled_uses_counters (c, d);
3201 if (rdesc->refcount == 0)
3202 {
3203 tree cst = ipa_get_jf_constant (jf);
3204 struct cgraph_node *n;
3205 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3206 && TREE_CODE (TREE_OPERAND (cst, 0))
3207 == FUNCTION_DECL);
3208 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3209 if (n)
3210 {
3211 struct cgraph_node *clone;
3212 bool ok;
3213 ok = remove_described_reference (n, rdesc);
3214 gcc_checking_assert (ok);
3215
3216 clone = cs->caller;
3217 while (clone->global.inlined_to
3218 && clone != rdesc->cs->caller
3219 && IPA_NODE_REF (clone)->ipcp_orig_node)
3220 {
3221 struct ipa_ref *ref;
3222 ref = clone->find_reference (n, NULL, 0);
3223 if (ref)
3224 {
3225 if (dump_file)
3226 fprintf (dump_file, "ipa-prop: Removing "
3227 "cloning-created reference "
3228 "from %s/%i to %s/%i.\n",
3229 xstrdup_for_dump (clone->name ()),
3230 clone->order,
3231 xstrdup_for_dump (n->name ()),
3232 n->order);
3233 ref->remove_reference ();
3234 }
3235 clone = clone->callers->caller;
3236 }
3237 }
3238 }
3239 }
3240 }
3241
3242 for (i = ipa_get_param_count (old_root_info);
3243 i < ipa_get_cs_argument_count (args);
3244 i++)
3245 {
3246 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3247
3248 if (jf->type == IPA_JF_CONST)
3249 {
3250 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3251 if (rdesc)
3252 rdesc->refcount = IPA_UNDESCRIBED_USE;
3253 }
3254 else if (jf->type == IPA_JF_PASS_THROUGH)
3255 ipa_set_controlled_uses (new_root_info,
3256 jf->value.pass_through.formal_id,
3257 IPA_UNDESCRIBED_USE);
3258 }
3259 }
3260
3261 /* Update jump functions and call note functions on inlining the call site CS.
3262 CS is expected to lead to a node already cloned by
3263 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3264 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3265 created. */
3266
3267 bool
3268 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3269 vec<cgraph_edge *> *new_edges)
3270 {
3271 bool changed;
3272 /* Do nothing if the preparation phase has not been carried out yet
3273 (i.e. during early inlining). */
3274 if (!ipa_node_params_sum)
3275 return false;
3276 gcc_assert (ipa_edge_args_vector);
3277
3278 propagate_controlled_uses (cs);
3279 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3280
3281 return changed;
3282 }
3283
3284 /* Frees all dynamically allocated structures that the argument info points
3285 to. */
3286
3287 void
3288 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3289 {
3290 vec_free (args->jump_functions);
3291 memset (args, 0, sizeof (*args));
3292 }
3293
3294 /* Free all ipa_edge structures. */
3295
3296 void
3297 ipa_free_all_edge_args (void)
3298 {
3299 int i;
3300 struct ipa_edge_args *args;
3301
3302 if (!ipa_edge_args_vector)
3303 return;
3304
3305 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3306 ipa_free_edge_args_substructures (args);
3307
3308 vec_free (ipa_edge_args_vector);
3309 }
3310
3311 /* Frees all dynamically allocated structures that the param info points
3312 to. */
3313
3314 ipa_node_params::~ipa_node_params ()
3315 {
3316 descriptors.release ();
3317 free (lattices);
3318 /* Lattice values and their sources are deallocated with their allocation
3319 pool. */
3320 known_csts.release ();
3321 known_contexts.release ();
3322
3323 lattices = NULL;
3324 ipcp_orig_node = NULL;
3325 analysis_done = 0;
3326 node_enqueued = 0;
3327 do_clone_for_all_contexts = 0;
3328 is_all_contexts_clone = 0;
3329 node_dead = 0;
3330 }
3331
3332 /* Free all ipa_node_params structures. */
3333
3334 void
3335 ipa_free_all_node_params (void)
3336 {
3337 delete ipa_node_params_sum;
3338 ipa_node_params_sum = NULL;
3339 }
3340
3341 /* Grow ipcp_transformations if necessary. */
3342
3343 void
3344 ipcp_grow_transformations_if_necessary (void)
3345 {
3346 if (vec_safe_length (ipcp_transformations)
3347 <= (unsigned) symtab->cgraph_max_uid)
3348 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3349 }
3350
3351 /* Set the aggregate replacements of NODE to be AGGVALS. */
3352
3353 void
3354 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3355 struct ipa_agg_replacement_value *aggvals)
3356 {
3357 ipcp_grow_transformations_if_necessary ();
3358 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3359 }
3360
3361 /* Hook that is called by cgraph.c when an edge is removed. */
3362
3363 static void
3364 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3365 {
3366 struct ipa_edge_args *args;
3367
3368 /* During IPA-CP updating we can be called on not-yet-analyzed clones. */
3369 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3370 return;
3371
3372 args = IPA_EDGE_REF (cs);
3373 if (args->jump_functions)
3374 {
3375 struct ipa_jump_func *jf;
3376 int i;
3377 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3378 {
3379 struct ipa_cst_ref_desc *rdesc;
3380 try_decrement_rdesc_refcount (jf);
3381 if (jf->type == IPA_JF_CONST
3382 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3383 && rdesc->cs == cs)
3384 rdesc->cs = NULL;
3385 }
3386 }
3387
3388 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3389 }
3390
3391 /* Hook that is called by cgraph.c when an edge is duplicated. */
3392
3393 static void
3394 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3395 void *)
3396 {
3397 struct ipa_edge_args *old_args, *new_args;
3398 unsigned int i;
3399
3400 ipa_check_create_edge_args ();
3401
3402 old_args = IPA_EDGE_REF (src);
3403 new_args = IPA_EDGE_REF (dst);
3404
3405 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3406 if (old_args->polymorphic_call_contexts)
3407 new_args->polymorphic_call_contexts
3408 = vec_safe_copy (old_args->polymorphic_call_contexts);
3409
3410 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3411 {
3412 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3413 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3414
3415 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3416
3417 if (src_jf->type == IPA_JF_CONST)
3418 {
3419 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3420
3421 if (!src_rdesc)
3422 dst_jf->value.constant.rdesc = NULL;
3423 else if (src->caller == dst->caller)
3424 {
3425 struct ipa_ref *ref;
3426 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3427 gcc_checking_assert (n);
3428 ref = src->caller->find_reference (n, src->call_stmt,
3429 src->lto_stmt_uid);
3430 gcc_checking_assert (ref);
3431 dst->caller->clone_reference (ref, ref->stmt);
3432
3433 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3434 dst_rdesc->cs = dst;
3435 dst_rdesc->refcount = src_rdesc->refcount;
3436 dst_rdesc->next_duplicate = NULL;
3437 dst_jf->value.constant.rdesc = dst_rdesc;
3438 }
3439 else if (src_rdesc->cs == src)
3440 {
3441 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3442 dst_rdesc->cs = dst;
3443 dst_rdesc->refcount = src_rdesc->refcount;
3444 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3445 src_rdesc->next_duplicate = dst_rdesc;
3446 dst_jf->value.constant.rdesc = dst_rdesc;
3447 }
3448 else
3449 {
3450 struct ipa_cst_ref_desc *dst_rdesc;
3451 /* This can happen during inlining, when a JFUNC can refer to a
3452 reference taken in a function up in the tree of inline clones.
3453 We need to find the duplicate that refers to our tree of
3454 inline clones. */
3455
3456 gcc_assert (dst->caller->global.inlined_to);
3457 for (dst_rdesc = src_rdesc->next_duplicate;
3458 dst_rdesc;
3459 dst_rdesc = dst_rdesc->next_duplicate)
3460 {
3461 struct cgraph_node *top;
3462 top = dst_rdesc->cs->caller->global.inlined_to
3463 ? dst_rdesc->cs->caller->global.inlined_to
3464 : dst_rdesc->cs->caller;
3465 if (dst->caller->global.inlined_to == top)
3466 break;
3467 }
3468 gcc_assert (dst_rdesc);
3469 dst_jf->value.constant.rdesc = dst_rdesc;
3470 }
3471 }
3472 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3473 && src->caller == dst->caller)
3474 {
3475 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3476 ? dst->caller->global.inlined_to : dst->caller;
3477 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3478 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3479
3480 int c = ipa_get_controlled_uses (root_info, idx);
3481 if (c != IPA_UNDESCRIBED_USE)
3482 {
3483 c++;
3484 ipa_set_controlled_uses (root_info, idx, c);
3485 }
3486 }
3487 }
3488 }
3489
3490 /* Analyze a function newly added to the callgraph. */
3491
3492 static void
3493 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3494 {
3495 if (node->has_gimple_body_p ())
3496 ipa_analyze_node (node);
3497 }
3498
3499 /* Hook that is called by summary when a node is duplicated. */
3500
3501 void
3502 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3503 ipa_node_params *old_info,
3504 ipa_node_params *new_info)
3505 {
3506 ipa_agg_replacement_value *old_av, *new_av;
3507
3508 new_info->descriptors = old_info->descriptors.copy ();
3509 new_info->lattices = NULL;
3510 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3511
3512 new_info->analysis_done = old_info->analysis_done;
3513 new_info->node_enqueued = old_info->node_enqueued;
3514 new_info->versionable = old_info->versionable;
3515
3516 old_av = ipa_get_agg_replacements_for_node (src);
3517 if (old_av)
3518 {
3519 new_av = NULL;
3520 while (old_av)
3521 {
3522 struct ipa_agg_replacement_value *v;
3523
3524 v = ggc_alloc<ipa_agg_replacement_value> ();
3525 memcpy (v, old_av, sizeof (*v));
3526 v->next = new_av;
3527 new_av = v;
3528 old_av = old_av->next;
3529 }
3530 ipa_set_node_agg_value_chain (dst, new_av);
3531 }
3532
3533 ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
3534
3535 if (src_trans && vec_safe_length (src_trans->alignments) > 0)
3536 {
3537 ipcp_grow_transformations_if_necessary ();
3538 src_trans = ipcp_get_transformation_summary (src);
3539 const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
3540 vec<ipa_alignment, va_gc> *&dst_alignments
3541 = ipcp_get_transformation_summary (dst)->alignments;
3542 vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
3543 for (unsigned i = 0; i < src_alignments->length (); ++i)
3544 dst_alignments->quick_push ((*src_alignments)[i]);
3545 }
3546 }
3547
3548 /* Register our cgraph hooks if they are not already there. */
3549
3550 void
3551 ipa_register_cgraph_hooks (void)
3552 {
3553 ipa_check_create_node_params ();
3554
3555 if (!edge_removal_hook_holder)
3556 edge_removal_hook_holder =
3557 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3558 if (!edge_duplication_hook_holder)
3559 edge_duplication_hook_holder =
3560 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3561 function_insertion_hook_holder =
3562 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3563 }
3564
3565 /* Unregister our cgraph hooks. */
3566
3567 static void
3568 ipa_unregister_cgraph_hooks (void)
3569 {
3570 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3571 edge_removal_hook_holder = NULL;
3572 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3573 edge_duplication_hook_holder = NULL;
3574 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3575 function_insertion_hook_holder = NULL;
3576 }
3577
3578 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3579 longer needed after ipa-cp. */
3580
3581 void
3582 ipa_free_all_structures_after_ipa_cp (void)
3583 {
3584 if (!optimize && !in_lto_p)
3585 {
3586 ipa_free_all_edge_args ();
3587 ipa_free_all_node_params ();
3588 ipcp_sources_pool.release ();
3589 ipcp_cst_values_pool.release ();
3590 ipcp_poly_ctx_values_pool.release ();
3591 ipcp_agg_lattice_pool.release ();
3592 ipa_unregister_cgraph_hooks ();
3593 ipa_refdesc_pool.release ();
3594 }
3595 }
3596
3597 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3598 longer needed after indirect inlining. */
3599
3600 void
3601 ipa_free_all_structures_after_iinln (void)
3602 {
3603 ipa_free_all_edge_args ();
3604 ipa_free_all_node_params ();
3605 ipa_unregister_cgraph_hooks ();
3606 ipcp_sources_pool.release ();
3607 ipcp_cst_values_pool.release ();
3608 ipcp_poly_ctx_values_pool.release ();
3609 ipcp_agg_lattice_pool.release ();
3610 ipa_refdesc_pool.release ();
3611 }
3612
3613 /* Print the ipa_tree_map data structure (parameter descriptors) of
3614 function NODE to F. */
3615
3616 void
3617 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3618 {
3619 int i, count;
3620 struct ipa_node_params *info;
3621
3622 if (!node->definition)
3623 return;
3624 info = IPA_NODE_REF (node);
3625 fprintf (f, " function %s/%i parameter descriptors:\n",
3626 node->name (), node->order);
3627 count = ipa_get_param_count (info);
3628 for (i = 0; i < count; i++)
3629 {
3630 int c;
3631
3632 fprintf (f, " ");
3633 ipa_dump_param (f, info, i);
3634 if (ipa_is_param_used (info, i))
3635 fprintf (f, " used");
3636 c = ipa_get_controlled_uses (info, i);
3637 if (c == IPA_UNDESCRIBED_USE)
3638 fprintf (f, " undescribed_use");
3639 else
3640 fprintf (f, " controlled_uses=%i", c);
3641 fprintf (f, "\n");
3642 }
3643 }
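/* For illustration, the dump produced above looks roughly as follows for a
   hypothetical function foo (the exact rendering of each parameter is done
   by ipa_dump_param and may differ):

     function foo/12 parameter descriptors:
       param #0 p used controlled_uses=1
       param #1 n used undescribed_use  */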
3644
3645 /* Print ipa_tree_map data structures of all functions in the
3646 callgraph to F. */
3647
3648 void
3649 ipa_print_all_params (FILE * f)
3650 {
3651 struct cgraph_node *node;
3652
3653 fprintf (f, "\nFunction parameters:\n");
3654 FOR_EACH_FUNCTION (node)
3655 ipa_print_node_params (f, node);
3656 }
3657
3658 /* Return a heap-allocated vector containing the formal parameters of FNDECL. */
3659
3660 vec<tree>
3661 ipa_get_vector_of_formal_parms (tree fndecl)
3662 {
3663 vec<tree> args;
3664 int count;
3665 tree parm;
3666
3667 gcc_assert (!flag_wpa);
3668 count = count_formal_params (fndecl);
3669 args.create (count);
3670 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3671 args.quick_push (parm);
3672
3673 return args;
3674 }
3675
3676 /* Return a heap-allocated vector containing the types of the formal
3677 parameters of function type FNTYPE. */
3678
3679 vec<tree>
3680 ipa_get_vector_of_formal_parm_types (tree fntype)
3681 {
3682 vec<tree> types;
3683 int count = 0;
3684 tree t;
3685
3686 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3687 count++;
3688
3689 types.create (count);
3690 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3691 types.quick_push (TREE_VALUE (t));
3692
3693 return types;
3694 }
3695
3696 /* Modify the function declaration FNDECL and its type according to the plan in
3697 ADJUSTMENTS. It also sets the base fields of individual adjustment
3698 structures to reflect the actual parameters being modified, which are
3699 determined by the base_index field. */
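/* As a purely hypothetical illustration: given

     int foo (struct S *p, int unused);

   and adjustments that replace P by the scalar member P->x passed by value
   and remove UNUSED, the declaration is rewritten to something like

     int foo (int ISRA.0);

   where ISRA.0 is a synthesized PARM_DECL whose name prefix comes from
   adj->arg_prefix ("SYNTH" when no prefix is provided).  */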
3700
3701 void
3702 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3703 {
3704 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3705 tree orig_type = TREE_TYPE (fndecl);
3706 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3707
3708 /* The following test is an ugly hack; some functions simply don't have any
3709 arguments in their type. This is probably a bug, but we tolerate it here. */
3710 bool care_for_types = (old_arg_types != NULL_TREE);
3711 bool last_parm_void;
3712 vec<tree> otypes;
3713 if (care_for_types)
3714 {
3715 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3716 == void_type_node);
3717 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3718 if (last_parm_void)
3719 gcc_assert (oparms.length () + 1 == otypes.length ());
3720 else
3721 gcc_assert (oparms.length () == otypes.length ());
3722 }
3723 else
3724 {
3725 last_parm_void = false;
3726 otypes.create (0);
3727 }
3728
3729 int len = adjustments.length ();
3730 tree *link = &DECL_ARGUMENTS (fndecl);
3731 tree new_arg_types = NULL;
3732 for (int i = 0; i < len; i++)
3733 {
3734 struct ipa_parm_adjustment *adj;
3735 gcc_assert (link);
3736
3737 adj = &adjustments[i];
3738 tree parm;
3739 if (adj->op == IPA_PARM_OP_NEW)
3740 parm = NULL;
3741 else
3742 parm = oparms[adj->base_index];
3743 adj->base = parm;
3744
3745 if (adj->op == IPA_PARM_OP_COPY)
3746 {
3747 if (care_for_types)
3748 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3749 new_arg_types);
3750 *link = parm;
3751 link = &DECL_CHAIN (parm);
3752 }
3753 else if (adj->op != IPA_PARM_OP_REMOVE)
3754 {
3755 tree new_parm;
3756 tree ptype;
3757
3758 if (adj->by_ref)
3759 ptype = build_pointer_type (adj->type);
3760 else
3761 {
3762 ptype = adj->type;
3763 if (is_gimple_reg_type (ptype))
3764 {
3765 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3766 if (TYPE_ALIGN (ptype) < malign)
3767 ptype = build_aligned_type (ptype, malign);
3768 }
3769 }
3770
3771 if (care_for_types)
3772 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3773
3774 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3775 ptype);
3776 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3777 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3778 DECL_ARTIFICIAL (new_parm) = 1;
3779 DECL_ARG_TYPE (new_parm) = ptype;
3780 DECL_CONTEXT (new_parm) = fndecl;
3781 TREE_USED (new_parm) = 1;
3782 DECL_IGNORED_P (new_parm) = 1;
3783 layout_decl (new_parm, 0);
3784
3785 if (adj->op == IPA_PARM_OP_NEW)
3786 adj->base = NULL;
3787 else
3788 adj->base = parm;
3789 adj->new_decl = new_parm;
3790
3791 *link = new_parm;
3792 link = &DECL_CHAIN (new_parm);
3793 }
3794 }
3795
3796 *link = NULL_TREE;
3797
3798 tree new_reversed = NULL;
3799 if (care_for_types)
3800 {
3801 new_reversed = nreverse (new_arg_types);
3802 if (last_parm_void)
3803 {
3804 if (new_reversed)
3805 TREE_CHAIN (new_arg_types) = void_list_node;
3806 else
3807 new_reversed = void_list_node;
3808 }
3809 }
3810
3811 /* Use copy_node to preserve as much as possible from the original type
3812 (debug info, attribute lists etc.).
3813 The exception is METHOD_TYPEs, which must have a THIS argument; when we
3814 are asked to remove it, we need to build a new FUNCTION_TYPE
3815 instead. */
3816 tree new_type = NULL;
3817 if (TREE_CODE (orig_type) != METHOD_TYPE
3818 || (adjustments[0].op == IPA_PARM_OP_COPY
3819 && adjustments[0].base_index == 0))
3820 {
3821 new_type = build_distinct_type_copy (orig_type);
3822 TYPE_ARG_TYPES (new_type) = new_reversed;
3823 }
3824 else
3825 {
3826 new_type
3827 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3828 new_reversed));
3829 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3830 DECL_VINDEX (fndecl) = NULL_TREE;
3831 }
3832
3833 /* When signature changes, we need to clear builtin info. */
3834 if (DECL_BUILT_IN (fndecl))
3835 {
3836 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3837 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3838 }
3839
3840 TREE_TYPE (fndecl) = new_type;
3841 DECL_VIRTUAL_P (fndecl) = 0;
3842 DECL_LANG_SPECIFIC (fndecl) = NULL;
3843 otypes.release ();
3844 oparms.release ();
3845 }
3846
3847 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3848 If this is a directly recursive call, CS must be NULL. Otherwise it must
3849 contain the corresponding call graph edge. */
3850
3851 void
3852 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
3853 ipa_parm_adjustment_vec adjustments)
3854 {
3855 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
3856 vec<tree> vargs;
3857 vec<tree, va_gc> **debug_args = NULL;
3858 gcall *new_stmt;
3859 gimple_stmt_iterator gsi, prev_gsi;
3860 tree callee_decl;
3861 int i, len;
3862
3863 len = adjustments.length ();
3864 vargs.create (len);
3865 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3866 current_node->remove_stmt_references (stmt);
3867
3868 gsi = gsi_for_stmt (stmt);
3869 prev_gsi = gsi;
3870 gsi_prev (&prev_gsi);
3871 for (i = 0; i < len; i++)
3872 {
3873 struct ipa_parm_adjustment *adj;
3874
3875 adj = &adjustments[i];
3876
3877 if (adj->op == IPA_PARM_OP_COPY)
3878 {
3879 tree arg = gimple_call_arg (stmt, adj->base_index);
3880
3881 vargs.quick_push (arg);
3882 }
3883 else if (adj->op != IPA_PARM_OP_REMOVE)
3884 {
3885 tree expr, base, off;
3886 location_t loc;
3887 unsigned int deref_align = 0;
3888 bool deref_base = false;
3889
3890 /* Because we create a new parameter out of the value of the old one,
3891 we can do the following kinds of transformations:
3892
3893 - A scalar passed by reference is converted to a scalar passed by
3894 value. (adj->by_ref is false and the type of the original
3895 actual argument is a pointer to a scalar).
3896
3897 - A part of an aggregate is passed instead of the whole aggregate.
3898 The part can be passed either by value or by reference, this is
3899 determined by value of adj->by_ref. Moreover, the code below
3900 handles both situations when the original aggregate is passed by
3901 value (its type is not a pointer) and when it is passed by
3902 reference (it is a pointer to an aggregate).
3903
3904 When the new argument is passed by reference (adj->by_ref is true)
3905 it must be a part of an aggregate and therefore we form it by
3906 simply taking the address of a reference inside the original
3907 aggregate. */
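/* Hypothetical source-level examples of the cases above: foo (&s) where
   only s.f is used can become foo (s.f) (part passed by value); foo (p)
   with P pointing to an aggregate can become foo (p->f); and when the
   part is passed by reference, foo (&s) becomes foo (&s.f).  Offsets
   (adj->offset) are in bits, hence the divisions by BITS_PER_UNIT
   below.  */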
3908
3909 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3910 base = gimple_call_arg (stmt, adj->base_index);
3911 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3912 : EXPR_LOCATION (base);
3913
3914 if (TREE_CODE (base) != ADDR_EXPR
3915 && POINTER_TYPE_P (TREE_TYPE (base)))
3916 off = build_int_cst (adj->alias_ptr_type,
3917 adj->offset / BITS_PER_UNIT);
3918 else
3919 {
3920 HOST_WIDE_INT base_offset;
3921 tree prev_base;
3922 bool addrof;
3923
3924 if (TREE_CODE (base) == ADDR_EXPR)
3925 {
3926 base = TREE_OPERAND (base, 0);
3927 addrof = true;
3928 }
3929 else
3930 addrof = false;
3931 prev_base = base;
3932 base = get_addr_base_and_unit_offset (base, &base_offset);
3933 /* Aggregate arguments can have non-invariant addresses. */
3934 if (!base)
3935 {
3936 base = build_fold_addr_expr (prev_base);
3937 off = build_int_cst (adj->alias_ptr_type,
3938 adj->offset / BITS_PER_UNIT);
3939 }
3940 else if (TREE_CODE (base) == MEM_REF)
3941 {
3942 if (!addrof)
3943 {
3944 deref_base = true;
3945 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3946 }
3947 off = build_int_cst (adj->alias_ptr_type,
3948 base_offset
3949 + adj->offset / BITS_PER_UNIT);
3950 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3951 off);
3952 base = TREE_OPERAND (base, 0);
3953 }
3954 else
3955 {
3956 off = build_int_cst (adj->alias_ptr_type,
3957 base_offset
3958 + adj->offset / BITS_PER_UNIT);
3959 base = build_fold_addr_expr (base);
3960 }
3961 }
3962
3963 if (!adj->by_ref)
3964 {
3965 tree type = adj->type;
3966 unsigned int align;
3967 unsigned HOST_WIDE_INT misalign;
3968
3969 if (deref_base)
3970 {
3971 align = deref_align;
3972 misalign = 0;
3973 }
3974 else
3975 {
3976 get_pointer_alignment_1 (base, &align, &misalign);
3977 if (TYPE_ALIGN (type) > align)
3978 align = TYPE_ALIGN (type);
3979 }
3980 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
3981 * BITS_PER_UNIT);
3982 misalign = misalign & (align - 1);
3983 if (misalign != 0)
3984 align = (misalign & -misalign);
3985 if (align < TYPE_ALIGN (type))
3986 type = build_aligned_type (type, align);
3987 base = force_gimple_operand_gsi (&gsi, base,
3988 true, NULL, true, GSI_SAME_STMT);
3989 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3990 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
3991 /* If expr is not a valid gimple call argument emit
3992 a load into a temporary. */
3993 if (is_gimple_reg_type (TREE_TYPE (expr)))
3994 {
3995 gimple *tem = gimple_build_assign (NULL_TREE, expr);
3996 if (gimple_in_ssa_p (cfun))
3997 {
3998 gimple_set_vuse (tem, gimple_vuse (stmt));
3999 expr = make_ssa_name (TREE_TYPE (expr), tem);
4000 }
4001 else
4002 expr = create_tmp_reg (TREE_TYPE (expr));
4003 gimple_assign_set_lhs (tem, expr);
4004 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4005 }
4006 }
4007 else
4008 {
4009 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4010 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4011 expr = build_fold_addr_expr (expr);
4012 expr = force_gimple_operand_gsi (&gsi, expr,
4013 true, NULL, true, GSI_SAME_STMT);
4014 }
4015 vargs.quick_push (expr);
4016 }
4017 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4018 {
4019 unsigned int ix;
4020 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4021 gimple *def_temp;
4022
4023 arg = gimple_call_arg (stmt, adj->base_index);
4024 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4025 {
4026 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4027 continue;
4028 arg = fold_convert_loc (gimple_location (stmt),
4029 TREE_TYPE (origin), arg);
4030 }
4031 if (debug_args == NULL)
4032 debug_args = decl_debug_args_insert (callee_decl);
4033 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4034 if (ddecl == origin)
4035 {
4036 ddecl = (**debug_args)[ix + 1];
4037 break;
4038 }
4039 if (ddecl == NULL)
4040 {
4041 ddecl = make_node (DEBUG_EXPR_DECL);
4042 DECL_ARTIFICIAL (ddecl) = 1;
4043 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4044 DECL_MODE (ddecl) = DECL_MODE (origin);
4045
4046 vec_safe_push (*debug_args, origin);
4047 vec_safe_push (*debug_args, ddecl);
4048 }
4049 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4050 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4051 }
4052 }
4053
4054 if (dump_file && (dump_flags & TDF_DETAILS))
4055 {
4056 fprintf (dump_file, "replacing stmt:");
4057 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4058 }
4059
4060 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4061 vargs.release ();
4062 if (gimple_call_lhs (stmt))
4063 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4064
4065 gimple_set_block (new_stmt, gimple_block (stmt));
4066 if (gimple_has_location (stmt))
4067 gimple_set_location (new_stmt, gimple_location (stmt));
4068 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4069 gimple_call_copy_flags (new_stmt, stmt);
4070 if (gimple_in_ssa_p (cfun))
4071 {
4072 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4073 if (gimple_vdef (stmt))
4074 {
4075 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4076 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4077 }
4078 }
4079
4080 if (dump_file && (dump_flags & TDF_DETAILS))
4081 {
4082 fprintf (dump_file, "with stmt:");
4083 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4084 fprintf (dump_file, "\n");
4085 }
4086 gsi_replace (&gsi, new_stmt, true);
4087 if (cs)
4088 cs->set_call_stmt (new_stmt);
4089 do
4090 {
4091 current_node->record_stmt_references (gsi_stmt (gsi));
4092 gsi_prev (&gsi);
4093 }
4094 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4095 }
4096
4097 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4098 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4099 specifies whether the function should care about type incompatibility
4100 between the current and new expressions. If it is false, the function will leave
4101 incompatibility issues to the caller. Return true iff the expression
4102 was modified. */
4103
4104 bool
4105 ipa_modify_expr (tree *expr, bool convert,
4106 ipa_parm_adjustment_vec adjustments)
4107 {
4108 struct ipa_parm_adjustment *cand
4109 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4110 if (!cand)
4111 return false;
4112
4113 tree src;
4114 if (cand->by_ref)
4115 {
4116 src = build_simple_mem_ref (cand->new_decl);
4117 REF_REVERSE_STORAGE_ORDER (src) = cand->reverse;
4118 }
4119 else
4120 src = cand->new_decl;
4121
4122 if (dump_file && (dump_flags & TDF_DETAILS))
4123 {
4124 fprintf (dump_file, "About to replace expr ");
4125 print_generic_expr (dump_file, *expr, 0);
4126 fprintf (dump_file, " with ");
4127 print_generic_expr (dump_file, src, 0);
4128 fprintf (dump_file, "\n");
4129 }
4130
4131 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4132 {
4133 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4134 *expr = vce;
4135 }
4136 else
4137 *expr = src;
4138 return true;
4139 }
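/* A hypothetical example of the replacement: if parameter P was reduced
   to the scalar P->f now held in the new parameter ISRA.3, a use of p->f
   in the body is rewritten to ISRA.3, or to *ISRA.3 when the new
   parameter is a pointer to the reduced part (cand->by_ref), wrapped in
   a VIEW_CONVERT_EXPR when the types differ and CONVERT is set.  */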
4140
4141 /* If T is an SSA_NAME, return NULL if it is not a default def or
4142 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4143 the base variable is always returned, regardless of whether it is a default
4144 def. Return T if it is not an SSA_NAME. */
4145
4146 static tree
4147 get_ssa_base_param (tree t, bool ignore_default_def)
4148 {
4149 if (TREE_CODE (t) == SSA_NAME)
4150 {
4151 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4152 return SSA_NAME_VAR (t);
4153 else
4154 return NULL_TREE;
4155 }
4156 return t;
4157 }
4158
4159 /* Given an expression, return an adjustment entry specifying the
4160 transformation to be done on EXPR. If no suitable adjustment entry
4161 was found, returns NULL.
4162
4163 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs even when they are
4164 not a default def; otherwise bail on them.
4165
4166 If CONVERT is non-NULL, this function will set *CONVERT if the
4167 expression provided is a component reference. ADJUSTMENTS is the
4168 adjustments vector. */
4169
4170 ipa_parm_adjustment *
4171 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4172 ipa_parm_adjustment_vec adjustments,
4173 bool ignore_default_def)
4174 {
4175 if (TREE_CODE (**expr) == BIT_FIELD_REF
4176 || TREE_CODE (**expr) == IMAGPART_EXPR
4177 || TREE_CODE (**expr) == REALPART_EXPR)
4178 {
4179 *expr = &TREE_OPERAND (**expr, 0);
4180 if (convert)
4181 *convert = true;
4182 }
4183
4184 HOST_WIDE_INT offset, size, max_size;
4185 bool reverse;
4186 tree base
4187 = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
4188 if (!base || size == -1 || max_size == -1)
4189 return NULL;
4190
4191 if (TREE_CODE (base) == MEM_REF)
4192 {
4193 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4194 base = TREE_OPERAND (base, 0);
4195 }
4196
4197 base = get_ssa_base_param (base, ignore_default_def);
4198 if (!base || TREE_CODE (base) != PARM_DECL)
4199 return NULL;
4200
4201 struct ipa_parm_adjustment *cand = NULL;
4202 unsigned int len = adjustments.length ();
4203 for (unsigned i = 0; i < len; i++)
4204 {
4205 struct ipa_parm_adjustment *adj = &adjustments[i];
4206
4207 if (adj->base == base
4208 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4209 {
4210 cand = adj;
4211 break;
4212 }
4213 }
4214
4215 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4216 return NULL;
4217 return cand;
4218 }
4219
4220 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4221
4222 static bool
4223 index_in_adjustments_multiple_times_p (int base_index,
4224 ipa_parm_adjustment_vec adjustments)
4225 {
4226 int i, len = adjustments.length ();
4227 bool one = false;
4228
4229 for (i = 0; i < len; i++)
4230 {
4231 struct ipa_parm_adjustment *adj;
4232 adj = &adjustments[i];
4233
4234 if (adj->base_index == base_index)
4235 {
4236 if (one)
4237 return true;
4238 else
4239 one = true;
4240 }
4241 }
4242 return false;
4243 }
4244
4245
4246 /* Return adjustments that should have the same effect on function parameters
4247 and call arguments as if they were first changed according to adjustments in
4248 INNER and then by adjustments in OUTER. */
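/* A worked example: let the original function have parameters (a, b, c).
   If INNER removes B, the intermediate clone has parameters (a, c) with
   base indices 0 and 2 into the original list.  If OUTER then removes
   its parameter 1 (i.e. C), the combined vector copies A and removes
   both B and C, exactly as if both steps had been applied to the
   original declaration at once.  */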
4249
4250 ipa_parm_adjustment_vec
4251 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4252 ipa_parm_adjustment_vec outer)
4253 {
4254 int i, outlen = outer.length ();
4255 int inlen = inner.length ();
4256 int removals = 0;
4257 ipa_parm_adjustment_vec adjustments, tmp;
4258
4259 tmp.create (inlen);
4260 for (i = 0; i < inlen; i++)
4261 {
4262 struct ipa_parm_adjustment *n;
4263 n = &inner[i];
4264
4265 if (n->op == IPA_PARM_OP_REMOVE)
4266 removals++;
4267 else
4268 {
4269 /* FIXME: Handling of new arguments is not implemented yet. */
4270 gcc_assert (n->op != IPA_PARM_OP_NEW);
4271 tmp.quick_push (*n);
4272 }
4273 }
4274
4275 adjustments.create (outlen + removals);
4276 for (i = 0; i < outlen; i++)
4277 {
4278 struct ipa_parm_adjustment r;
4279 struct ipa_parm_adjustment *out = &outer[i];
4280 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4281
4282 memset (&r, 0, sizeof (r));
4283 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4284 if (out->op == IPA_PARM_OP_REMOVE)
4285 {
4286 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4287 {
4288 r.op = IPA_PARM_OP_REMOVE;
4289 adjustments.quick_push (r);
4290 }
4291 continue;
4292 }
4293 else
4294 {
4295 /* FIXME: Handling of new arguments is not implemented yet. */
4296 gcc_assert (out->op != IPA_PARM_OP_NEW);
4297 }
4298
4299 r.base_index = in->base_index;
4300 r.type = out->type;
4301
4302 /* FIXME: Create nonlocal value too. */
4303
4304 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4305 r.op = IPA_PARM_OP_COPY;
4306 else if (in->op == IPA_PARM_OP_COPY)
4307 r.offset = out->offset;
4308 else if (out->op == IPA_PARM_OP_COPY)
4309 r.offset = in->offset;
4310 else
4311 r.offset = in->offset + out->offset;
4312 adjustments.quick_push (r);
4313 }
4314
4315 for (i = 0; i < inlen; i++)
4316 {
4317 struct ipa_parm_adjustment *n = &inner[i];
4318
4319 if (n->op == IPA_PARM_OP_REMOVE)
4320 adjustments.quick_push (*n);
4321 }
4322
4323 tmp.release ();
4324 return adjustments;
4325 }
4326
4327 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4328 human-friendly way, assuming they are meant to be applied to FNDECL. */
4329
4330 void
4331 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4332 tree fndecl)
4333 {
4334 int i, len = adjustments.length ();
4335 bool first = true;
4336 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4337
4338 fprintf (file, "IPA param adjustments: ");
4339 for (i = 0; i < len; i++)
4340 {
4341 struct ipa_parm_adjustment *adj;
4342 adj = &adjustments[i];
4343
4344 if (!first)
4345 fprintf (file, " ");
4346 else
4347 first = false;
4348
4349 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4350 print_generic_expr (file, parms[adj->base_index], 0);
4351 if (adj->base)
4352 {
4353 fprintf (file, ", base: ");
4354 print_generic_expr (file, adj->base, 0);
4355 }
4356 if (adj->new_decl)
4357 {
4358 fprintf (file, ", new_decl: ");
4359 print_generic_expr (file, adj->new_decl, 0);
4360 }
4361 if (adj->new_ssa_base)
4362 {
4363 fprintf (file, ", new_ssa_base: ");
4364 print_generic_expr (file, adj->new_ssa_base, 0);
4365 }
4366
4367 if (adj->op == IPA_PARM_OP_COPY)
4368 fprintf (file, ", copy_param");
4369 else if (adj->op == IPA_PARM_OP_REMOVE)
4370 fprintf (file, ", remove_param");
4371 else
4372 fprintf (file, ", offset %li", (long) adj->offset);
4373 if (adj->by_ref)
4374 fprintf (file, ", by_ref");
4375 print_node_brief (file, ", type: ", adj->type, 0);
4376 fprintf (file, "\n");
4377 }
4378 parms.release ();
4379 }
4380
4381 /* Dump the AV linked list. */
4382
4383 void
4384 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4385 {
4386 bool comma = false;
4387 fprintf (f, " Aggregate replacements:");
4388 for (; av; av = av->next)
4389 {
4390 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4391 av->index, av->offset);
4392 print_generic_expr (f, av->value, 0);
4393 comma = true;
4394 }
4395 fprintf (f, "\n");
4396 }
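/* Given the format strings above, the output looks like, e.g.

     Aggregate replacements: 1[32]=4, 2[0]=&var

   i.e. the parameter index, the offset in bits in brackets, and the
   replacement value (the values shown are purely illustrative).  */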
4397
4398 /* Stream out jump function JUMP_FUNC to OB. */
4399
4400 static void
4401 ipa_write_jump_function (struct output_block *ob,
4402 struct ipa_jump_func *jump_func)
4403 {
4404 struct ipa_agg_jf_item *item;
4405 struct bitpack_d bp;
4406 int i, count;
4407
4408 streamer_write_uhwi (ob, jump_func->type);
4409 switch (jump_func->type)
4410 {
4411 case IPA_JF_UNKNOWN:
4412 break;
4413 case IPA_JF_CONST:
4414 gcc_assert (
4415 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4416 stream_write_tree (ob, jump_func->value.constant.value, true);
4417 break;
4418 case IPA_JF_PASS_THROUGH:
4419 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4420 if (jump_func->value.pass_through.operation == NOP_EXPR)
4421 {
4422 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4423 bp = bitpack_create (ob->main_stream);
4424 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4425 streamer_write_bitpack (&bp);
4426 }
4427 else
4428 {
4429 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4430 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4431 }
4432 break;
4433 case IPA_JF_ANCESTOR:
4434 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4435 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4436 bp = bitpack_create (ob->main_stream);
4437 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4438 streamer_write_bitpack (&bp);
4439 break;
4440 }
4441
4442 count = vec_safe_length (jump_func->agg.items);
4443 streamer_write_uhwi (ob, count);
4444 if (count)
4445 {
4446 bp = bitpack_create (ob->main_stream);
4447 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4448 streamer_write_bitpack (&bp);
4449 }
4450
4451 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4452 {
4453 streamer_write_uhwi (ob, item->offset);
4454 stream_write_tree (ob, item->value, true);
4455 }
4456
4457 bp = bitpack_create (ob->main_stream);
4458 bp_pack_value (&bp, jump_func->alignment.known, 1);
4459 streamer_write_bitpack (&bp);
4460 if (jump_func->alignment.known)
4461 {
4462 streamer_write_uhwi (ob, jump_func->alignment.align);
4463 streamer_write_uhwi (ob, jump_func->alignment.misalign);
4464 }
4465 }
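/* A sketch of the record layout emitted above (and consumed by
   ipa_read_jump_function below):

     uhwi     jump function type
     ...      type-specific payload (trees/bitpacks as per the switch)
     uhwi     number of aggregate items
     bitpack  agg.by_ref                 (only when the count is nonzero)
     per item: uhwi offset, tree value
     bitpack  alignment.known
     uhwi align, uhwi misalign           (only when alignment is known)  */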
4466
4467 /* Read in jump function JUMP_FUNC from IB. */
4468
4469 static void
4470 ipa_read_jump_function (struct lto_input_block *ib,
4471 struct ipa_jump_func *jump_func,
4472 struct cgraph_edge *cs,
4473 struct data_in *data_in)
4474 {
4475 enum jump_func_type jftype;
4476 enum tree_code operation;
4477 int i, count;
4478
4479 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4480 switch (jftype)
4481 {
4482 case IPA_JF_UNKNOWN:
4483 ipa_set_jf_unknown (jump_func);
4484 break;
4485 case IPA_JF_CONST:
4486 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4487 break;
4488 case IPA_JF_PASS_THROUGH:
4489 operation = (enum tree_code) streamer_read_uhwi (ib);
4490 if (operation == NOP_EXPR)
4491 {
4492 int formal_id = streamer_read_uhwi (ib);
4493 struct bitpack_d bp = streamer_read_bitpack (ib);
4494 bool agg_preserved = bp_unpack_value (&bp, 1);
4495 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4496 }
4497 else
4498 {
4499 tree operand = stream_read_tree (ib, data_in);
4500 int formal_id = streamer_read_uhwi (ib);
4501 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4502 operation);
4503 }
4504 break;
4505 case IPA_JF_ANCESTOR:
4506 {
4507 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4508 int formal_id = streamer_read_uhwi (ib);
4509 struct bitpack_d bp = streamer_read_bitpack (ib);
4510 bool agg_preserved = bp_unpack_value (&bp, 1);
4511 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4512 break;
4513 }
4514 }
4515
4516 count = streamer_read_uhwi (ib);
4517 vec_alloc (jump_func->agg.items, count);
4518 if (count)
4519 {
4520 struct bitpack_d bp = streamer_read_bitpack (ib);
4521 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4522 }
4523 for (i = 0; i < count; i++)
4524 {
4525 struct ipa_agg_jf_item item;
4526 item.offset = streamer_read_uhwi (ib);
4527 item.value = stream_read_tree (ib, data_in);
4528 jump_func->agg.items->quick_push (item);
4529 }
4530
4531 struct bitpack_d bp = streamer_read_bitpack (ib);
4532 bool alignment_known = bp_unpack_value (&bp, 1);
4533 if (alignment_known)
4534 {
4535 jump_func->alignment.known = true;
4536 jump_func->alignment.align = streamer_read_uhwi (ib);
4537 jump_func->alignment.misalign = streamer_read_uhwi (ib);
4538 }
4539 else
4540 jump_func->alignment.known = false;
4541 }
4542
4543 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4544 relevant to indirect inlining to OB. */
4545
4546 static void
4547 ipa_write_indirect_edge_info (struct output_block *ob,
4548 struct cgraph_edge *cs)
4549 {
4550 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4551 struct bitpack_d bp;
4552
4553 streamer_write_hwi (ob, ii->param_index);
4554 bp = bitpack_create (ob->main_stream);
4555 bp_pack_value (&bp, ii->polymorphic, 1);
4556 bp_pack_value (&bp, ii->agg_contents, 1);
4557 bp_pack_value (&bp, ii->member_ptr, 1);
4558 bp_pack_value (&bp, ii->by_ref, 1);
4559 bp_pack_value (&bp, ii->vptr_changed, 1);
4560 streamer_write_bitpack (&bp);
4561 if (ii->agg_contents || ii->polymorphic)
4562 streamer_write_hwi (ob, ii->offset);
4563 else
4564 gcc_assert (ii->offset == 0);
4565
4566 if (ii->polymorphic)
4567 {
4568 streamer_write_hwi (ob, ii->otr_token);
4569 stream_write_tree (ob, ii->otr_type, true);
4570 ii->context.stream_out (ob);
4571 }
4572 }
4573
4574 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4575 relevant to indirect inlining from IB. */
4576
4577 static void
4578 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4579 struct data_in *data_in,
4580 struct cgraph_edge *cs)
4581 {
4582 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4583 struct bitpack_d bp;
4584
4585 ii->param_index = (int) streamer_read_hwi (ib);
4586 bp = streamer_read_bitpack (ib);
4587 ii->polymorphic = bp_unpack_value (&bp, 1);
4588 ii->agg_contents = bp_unpack_value (&bp, 1);
4589 ii->member_ptr = bp_unpack_value (&bp, 1);
4590 ii->by_ref = bp_unpack_value (&bp, 1);
4591 ii->vptr_changed = bp_unpack_value (&bp, 1);
4592 if (ii->agg_contents || ii->polymorphic)
4593 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4594 else
4595 ii->offset = 0;
4596 if (ii->polymorphic)
4597 {
4598 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4599 ii->otr_type = stream_read_tree (ib, data_in);
4600 ii->context.stream_in (ib, data_in);
4601 }
4602 }
4603
4604 /* Stream out NODE info to OB. */
4605
4606 static void
4607 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4608 {
4609 int node_ref;
4610 lto_symtab_encoder_t encoder;
4611 struct ipa_node_params *info = IPA_NODE_REF (node);
4612 int j;
4613 struct cgraph_edge *e;
4614 struct bitpack_d bp;
4615
4616 encoder = ob->decl_state->symtab_node_encoder;
4617 node_ref = lto_symtab_encoder_encode (encoder, node);
4618 streamer_write_uhwi (ob, node_ref);
4619
4620 streamer_write_uhwi (ob, ipa_get_param_count (info));
4621 for (j = 0; j < ipa_get_param_count (info); j++)
4622 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4623 bp = bitpack_create (ob->main_stream);
4624 gcc_assert (info->analysis_done
4625 || ipa_get_param_count (info) == 0);
4626 gcc_assert (!info->node_enqueued);
4627 gcc_assert (!info->ipcp_orig_node);
4628 for (j = 0; j < ipa_get_param_count (info); j++)
4629 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4630 streamer_write_bitpack (&bp);
4631 for (j = 0; j < ipa_get_param_count (info); j++)
4632 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4633 for (e = node->callees; e; e = e->next_callee)
4634 {
4635 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4636
4637 streamer_write_uhwi (ob,
4638 ipa_get_cs_argument_count (args) * 2
4639 + (args->polymorphic_call_contexts != NULL));
4640 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4641 {
4642 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4643 if (args->polymorphic_call_contexts != NULL)
4644 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4645 }
4646 }
4647 for (e = node->indirect_calls; e; e = e->next_callee)
4648 {
4649 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4650
4651 streamer_write_uhwi (ob,
4652 ipa_get_cs_argument_count (args) * 2
4653 + (args->polymorphic_call_contexts != NULL));
4654 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4655 {
4656 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4657 if (args->polymorphic_call_contexts != NULL)
4658 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4659 }
4660 ipa_write_indirect_edge_info (ob, e);
4661 }
4662 }
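/* Note on the per-edge encoding above: the streamed value is the argument
   count times two, plus one when polymorphic call contexts are present.
   For example, two jump functions with contexts are streamed as 5, and
   ipa_read_node_info below recovers the pair as (5 & 1, 5 / 2).  */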
4663
4664 /* Stream in NODE info from IB. */
4665
4666 static void
4667 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4668 struct data_in *data_in)
4669 {
4670 struct ipa_node_params *info = IPA_NODE_REF (node);
4671 int k;
4672 struct cgraph_edge *e;
4673 struct bitpack_d bp;
4674
4675 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4676
4677 for (k = 0; k < ipa_get_param_count (info); k++)
4678 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4679
4680 bp = streamer_read_bitpack (ib);
4681 if (ipa_get_param_count (info) != 0)
4682 info->analysis_done = true;
4683 info->node_enqueued = false;
4684 for (k = 0; k < ipa_get_param_count (info); k++)
4685 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4686 for (k = 0; k < ipa_get_param_count (info); k++)
4687 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4688 for (e = node->callees; e; e = e->next_callee)
4689 {
4690 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4691 int count = streamer_read_uhwi (ib);
4692 bool contexts_computed = count & 1;
4693 count /= 2;
4694
4695 if (!count)
4696 continue;
4697 vec_safe_grow_cleared (args->jump_functions, count);
4698 if (contexts_computed)
4699 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4700
4701 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4702 {
4703 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4704 data_in);
4705 if (contexts_computed)
4706 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4707 }
4708 }
4709 for (e = node->indirect_calls; e; e = e->next_callee)
4710 {
4711 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4712 int count = streamer_read_uhwi (ib);
4713 bool contexts_computed = count & 1;
4714 count /= 2;
4715
4716 if (count)
4717 {
4718 vec_safe_grow_cleared (args->jump_functions, count);
4719 if (contexts_computed)
4720 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4721 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4722 {
4723 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4724 data_in);
4725 if (contexts_computed)
4726 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4727 }
4728 }
4729 ipa_read_indirect_edge_info (ib, data_in, e);
4730 }
4731 }
4732
4733 /* Write jump functions for all functions with gimple bodies in the current partition. */
4734
4735 void
4736 ipa_prop_write_jump_functions (void)
4737 {
4738 struct cgraph_node *node;
4739 struct output_block *ob;
4740 unsigned int count = 0;
4741 lto_symtab_encoder_iterator lsei;
4742 lto_symtab_encoder_t encoder;
4743
4744 if (!ipa_node_params_sum)
4745 return;
4746
4747 ob = create_output_block (LTO_section_jump_functions);
4748 encoder = ob->decl_state->symtab_node_encoder;
4749 ob->symbol = NULL;
4750 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4751 lsei_next_function_in_partition (&lsei))
4752 {
4753 node = lsei_cgraph_node (lsei);
4754 if (node->has_gimple_body_p ()
4755 && IPA_NODE_REF (node) != NULL)
4756 count++;
4757 }
4758
4759 streamer_write_uhwi (ob, count);
4760
4761 /* Process all of the functions. */
4762 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4763 lsei_next_function_in_partition (&lsei))
4764 {
4765 node = lsei_cgraph_node (lsei);
4766 if (node->has_gimple_body_p ()
4767 && IPA_NODE_REF (node) != NULL)
4768 ipa_write_node_info (ob, node);
4769 }
4770 streamer_write_char_stream (ob->main_stream, 0);
4771 produce_asm (ob, NULL);
4772 destroy_output_block (ob);
4773 }
4774
4775 /* Read a jump-functions section in file FILE_DATA of length LEN with data DATA. */
4776
4777 static void
4778 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4779 size_t len)
4780 {
4781 const struct lto_function_header *header =
4782 (const struct lto_function_header *) data;
4783 const int cfg_offset = sizeof (struct lto_function_header);
4784 const int main_offset = cfg_offset + header->cfg_size;
4785 const int string_offset = main_offset + header->main_size;
4786 struct data_in *data_in;
4787 unsigned int i;
4788 unsigned int count;
4789
4790 lto_input_block ib_main ((const char *) data + main_offset,
4791 header->main_size, file_data->mode_table);
4792
4793 data_in =
4794 lto_data_in_create (file_data, (const char *) data + string_offset,
4795 header->string_size, vNULL);
4796 count = streamer_read_uhwi (&ib_main);
4797
4798 for (i = 0; i < count; i++)
4799 {
4800 unsigned int index;
4801 struct cgraph_node *node;
4802 lto_symtab_encoder_t encoder;
4803
4804 index = streamer_read_uhwi (&ib_main);
4805 encoder = file_data->symtab_node_encoder;
4806 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4807 index));
4808 gcc_assert (node->definition);
4809 ipa_read_node_info (&ib_main, node, data_in);
4810 }
4811 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4812 len);
4813 lto_data_in_delete (data_in);
4814 }
4815
4816 /* Read ipcp jump functions. */
4817
4818 void
4819 ipa_prop_read_jump_functions (void)
4820 {
4821 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4822 struct lto_file_decl_data *file_data;
4823 unsigned int j = 0;
4824
4825 ipa_check_create_node_params ();
4826 ipa_check_create_edge_args ();
4827 ipa_register_cgraph_hooks ();
4828
4829 while ((file_data = file_data_vec[j++]))
4830 {
4831 size_t len;
4832 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4833
4834 if (data)
4835 ipa_prop_read_section (file_data, data, len);
4836 }
4837 }
4838
4839 /* After merging units, we can get a mismatch in argument counts.
4840 Decl merging might also have rendered parameter lists obsolete.
4841 Also compute called_with_variable_arg info. */
4842
4843 void
4844 ipa_update_after_lto_read (void)
4845 {
4846 ipa_check_create_node_params ();
4847 ipa_check_create_edge_args ();
4848 }
4849
4850 void
4851 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
4852 {
4853 int node_ref;
4854 unsigned int count = 0;
4855 lto_symtab_encoder_t encoder;
4856 struct ipa_agg_replacement_value *aggvals, *av;
4857
4858 aggvals = ipa_get_agg_replacements_for_node (node);
4859 encoder = ob->decl_state->symtab_node_encoder;
4860 node_ref = lto_symtab_encoder_encode (encoder, node);
4861 streamer_write_uhwi (ob, node_ref);
4862
4863 for (av = aggvals; av; av = av->next)
4864 count++;
4865 streamer_write_uhwi (ob, count);
4866
4867 for (av = aggvals; av; av = av->next)
4868 {
4869 struct bitpack_d bp;
4870
4871 streamer_write_uhwi (ob, av->offset);
4872 streamer_write_uhwi (ob, av->index);
4873 stream_write_tree (ob, av->value, true);
4874
4875 bp = bitpack_create (ob->main_stream);
4876 bp_pack_value (&bp, av->by_ref, 1);
4877 streamer_write_bitpack (&bp);
4878 }
4879
4880 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4881 if (ts && vec_safe_length (ts->alignments) > 0)
4882 {
4883 count = ts->alignments->length ();
4884
4885 streamer_write_uhwi (ob, count);
4886 for (unsigned i = 0; i < count; ++i)
4887 {
4888 ipa_alignment *parm_al = &(*ts->alignments)[i];
4889
4890 struct bitpack_d bp;
4891 bp = bitpack_create (ob->main_stream);
4892 bp_pack_value (&bp, parm_al->known, 1);
4893 streamer_write_bitpack (&bp);
4894 if (parm_al->known)
4895 {
4896 streamer_write_uhwi (ob, parm_al->align);
4897 streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
4898 parm_al->misalign);
4899 }
4900 }
4901 }
4902 else
4903 streamer_write_uhwi (ob, 0);
4904 }
4905
4906 /* Stream in the aggregate value replacement chain and parameter alignments for NODE from IB. */
4907
4908 static void
4909 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4910 data_in *data_in)
4911 {
4912 struct ipa_agg_replacement_value *aggvals = NULL;
4913 unsigned int count, i;
4914
4915 count = streamer_read_uhwi (ib);
4916 for (i = 0; i < count; i++)
4917 {
4918 struct ipa_agg_replacement_value *av;
4919 struct bitpack_d bp;
4920
4921 av = ggc_alloc<ipa_agg_replacement_value> ();
4922 av->offset = streamer_read_uhwi (ib);
4923 av->index = streamer_read_uhwi (ib);
4924 av->value = stream_read_tree (ib, data_in);
4925 bp = streamer_read_bitpack (ib);
4926 av->by_ref = bp_unpack_value (&bp, 1);
4927 av->next = aggvals;
4928 aggvals = av;
4929 }
4930 ipa_set_node_agg_value_chain (node, aggvals);
4931
4932 count = streamer_read_uhwi (ib);
4933 if (count > 0)
4934 {
4935 ipcp_grow_transformations_if_necessary ();
4936
4937 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4938 vec_safe_grow_cleared (ts->alignments, count);
4939
4940 for (i = 0; i < count; i++)
4941 {
4942 ipa_alignment *parm_al;
4943 parm_al = &(*ts->alignments)[i];
4944 struct bitpack_d bp;
4945 bp = streamer_read_bitpack (ib);
4946 parm_al->known = bp_unpack_value (&bp, 1);
4947 if (parm_al->known)
4948 {
4949 parm_al->align = streamer_read_uhwi (ib);
4950 parm_al->misalign
4951 = streamer_read_hwi_in_range (ib, "ipa-prop misalign",
4952 0, parm_al->align);
4953 }
4954 }
4955 }
4956 }
4957
4958 /* Write all aggregate replacements for nodes in the current partition. */
4959
4960 void
4961 ipcp_write_transformation_summaries (void)
4962 {
4963 struct cgraph_node *node;
4964 struct output_block *ob;
4965 unsigned int count = 0;
4966 lto_symtab_encoder_iterator lsei;
4967 lto_symtab_encoder_t encoder;
4968
4969 ob = create_output_block (LTO_section_ipcp_transform);
4970 encoder = ob->decl_state->symtab_node_encoder;
4971 ob->symbol = NULL;
4972 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4973 lsei_next_function_in_partition (&lsei))
4974 {
4975 node = lsei_cgraph_node (lsei);
4976 if (node->has_gimple_body_p ())
4977 count++;
4978 }
4979
4980 streamer_write_uhwi (ob, count);
4981
4982 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4983 lsei_next_function_in_partition (&lsei))
4984 {
4985 node = lsei_cgraph_node (lsei);
4986 if (node->has_gimple_body_p ())
4987 write_ipcp_transformation_info (ob, node);
4988 }
4989 streamer_write_char_stream (ob->main_stream, 0);
4990 produce_asm (ob, NULL);
4991 destroy_output_block (ob);
4992 }
4993
4994 /* Read replacements section in file FILE_DATA of length LEN with data
4995 DATA. */
4996
4997 static void
4998 read_replacements_section (struct lto_file_decl_data *file_data,
4999 const char *data,
5000 size_t len)
5001 {
5002 const struct lto_function_header *header =
5003 (const struct lto_function_header *) data;
5004 const int cfg_offset = sizeof (struct lto_function_header);
5005 const int main_offset = cfg_offset + header->cfg_size;
5006 const int string_offset = main_offset + header->main_size;
5007 struct data_in *data_in;
5008 unsigned int i;
5009 unsigned int count;
5010
5011 lto_input_block ib_main ((const char *) data + main_offset,
5012 header->main_size, file_data->mode_table);
5013
5014 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5015 header->string_size, vNULL);
5016 count = streamer_read_uhwi (&ib_main);
5017
5018 for (i = 0; i < count; i++)
5019 {
5020 unsigned int index;
5021 struct cgraph_node *node;
5022 lto_symtab_encoder_t encoder;
5023
5024 index = streamer_read_uhwi (&ib_main);
5025 encoder = file_data->symtab_node_encoder;
5026 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5027 index));
5028 gcc_assert (node->definition);
5029 read_ipcp_transformation_info (&ib_main, node, data_in);
5030 }
5031 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5032 len);
5033 lto_data_in_delete (data_in);
5034 }
5035
5036 /* Read IPA-CP aggregate replacements. */
5037
5038 void
5039 ipcp_read_transformation_summaries (void)
5040 {
5041 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5042 struct lto_file_decl_data *file_data;
5043 unsigned int j = 0;
5044
5045 while ((file_data = file_data_vec[j++]))
5046 {
5047 size_t len;
5048 const char *data = lto_get_section_data (file_data,
5049 LTO_section_ipcp_transform,
5050 NULL, &len);
5051 if (data)
5052 read_replacements_section (file_data, data, len);
5053 }
5054 }
5055
5056 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5057 NODE. */
5058
5059 static void
5060 adjust_agg_replacement_values (struct cgraph_node *node,
5061 struct ipa_agg_replacement_value *aggval)
5062 {
5063 struct ipa_agg_replacement_value *v;
5064 int i, c = 0, d = 0, *adj;
5065
5066 if (!node->clone.combined_args_to_skip)
5067 return;
5068
5069 for (v = aggval; v; v = v->next)
5070 {
5071 gcc_assert (v->index >= 0);
5072 if (c < v->index)
5073 c = v->index;
5074 }
5075 c++;
5076
5077 adj = XALLOCAVEC (int, c);
5078 for (i = 0; i < c; i++)
5079 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5080 {
5081 adj[i] = -1;
5082 d++;
5083 }
5084 else
5085 adj[i] = i - d;
5086
5087 for (v = aggval; v; v = v->next)
5088 v->index = adj[v->index];
5089 }
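/* A worked example of the remapping above: if the clone skipped argument 1
   of an original three-argument function (combined_args_to_skip = {1}),
   the ADJ array becomes {0, -1, 1}, so a replacement for original index 2
   is renumbered to index 1 in the clone; index 1 itself must not occur in
   AGGVAL.  */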
5090
5091 /* Dominator walker driving the ipcp modification phase. */
5092
5093 class ipcp_modif_dom_walker : public dom_walker
5094 {
5095 public:
5096 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5097 vec<ipa_param_descriptor> descs,
5098 struct ipa_agg_replacement_value *av,
5099 bool *sc, bool *cc)
5100 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5101 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5102
5103 virtual edge before_dom_children (basic_block);
5104
5105 private:
5106 struct ipa_func_body_info *m_fbi;
5107 vec<ipa_param_descriptor> m_descriptors;
5108 struct ipa_agg_replacement_value *m_aggval;
5109 bool *m_something_changed, *m_cfg_changed;
5110 };
5111
5112 edge
5113 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5114 {
5115 gimple_stmt_iterator gsi;
5116 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5117 {
5118 struct ipa_agg_replacement_value *v;
5119 gimple *stmt = gsi_stmt (gsi);
5120 tree rhs, val, t;
5121 HOST_WIDE_INT offset, size;
5122 int index;
5123 bool by_ref, vce;
5124
5125 if (!gimple_assign_load_p (stmt))
5126 continue;
5127 rhs = gimple_assign_rhs1 (stmt);
5128 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5129 continue;
5130
5131 vce = false;
5132 t = rhs;
5133 while (handled_component_p (t))
5134 {
5135 /* V_C_E can do things like convert an array of integers to one
5136 bigger integer and similar things we do not handle below. */
5137 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5138 {
5139 vce = true;
5140 break;
5141 }
5142 t = TREE_OPERAND (t, 0);
5143 }
5144 if (vce)
5145 continue;
5146
5147 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5148 &offset, &size, &by_ref))
5149 continue;
5150 for (v = m_aggval; v; v = v->next)
5151 if (v->index == index
5152 && v->offset == offset)
5153 break;
5154 if (!v
5155 || v->by_ref != by_ref
5156 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5157 continue;
5158
5159 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5160 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5161 {
5162 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5163 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5164 else if (TYPE_SIZE (TREE_TYPE (rhs))
5165 == TYPE_SIZE (TREE_TYPE (v->value)))
5166 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5167 else
5168 {
5169 if (dump_file)
5170 {
5171 fprintf (dump_file, " const ");
5172 print_generic_expr (dump_file, v->value, 0);
5173 fprintf (dump_file, " can't be converted to type of ");
5174 print_generic_expr (dump_file, rhs, 0);
5175 fprintf (dump_file, "\n");
5176 }
5177 continue;
5178 }
5179 }
5180 else
5181 val = v->value;
5182
5183 if (dump_file && (dump_flags & TDF_DETAILS))
5184 {
5185 fprintf (dump_file, "Modifying stmt:\n ");
5186 print_gimple_stmt (dump_file, stmt, 0, 0);
5187 }
5188 gimple_assign_set_rhs_from_tree (&gsi, val);
5189 update_stmt (stmt);
5190
5191 if (dump_file && (dump_flags & TDF_DETAILS))
5192 {
5193 fprintf (dump_file, "into:\n ");
5194 print_gimple_stmt (dump_file, stmt, 0, 0);
5195 fprintf (dump_file, "\n");
5196 }
5197
5198 *m_something_changed = true;
5199 if (maybe_clean_eh_stmt (stmt)
5200 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5201 *m_cfg_changed = true;
5202 }
5203 return NULL;
5204 }
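/* A hypothetical example of the transformation performed above: if IPA-CP
   determined that in all relevant calls the aggregate field at the matched
   index/offset/size holds the constant 4, a load such as

     x_1 = p_2(D)->f;

   is rewritten to

     x_1 = 4;

   and dead EH edges are purged when the statement can no longer throw.  */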
5205
5206 /* Update alignment of formal parameters as described in
5207 ipcp_transformation_summary. */
5208
5209 static void
5210 ipcp_update_alignments (struct cgraph_node *node)
5211 {
5212 tree fndecl = node->decl;
5213 tree parm = DECL_ARGUMENTS (fndecl);
5214 tree next_parm = parm;
5215 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5216 if (!ts || vec_safe_length (ts->alignments) == 0)
5217 return;
5218 const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
5219 unsigned count = alignments.length ();
5220
5221 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5222 {
5223 if (node->clone.combined_args_to_skip
5224 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5225 continue;
5226 gcc_checking_assert (parm);
5227 next_parm = DECL_CHAIN (parm);
5228
5229 if (!alignments[i].known || !is_gimple_reg (parm))
5230 continue;
5231 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5232 if (!ddef)
5233 continue;
5234
5235 if (dump_file)
5236 fprintf (dump_file, " Adjusting alignment of param %u to %u, "
5237 "misalignment to %u\n", i, alignments[i].align,
5238 alignments[i].misalign);
5239
5240 struct ptr_info_def *pi = get_ptr_info (ddef);
5241 gcc_checking_assert (pi);
5242 unsigned old_align;
5243 unsigned old_misalign;
5244 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5245
5246 if (old_known
5247 && old_align >= alignments[i].align)
5248 {
5249 if (dump_file)
5250 fprintf (dump_file, " But the alignment was already %u.\n",
5251 old_align);
5252 continue;
5253 }
5254 set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
5255 }
5256 }
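/* For instance (a purely hypothetical example), if IPA-CP proved that every
   caller passes a pointer with known alignment 16 and misalignment 8, the
   summary entry is {known, align=16, misalign=8} and that information is
   attached to the default definition of the parameter via
   set_ptr_info_alignment, unless an equal or greater alignment was already
   known.  */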
5257
5258 /* IPCP transformation phase doing propagation of aggregate values. */
5259
5260 unsigned int
5261 ipcp_transform_function (struct cgraph_node *node)
5262 {
5263 vec<ipa_param_descriptor> descriptors = vNULL;
5264 struct ipa_func_body_info fbi;
5265 struct ipa_agg_replacement_value *aggval;
5266 int param_count;
5267 bool cfg_changed = false, something_changed = false;
5268
5269 gcc_checking_assert (cfun);
5270 gcc_checking_assert (current_function_decl);
5271
5272 if (dump_file)
5273 fprintf (dump_file, "Modification phase of node %s/%i\n",
5274 node->name (), node->order);
5275
5276 ipcp_update_alignments (node);
5277 aggval = ipa_get_agg_replacements_for_node (node);
5278 if (!aggval)
5279 return 0;
5280 param_count = count_formal_params (node->decl);
5281 if (param_count == 0)
5282 return 0;
5283 adjust_agg_replacement_values (node, aggval);
5284 if (dump_file)
5285 ipa_dump_agg_replacement_values (dump_file, aggval);
5286
5287 fbi.node = node;
5288 fbi.info = NULL;
5289 fbi.bb_infos = vNULL;
5290 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5291 fbi.param_count = param_count;
5292 fbi.aa_walked = 0;
5293
5294 descriptors.safe_grow_cleared (param_count);
5295 ipa_populate_param_decls (node, descriptors);
5296 calculate_dominance_info (CDI_DOMINATORS);
5297 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5298 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5299
5300 int i;
5301 struct ipa_bb_info *bi;
5302 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5303 free_ipa_bb_info (bi);
5304 fbi.bb_infos.release ();
5305 free_dominance_info (CDI_DOMINATORS);
5306 (*ipcp_transformations)[node->uid].agg_values = NULL;
5307 (*ipcp_transformations)[node->uid].alignments = NULL;
5308 descriptors.release ();
5309
5310 if (!something_changed)
5311 return 0;
5312 else if (cfg_changed)
5313 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5314 else
5315 return TODO_update_ssa_only_virtuals;
5316 }