1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2008, 2009, 2010 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* This file implements Scalar Replacement of Aggregates (SRA). SRA is run
24 twice, once in the early stages of compilation (early SRA) and once in the
25 late stages (late SRA). The aim of both is to turn references to scalar
26 parts of aggregates into uses of independent scalar variables.
27
28 The two passes are nearly identical; the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement because, together with inlining, this can lead to weird type
31 conversions.
32
33 Both passes operate in four stages:
34
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
38
39 2. The function body is scanned. In the process, declarations which are
40 used in a manner that prevents their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
46
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
49 accesses.
50
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
55
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
60
61 Then accesses are propagated whenever possible (i.e. in cases when it
62 does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
64
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
67
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from respective parameter aggregates. */
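
/* As a purely illustrative sketch (source-level C rather than the GIMPLE the
   pass really operates on), given a function such as:

     struct point { int x; int y; };

     int
     manhattan (struct point p)
     {
       return p.x + p.y;
     }

   stages 1 and 2 record p as a candidate with accesses to p.x and p.y,
   stage 3 elects scalar replacements for both fields, and stage 4 rewrites
   the body to use them, conceptually:

     int
     manhattan (struct point p)
     {
       int p$x = p.x;
       int p$y = p.y;
       return p$x + p$y;
     }

   where the initializing loads from the parameter aggregate correspond to
   point 4 above.  */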
73
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "alloc-pool.h"
78 #include "tm.h"
79 #include "tree.h"
80 #include "expr.h"
81 #include "gimple.h"
82 #include "cgraph.h"
83 #include "tree-flow.h"
84 #include "ipa-prop.h"
85 #include "diagnostic.h"
86 #include "statistics.h"
87 #include "tree-dump.h"
88 #include "timevar.h"
89 #include "params.h"
90 #include "target.h"
91 #include "flags.h"
92
93 /* Enumeration of all aggregate reductions we can do. */
94 enum sra_mode { SRA_MODE_EARLY_IPA, /* early call regularization */
95 SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
96 SRA_MODE_INTRA }; /* late intraprocedural SRA */
97
98 /* Global variable describing which aggregate reduction we are performing at
99 the moment. */
100 static enum sra_mode sra_mode;
101
102 struct assign_link;
103
104 /* ACCESS represents each access to an aggregate variable (as a whole or a
105 part). It can also represent a group of accesses that refer to exactly the
106 same fragment of an aggregate (i.e. those that have exactly the same offset
107 and size). Such representatives for a single aggregate, once determined,
108 are linked in a linked list and have the group fields set.
109
110 Moreover, when doing intraprocedural SRA, a tree is built from those
111 representatives (by the means of first_child and next_sibling pointers), in
112 which all items in a subtree are "within" the root, i.e. their offset is
113 greater or equal to offset of the root and offset+size is smaller or equal
114 to offset+size of the root. Children of an access are sorted by offset.
115
116 Note that accesses to parts of vector and complex number types are always
117 represented by an access to the whole complex number or a vector. It is a
118 duty of the modifying functions to replace them appropriately. */
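
/* A sketch of the resulting data structure (not something the pass itself
   prints): for

     struct inner { int b; int c; };
     struct outer { int a; struct inner in; };

   with accesses to o.a, o.in, o.in.b and o.in.c of some variable o and
   assuming 32-bit ints, the group representatives would be linked roughly
   like this:

     o.a (offset 0, size 32) --next_grp--> o.in (offset 32, size 64)
                                             first_child
                                               o.in.b (offset 32, size 32)
                                                 next_sibling
                                                   o.in.c (offset 64, size 32)

   where every access in a subtree lies entirely within its parent.  */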
119
120 struct access
121 {
122 /* Values returned by `get_ref_base_and_extent' for each component reference.
123 If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
124 `SIZE = TREE_SIZE (TREE_TYPE (expr))'. */
125 HOST_WIDE_INT offset;
126 HOST_WIDE_INT size;
127 tree base;
128
129 /* Expression. It is context dependent so do not use it to create new
130 expressions to access the original aggregate. See PR 42154 for a
131 testcase. */
132 tree expr;
133 /* Type. */
134 tree type;
135
136 /* The statement this access belongs to. */
137 gimple stmt;
138
139 /* Next group representative for this aggregate. */
140 struct access *next_grp;
141
142 /* Pointer to the group representative. Pointer to itself if the struct is
143 the representative. */
144 struct access *group_representative;
145
146 /* If this access has any children (in terms of the definition above), this
147 points to the first one. */
148 struct access *first_child;
149
150 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
151 described above. In IPA-SRA this is a pointer to the next access
152 belonging to the same group (having the same representative). */
153 struct access *next_sibling;
154
155 /* Pointers to the first and last element in the linked list of assign
156 links. */
157 struct assign_link *first_link, *last_link;
158
159 /* Pointer to the next access in the work queue. */
160 struct access *next_queued;
161
162 /* Replacement variable for this access "region." Never to be accessed
163 directly, always only by the means of get_access_replacement() and only
164 when grp_to_be_replaced flag is set. */
165 tree replacement_decl;
166
167 /* Is this particular access a write access? */
168 unsigned write : 1;
169
170 /* Is this access an artificial one created to scalarize some record
171 entirely? */
172 unsigned total_scalarization : 1;
173
174 /* Is this access currently in the work queue? */
175 unsigned grp_queued : 1;
176
177 /* Does this group contain a write access? This flag is propagated down the
178 access tree. */
179 unsigned grp_write : 1;
180
181 /* Does this group contain a read access? This flag is propagated down the
182 access tree. */
183 unsigned grp_read : 1;
184
185 /* Does this group contain a read access that comes from an assignment
186 statement? This flag is propagated down the access tree. */
187 unsigned grp_assignment_read : 1;
188
189 /* Other passes of the analysis use this bit to make function
190 analyze_access_subtree create scalar replacements for this group if
191 possible. */
192 unsigned grp_hint : 1;
193
194 /* Is the subtree rooted in this access fully covered by scalar
195 replacements? */
196 unsigned grp_covered : 1;
197
198 /* If set to true, this access and all below it in an access tree must not be
199 scalarized. */
200 unsigned grp_unscalarizable_region : 1;
201
202 /* Whether data have been written to parts of the aggregate covered by this
203 access which are not to be scalarized. This flag is propagated up in the
204 access tree. */
205 unsigned grp_unscalarized_data : 1;
206
207 /* Does this access and/or group contain a write access through a
208 BIT_FIELD_REF? */
209 unsigned grp_partial_lhs : 1;
210
211 /* Set when a scalar replacement should be created for this variable. The
212 decision and the creation happen at different places because create_tmp_var
213 cannot be called from within FOR_EACH_REFERENCED_VAR. */
214 unsigned grp_to_be_replaced : 1;
215
216 /* Is it possible that the group refers to data which might be (directly or
217 otherwise) modified? */
218 unsigned grp_maybe_modified : 1;
219
220 /* Set when this is a representative of a pointer to scalar (i.e. by
221 reference) parameter which we consider for turning into a plain scalar
222 (i.e. a by value parameter). */
223 unsigned grp_scalar_ptr : 1;
224
225 /* Set when we discover that this pointer is not safe to dereference in the
226 caller. */
227 unsigned grp_not_necessarilly_dereferenced : 1;
228 };
229
230 typedef struct access *access_p;
231
232 DEF_VEC_P (access_p);
233 DEF_VEC_ALLOC_P (access_p, heap);
234
235 /* Alloc pool for allocating access structures. */
236 static alloc_pool access_pool;
237
238 /* A structure linking lhs and rhs accesses from an aggregate assignment. They
239 are used to propagate subaccesses from rhs to lhs as long as they don't
240 conflict with what is already there. */
241 struct assign_link
242 {
243 struct access *lacc, *racc;
244 struct assign_link *next;
245 };
246
247 /* Alloc pool for allocating assign link structures. */
248 static alloc_pool link_pool;
249
250 /* Base (tree) -> Vector (VEC(access_p,heap) *) map. */
251 static struct pointer_map_t *base_access_vec;
252
253 /* Bitmap of candidates. */
254 static bitmap candidate_bitmap;
255
256 /* Bitmap of candidates which we should try to entirely scalarize away and
257 those which cannot be (because they are, and need to be, used as a whole). */
258 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
259
260 /* Obstack for creation of fancy names. */
261 static struct obstack name_obstack;
262
263 /* Head of a linked list of accesses that need to have their subaccesses
264 propagated to their assignment counterparts. */
265 static struct access *work_queue_head;
266
267 /* Number of parameters of the analyzed function when doing early ipa SRA. */
268 static int func_param_count;
269
270 /* scan_function sets the following to true if it encounters a call to
271 __builtin_apply_args. */
272 static bool encountered_apply_args;
273
274 /* Set by scan_function when it finds a recursive call. */
275 static bool encountered_recursive_call;
276
277 /* Set by scan_function when it finds a recursive call with fewer actual
278 arguments than formal parameters. */
279 static bool encountered_unchangable_recursive_call;
280
281 /* This is a table in which, for each basic block and parameter of the
282 function, there is a distance (offset + size) in that parameter which is
283 dereferenced and accessed in that BB. */
284 static HOST_WIDE_INT *bb_dereferences;
285 /* Bitmap of BBs that can cause the function to "stop" progressing by
286 returning, throwing externally, looping infinitely or calling a function
287 which might abort, etc. */
288 static bitmap final_bbs;
289
290 /* Representative of no accesses at all. */
291 static struct access no_accesses_representant;
292
293 /* Predicate to test the special value. */
294
295 static inline bool
296 no_accesses_p (struct access *access)
297 {
298 return access == &no_accesses_representant;
299 }
300
305 static struct
306 {
307 /* Number of processed aggregates is readily available in
308 analyze_all_variable_accesses and so is not stored here. */
309
310 /* Number of created scalar replacements. */
311 int replacements;
312
313 /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
314 expression. */
315 int exprs;
316
317 /* Number of statements created by generate_subtree_copies. */
318 int subtree_copies;
319
320 /* Number of statements created by load_assign_lhs_subreplacements. */
321 int subreplacements;
322
323 /* Number of times sra_modify_assign has deleted a statement. */
324 int deleted;
325
326 /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
327 RHS separately due to type conversions or nonexistent matching
328 references. */
329 int separate_lhs_rhs_handling;
330
331 /* Number of parameters that were removed because they were unused. */
332 int deleted_unused_parameters;
333
334 /* Number of scalars passed as parameters by reference that have been
335 converted to be passed by value. */
336 int scalar_by_ref_to_by_val;
337
338 /* Number of aggregate parameters that were replaced by one or more of their
339 components. */
340 int aggregate_params_reduced;
341
342 /* Number of components created when splitting aggregate parameters. */
343 int param_reductions_created;
344 } sra_stats;
345
/* Dump contents of ACCESS to file F in a human friendly way. If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are. */

346 static void
347 dump_access (FILE *f, struct access *access, bool grp)
348 {
349 fprintf (f, "access { ");
350 fprintf (f, "base = (%d)'", DECL_UID (access->base));
351 print_generic_expr (f, access->base, 0);
352 fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
353 fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
354 fprintf (f, ", expr = ");
355 print_generic_expr (f, access->expr, 0);
356 fprintf (f, ", type = ");
357 print_generic_expr (f, access->type, 0);
358 if (grp)
359 fprintf (f, ", grp_write = %d, total_scalarization = %d, "
360 "grp_read = %d, grp_hint = %d, "
361 "grp_covered = %d, grp_unscalarizable_region = %d, "
362 "grp_unscalarized_data = %d, grp_partial_lhs = %d, "
363 "grp_to_be_replaced = %d, grp_maybe_modified = %d, "
364 "grp_not_necessarilly_dereferenced = %d\n",
365 access->grp_write, access->total_scalarization,
366 access->grp_read, access->grp_hint,
367 access->grp_covered, access->grp_unscalarizable_region,
368 access->grp_unscalarized_data, access->grp_partial_lhs,
369 access->grp_to_be_replaced, access->grp_maybe_modified,
370 access->grp_not_necessarilly_dereferenced);
371 else
372 fprintf (f, ", write = %d, total_scalarization = %d, "
373 "grp_partial_lhs = %d\n",
374 access->write, access->total_scalarization,
375 access->grp_partial_lhs);
376 }
377
378 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
379
380 static void
381 dump_access_tree_1 (FILE *f, struct access *access, int level)
382 {
383 do
384 {
385 int i;
386
387 for (i = 0; i < level; i++)
388 fputs ("* ", f);
389
390 dump_access (f, access, true);
391
392 if (access->first_child)
393 dump_access_tree_1 (f, access->first_child, level + 1);
394
395 access = access->next_sibling;
396 }
397 while (access);
398 }
399
400 /* Dump all access trees for a variable, given the pointer to the first root in
401 ACCESS. */
402
403 static void
404 dump_access_tree (FILE *f, struct access *access)
405 {
406 for (; access; access = access->next_grp)
407 dump_access_tree_1 (f, access, 0);
408 }
409
410 /* Return true iff ACC is non-NULL and has subaccesses. */
411
412 static inline bool
413 access_has_children_p (struct access *acc)
414 {
415 return acc && acc->first_child;
416 }
417
418 /* Return a vector of pointers to accesses for the variable given in BASE or
419 NULL if there is none. */
420
421 static VEC (access_p, heap) *
422 get_base_access_vector (tree base)
423 {
424 void **slot;
425
426 slot = pointer_map_contains (base_access_vec, base);
427 if (!slot)
428 return NULL;
429 else
430 return *(VEC (access_p, heap) **) slot;
431 }
432
433 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
434 in ACCESS. Return NULL if it cannot be found. */
435
436 static struct access *
437 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
438 HOST_WIDE_INT size)
439 {
440 while (access && (access->offset != offset || access->size != size))
441 {
442 struct access *child = access->first_child;
443
444 while (child && (child->offset + child->size <= offset))
445 child = child->next_sibling;
446 access = child;
447 }
448
449 return access;
450 }
451
452 /* Return the first group representative for BASE or NULL if none exists. */
453
454 static struct access *
455 get_first_repr_for_decl (tree base)
456 {
457 VEC (access_p, heap) *access_vec;
458
459 access_vec = get_base_access_vector (base);
460 if (!access_vec)
461 return NULL;
462
463 return VEC_index (access_p, access_vec, 0);
464 }
465
466 /* Find an access representative for the variable BASE and given OFFSET and
467 SIZE. Requires that access trees have already been built. Return NULL if
468 it cannot be found. */
469
470 static struct access *
471 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
472 HOST_WIDE_INT size)
473 {
474 struct access *access;
475
476 access = get_first_repr_for_decl (base);
477 while (access && (access->offset + access->size <= offset))
478 access = access->next_grp;
479 if (!access)
480 return NULL;
481
482 return find_access_in_subtree (access, offset, size);
483 }
484
485 /* Add LINK to the linked list of assign links of RACC. */
486 static void
487 add_link_to_rhs (struct access *racc, struct assign_link *link)
488 {
489 gcc_assert (link->racc == racc);
490
491 if (!racc->first_link)
492 {
493 gcc_assert (!racc->last_link);
494 racc->first_link = link;
495 }
496 else
497 racc->last_link->next = link;
498
499 racc->last_link = link;
500 link->next = NULL;
501 }
502
503 /* Move all link structures in the linked list of OLD_RACC to the linked list
504 of NEW_RACC. */
505 static void
506 relink_to_new_repr (struct access *new_racc, struct access *old_racc)
507 {
508 if (!old_racc->first_link)
509 {
510 gcc_assert (!old_racc->last_link);
511 return;
512 }
513
514 if (new_racc->first_link)
515 {
516 gcc_assert (!new_racc->last_link->next);
517 gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
518
519 new_racc->last_link->next = old_racc->first_link;
520 new_racc->last_link = old_racc->last_link;
521 }
522 else
523 {
524 gcc_assert (!new_racc->last_link);
525
526 new_racc->first_link = old_racc->first_link;
527 new_racc->last_link = old_racc->last_link;
528 }
529 old_racc->first_link = old_racc->last_link = NULL;
530 }
531
532 /* Add ACCESS to the work queue (which is actually a stack). */
533
534 static void
535 add_access_to_work_queue (struct access *access)
536 {
537 if (!access->grp_queued)
538 {
539 gcc_assert (!access->next_queued);
540 access->next_queued = work_queue_head;
541 access->grp_queued = 1;
542 work_queue_head = access;
543 }
544 }
545
546 /* Pop an access from the work queue, and return it, assuming there is one. */
547
548 static struct access *
549 pop_access_from_work_queue (void)
550 {
551 struct access *access = work_queue_head;
552
553 work_queue_head = access->next_queued;
554 access->next_queued = NULL;
555 access->grp_queued = 0;
556 return access;
557 }
558
559
560 /* Allocate necessary structures. */
561
562 static void
563 sra_initialize (void)
564 {
565 candidate_bitmap = BITMAP_ALLOC (NULL);
566 should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
567 cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
568 gcc_obstack_init (&name_obstack);
569 access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
570 link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
571 base_access_vec = pointer_map_create ();
572 memset (&sra_stats, 0, sizeof (sra_stats));
573 encountered_apply_args = false;
574 encountered_recursive_call = false;
575 encountered_unchangable_recursive_call = false;
576 }
577
578 /* Hook fed to pointer_map_traverse, deallocate stored vectors. */
579
580 static bool
581 delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
582 void *data ATTRIBUTE_UNUSED)
583 {
584 VEC (access_p, heap) *access_vec;
585 access_vec = (VEC (access_p, heap) *) *value;
586 VEC_free (access_p, heap, access_vec);
587
588 return true;
589 }
590
591 /* Deallocate all general structures. */
592
593 static void
594 sra_deinitialize (void)
595 {
596 BITMAP_FREE (candidate_bitmap);
597 BITMAP_FREE (should_scalarize_away_bitmap);
598 BITMAP_FREE (cannot_scalarize_away_bitmap);
599 free_alloc_pool (access_pool);
600 free_alloc_pool (link_pool);
601 obstack_free (&name_obstack, NULL);
602
603 pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
604 pointer_map_destroy (base_access_vec);
605 }
606
607 /* Remove DECL from candidates for SRA and write REASON to the dump file if
608 there is one. */
609 static void
610 disqualify_candidate (tree decl, const char *reason)
611 {
612 bitmap_clear_bit (candidate_bitmap, DECL_UID (decl));
613
614 if (dump_file && (dump_flags & TDF_DETAILS))
615 {
616 fprintf (dump_file, "! Disqualifying ");
617 print_generic_expr (dump_file, decl, 0);
618 fprintf (dump_file, " - %s\n", reason);
619 }
620 }
621
622 /* Return true iff the type contains a field or an element which does not allow
623 scalarization. */
624
625 static bool
626 type_internals_preclude_sra_p (tree type)
627 {
628 tree fld;
629 tree et;
630
631 switch (TREE_CODE (type))
632 {
633 case RECORD_TYPE:
634 case UNION_TYPE:
635 case QUAL_UNION_TYPE:
636 for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
637 if (TREE_CODE (fld) == FIELD_DECL)
638 {
639 tree ft = TREE_TYPE (fld);
640
641 if (TREE_THIS_VOLATILE (fld)
642 || !DECL_FIELD_OFFSET (fld) || !DECL_SIZE (fld)
643 || !host_integerp (DECL_FIELD_OFFSET (fld), 1)
644 || !host_integerp (DECL_SIZE (fld), 1))
645 return true;
646
647 if (AGGREGATE_TYPE_P (ft)
648 && type_internals_preclude_sra_p (ft))
649 return true;
650 }
651
652 return false;
653
654 case ARRAY_TYPE:
655 et = TREE_TYPE (type);
656
657 if (AGGREGATE_TYPE_P (et))
658 return type_internals_preclude_sra_p (et);
659 else
660 return false;
661
662 default:
663 return false;
664 }
665 }
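
/* For instance, the predicate above would return true for the following
   hypothetical type (i.e. its internals preclude SRA) because of the
   volatile field, whereas the same structure without the qualifier would be
   acceptable:

     struct precluded
     {
       volatile int flag;
       int data;
     };
*/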
666
667 /* If T is an SSA_NAME, return NULL if it is not a default def or return its
668 base variable if it is. Return T if it is not an SSA_NAME. */
669
670 static tree
671 get_ssa_base_param (tree t)
672 {
673 if (TREE_CODE (t) == SSA_NAME)
674 {
675 if (SSA_NAME_IS_DEFAULT_DEF (t))
676 return SSA_NAME_VAR (t);
677 else
678 return NULL_TREE;
679 }
680 return t;
681 }
682
683 /* Mark a dereference of BASE of distance DIST in the basic block that STMT
684 belongs to, unless the BB has already been marked as potentially
685 final. */
686
687 static void
688 mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
689 {
690 basic_block bb = gimple_bb (stmt);
691 int idx, parm_index = 0;
692 tree parm;
693
694 if (bitmap_bit_p (final_bbs, bb->index))
695 return;
696
697 for (parm = DECL_ARGUMENTS (current_function_decl);
698 parm && parm != base;
699 parm = TREE_CHAIN (parm))
700 parm_index++;
701
702 gcc_assert (parm_index < func_param_count);
703
704 idx = bb->index * func_param_count + parm_index;
705 if (bb_dereferences[idx] < dist)
706 bb_dereferences[idx] = dist;
707 }
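
/* As an illustration of the indexing scheme used above: assuming
   func_param_count is 2, the dereferenced distance of the second parameter
   (parm_index 1) in basic block number 3 would be kept in
   bb_dereferences[3 * 2 + 1].  */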
708
709 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
710 the three fields. Also add it to the vector of accesses corresponding to
711 the base. Finally, return the new access. */
712
713 static struct access *
714 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
715 {
716 VEC (access_p, heap) *vec;
717 struct access *access;
718 void **slot;
719
720 access = (struct access *) pool_alloc (access_pool);
721 memset (access, 0, sizeof (struct access));
722 access->base = base;
723 access->offset = offset;
724 access->size = size;
725
726 slot = pointer_map_contains (base_access_vec, base);
727 if (slot)
728 vec = (VEC (access_p, heap) *) *slot;
729 else
730 vec = VEC_alloc (access_p, heap, 32);
731
732 VEC_safe_push (access_p, heap, vec, access);
733
734 *((struct VEC (access_p,heap) **)
735 pointer_map_insert (base_access_vec, base)) = vec;
736
737 return access;
738 }
739
740 /* Create and insert access for EXPR. Return the created access, or NULL if
741 it is not possible. */
742
743 static struct access *
744 create_access (tree expr, gimple stmt, bool write)
745 {
746 struct access *access;
747 HOST_WIDE_INT offset, size, max_size;
748 tree base = expr;
749 bool ptr, unscalarizable_region = false;
750
751 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
752
753 if (sra_mode == SRA_MODE_EARLY_IPA && INDIRECT_REF_P (base))
754 {
755 base = get_ssa_base_param (TREE_OPERAND (base, 0));
756 if (!base)
757 return NULL;
758 ptr = true;
759 }
760 else
761 ptr = false;
762
763 if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
764 return NULL;
765
766 if (sra_mode == SRA_MODE_EARLY_IPA)
767 {
768 if (size < 0 || size != max_size)
769 {
770 disqualify_candidate (base, "Encountered a variable sized access.");
771 return NULL;
772 }
773 if ((offset % BITS_PER_UNIT) != 0 || (size % BITS_PER_UNIT) != 0)
774 {
775 disqualify_candidate (base,
776 "Encountered an access not aligned to a byte.");
777 return NULL;
778 }
779
780 if (ptr)
781 mark_parm_dereference (base, offset + size, stmt);
782 }
783 else
784 {
785 if (size != max_size)
786 {
787 size = max_size;
788 unscalarizable_region = true;
789 }
790 if (size < 0)
791 {
792 disqualify_candidate (base, "Encountered an unconstrained access.");
793 return NULL;
794 }
795 }
796
797 access = create_access_1 (base, offset, size);
798 access->expr = expr;
799 access->type = TREE_TYPE (expr);
800 access->write = write;
801 access->grp_unscalarizable_region = unscalarizable_region;
802 access->stmt = stmt;
803
804 return access;
805 }
806
807
808 /* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
809 register types or (recursively) records with only these two kinds of fields.
810 It also returns false if any of these records has a zero-size field as its
811 last field. */
812
813 static bool
814 type_consists_of_records_p (tree type)
815 {
816 tree fld;
817 bool last_fld_has_zero_size = false;
818
819 if (TREE_CODE (type) != RECORD_TYPE)
820 return false;
821
822 for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
823 if (TREE_CODE (fld) == FIELD_DECL)
824 {
825 tree ft = TREE_TYPE (fld);
826
827 if (!is_gimple_reg_type (ft)
828 && !type_consists_of_records_p (ft))
829 return false;
830
831 last_fld_has_zero_size = tree_low_cst (DECL_SIZE (fld), 1) == 0;
832 }
833
834 if (last_fld_has_zero_size)
835 return false;
836
837 return true;
838 }
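
/* A sketch of what the predicate above accepts and rejects:

     struct empty {};
     struct ok  { int i; struct { double d; } nested; };
     struct bad { int i; struct empty e; };

   Here "ok" qualifies, "bad" is rejected because its zero-size (GNU C)
   struct empty is the last field, and any union is rejected outright because
   it is not a RECORD_TYPE.  */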
839
840 /* Create total_scalarization accesses for all scalar type fields in DECL,
841 whose type must be a RECORD_TYPE conforming to type_consists_of_records_p.
842 BASE must be the top-most VAR_DECL representing the variable, OFFSET must be
843 the offset of DECL within BASE. */
844
845 static void
846 completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset)
847 {
848 tree fld, decl_type = TREE_TYPE (decl);
849
850 for (fld = TYPE_FIELDS (decl_type); fld; fld = TREE_CHAIN (fld))
851 if (TREE_CODE (fld) == FIELD_DECL)
852 {
853 HOST_WIDE_INT pos = offset + int_bit_position (fld);
854 tree ft = TREE_TYPE (fld);
855
856 if (is_gimple_reg_type (ft))
857 {
858 struct access *access;
859 HOST_WIDE_INT size;
860 tree expr;
861 bool ok;
862
863 size = tree_low_cst (DECL_SIZE (fld), 1);
864 expr = base;
865 ok = build_ref_for_offset (&expr, TREE_TYPE (base), pos,
866 ft, false);
867 gcc_assert (ok);
868
869 access = create_access_1 (base, pos, size);
870 access->expr = expr;
871 access->type = ft;
872 access->total_scalarization = 1;
873 /* Accesses for intraprocedural SRA can have their stmt NULL. */
874 }
875 else
876 completely_scalarize_record (base, fld, pos);
877 }
878 }
879
880
881 /* Search the given tree T for a declaration by skipping handled components
882 and exclude it from the candidates. */
883
884 static void
885 disqualify_base_of_expr (tree t, const char *reason)
886 {
887 while (handled_component_p (t))
888 t = TREE_OPERAND (t, 0);
889
890 if (sra_mode == SRA_MODE_EARLY_IPA)
891 {
892 if (INDIRECT_REF_P (t))
893 t = TREE_OPERAND (t, 0);
894 t = get_ssa_base_param (t);
895 }
896
897 if (t && DECL_P (t))
898 disqualify_candidate (t, reason);
899 }
900
901 /* Scan expression EXPR and create access structures for all accesses to
902 candidates for scalarization. Return the created access or NULL if none is
903 created. */
904
905 static struct access *
906 build_access_from_expr_1 (tree *expr_ptr, gimple stmt, bool write)
907 {
908 struct access *ret = NULL;
909 tree expr = *expr_ptr;
910 bool partial_ref;
911
912 if (TREE_CODE (expr) == BIT_FIELD_REF
913 || TREE_CODE (expr) == IMAGPART_EXPR
914 || TREE_CODE (expr) == REALPART_EXPR)
915 {
916 expr = TREE_OPERAND (expr, 0);
917 partial_ref = true;
918 }
919 else
920 partial_ref = false;
921
922 /* We need to dive through V_C_Es in order to get the size of the converted
923 operand and not of the result type. Ada produces such statements. We are
924 also capable of handling the topmost V_C_E but not any of those buried in
925 other handled components. */
926 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
927 expr = TREE_OPERAND (expr, 0);
928
929 if (contains_view_convert_expr_p (expr))
930 {
931 disqualify_base_of_expr (expr, "V_C_E under a different handled "
932 "component.");
933 return NULL;
934 }
935
936 switch (TREE_CODE (expr))
937 {
938 case INDIRECT_REF:
939 if (sra_mode != SRA_MODE_EARLY_IPA)
940 return NULL;
941 /* fall through */
942 case VAR_DECL:
943 case PARM_DECL:
944 case RESULT_DECL:
945 case COMPONENT_REF:
946 case ARRAY_REF:
947 case ARRAY_RANGE_REF:
948 ret = create_access (expr, stmt, write);
949 break;
950
951 default:
952 break;
953 }
954
955 if (write && partial_ref && ret)
956 ret->grp_partial_lhs = 1;
957
958 return ret;
959 }
960
961 /* Callback of scan_function. Scan expression EXPR and create access
962 structures for all accesses to candidates for scalarization. Return true if
963 any access has been inserted. */
964
965 static bool
966 build_access_from_expr (tree *expr_ptr,
967 gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED, bool write,
968 void *data ATTRIBUTE_UNUSED)
969 {
970 struct access *access;
971
972 access = build_access_from_expr_1 (expr_ptr, gsi_stmt (*gsi), write);
973 if (access)
974 {
975 /* This means the aggregate is accessed as a whole in a way other than an
976 assign statement and thus cannot be removed even if we had a scalar
977 replacement for everything. */
978 if (cannot_scalarize_away_bitmap)
979 bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
980 return true;
981 }
982 return false;
983 }
984
985 /* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
986 modes in which it matters; return true iff they have been disqualified. RHS
987 may be NULL, in that case ignore it. If we scalarize an aggregate in
988 intra-SRA we may need to add statements after each statement. This is not
989 possible if a statement unconditionally has to end the basic block. */
990 static bool
991 disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
992 {
993 if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
994 && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
995 {
996 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
997 if (rhs)
998 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
999 return true;
1000 }
1001 return false;
1002 }
1003
1004
1005 /* Result code for scan_assign callback for scan_function. */
1006 enum scan_assign_result { SRA_SA_NONE, /* nothing done for the stmt */
1007 SRA_SA_PROCESSED, /* stmt analyzed/changed */
1008 SRA_SA_REMOVED }; /* stmt redundant and eliminated */
1009
1010
1011 /* Callback of scan_function. Scan expressions occurring in the statement
1012 pointed to by STMT_EXPR, create access structures for all accesses to
1013 candidates for scalarization and remove those candidates which occur in
1014 statements or expressions that prevent them from being split apart. Return
1015 true if any access has been inserted. */
1016
1017 static enum scan_assign_result
1018 build_accesses_from_assign (gimple *stmt_ptr,
1019 gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
1020 void *data ATTRIBUTE_UNUSED)
1021 {
1022 gimple stmt = *stmt_ptr;
1023 tree *lhs_ptr, *rhs_ptr;
1024 struct access *lacc, *racc;
1025
1026 if (!gimple_assign_single_p (stmt))
1027 return SRA_SA_NONE;
1028
1029 lhs_ptr = gimple_assign_lhs_ptr (stmt);
1030 rhs_ptr = gimple_assign_rhs1_ptr (stmt);
1031
1032 if (disqualify_ops_if_throwing_stmt (stmt, *lhs_ptr, *rhs_ptr))
1033 return SRA_SA_NONE;
1034
1035 racc = build_access_from_expr_1 (rhs_ptr, stmt, false);
1036 lacc = build_access_from_expr_1 (lhs_ptr, stmt, true);
1037
1038 if (racc)
1039 {
1040 racc->grp_assignment_read = 1;
1041 if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
1042 && !is_gimple_reg_type (racc->type))
1043 bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
1044 }
1045
1046 if (lacc && racc
1047 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1048 && !lacc->grp_unscalarizable_region
1049 && !racc->grp_unscalarizable_region
1050 && AGGREGATE_TYPE_P (TREE_TYPE (*lhs_ptr))
1051 /* FIXME: Turn the following line into an assert after PR 40058 is
1052 fixed. */
1053 && lacc->size == racc->size
1054 && useless_type_conversion_p (lacc->type, racc->type))
1055 {
1056 struct assign_link *link;
1057
1058 link = (struct assign_link *) pool_alloc (link_pool);
1059 memset (link, 0, sizeof (struct assign_link));
1060
1061 link->lacc = lacc;
1062 link->racc = racc;
1063
1064 add_link_to_rhs (racc, link);
1065 }
1066
1067 return (lacc || racc) ? SRA_SA_PROCESSED : SRA_SA_NONE;
1068 }
1069
1070 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1071 GIMPLE_ASM operands with memory constraints which cannot be scalarized. */
1072
1073 static bool
1074 asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
1075 void *data ATTRIBUTE_UNUSED)
1076 {
1077 op = get_base_address (op);
1078 if (op
1079 && DECL_P (op))
1080 disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1081
1082 return false;
1083 }
1084
1085 /* Return true iff callsite CALL has at least as many actual arguments as there
1086 are formal parameters of the function currently processed by IPA-SRA. */
1087
1088 static inline bool
1089 callsite_has_enough_arguments_p (gimple call)
1090 {
1091 return gimple_call_num_args (call) >= (unsigned) func_param_count;
1092 }
1093
1094 /* Scan function and look for interesting statements. Return true if any has
1095 been found or processed, as indicated by callbacks. SCAN_EXPR is a callback
1096 called on all expressions within statements except assign statements and
1097 those deemed entirely unsuitable for some reason (all operands in such
1098 statements and expressions are removed from candidate_bitmap). SCAN_ASSIGN
1099 is a callback called on all assign statements; HANDLE_SSA_DEFS is a callback
1100 called on assign statements and those call statements which have a lhs, and it
1101 can be NULL. ANALYSIS_STAGE is true when running in the analysis stage of a
1102 pass and thus no statement is being modified. DATA is a pointer passed to
1103 all callbacks. If any single callback returns true, this function also
1104 returns true, otherwise it returns false. */
1105
1106 static bool
1107 scan_function (bool (*scan_expr) (tree *, gimple_stmt_iterator *, bool, void *),
1108 enum scan_assign_result (*scan_assign) (gimple *,
1109 gimple_stmt_iterator *,
1110 void *),
1111 bool (*handle_ssa_defs)(gimple, void *),
1112 bool analysis_stage, void *data)
1113 {
1114 gimple_stmt_iterator gsi;
1115 basic_block bb;
1116 unsigned i;
1117 tree *t;
1118 bool ret = false;
1119
1120 FOR_EACH_BB (bb)
1121 {
1122 bool bb_changed = false;
1123
1124 if (handle_ssa_defs)
1125 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1126 ret |= handle_ssa_defs (gsi_stmt (gsi), data);
1127
1128 gsi = gsi_start_bb (bb);
1129 while (!gsi_end_p (gsi))
1130 {
1131 gimple stmt = gsi_stmt (gsi);
1132 enum scan_assign_result assign_result;
1133 bool any = false, deleted = false;
1134
1135 if (analysis_stage && final_bbs && stmt_can_throw_external (stmt))
1136 bitmap_set_bit (final_bbs, bb->index);
1137 switch (gimple_code (stmt))
1138 {
1139 case GIMPLE_RETURN:
1140 t = gimple_return_retval_ptr (stmt);
1141 if (*t != NULL_TREE)
1142 any |= scan_expr (t, &gsi, false, data);
1143 if (analysis_stage && final_bbs)
1144 bitmap_set_bit (final_bbs, bb->index);
1145 break;
1146
1147 case GIMPLE_ASSIGN:
1148 assign_result = scan_assign (&stmt, &gsi, data);
1149 any |= assign_result == SRA_SA_PROCESSED;
1150 deleted = assign_result == SRA_SA_REMOVED;
1151 if (handle_ssa_defs && assign_result != SRA_SA_REMOVED)
1152 any |= handle_ssa_defs (stmt, data);
1153 break;
1154
1155 case GIMPLE_CALL:
1156 /* Operands must be processed before the lhs. */
1157 for (i = 0; i < gimple_call_num_args (stmt); i++)
1158 {
1159 tree *argp = gimple_call_arg_ptr (stmt, i);
1160 any |= scan_expr (argp, &gsi, false, data);
1161 }
1162
1163 if (analysis_stage && sra_mode == SRA_MODE_EARLY_IPA)
1164 {
1165 tree dest = gimple_call_fndecl (stmt);
1166 int flags = gimple_call_flags (stmt);
1167
1168 if (dest)
1169 {
1170 if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1171 && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1172 encountered_apply_args = true;
1173 if (cgraph_get_node (dest)
1174 == cgraph_get_node (current_function_decl))
1175 {
1176 encountered_recursive_call = true;
1177 if (!callsite_has_enough_arguments_p (stmt))
1178 encountered_unchangable_recursive_call = true;
1179 }
1180 }
1181
1182 if (final_bbs
1183 && (flags & (ECF_CONST | ECF_PURE)) == 0)
1184 bitmap_set_bit (final_bbs, bb->index);
1185 }
1186
1187 if (gimple_call_lhs (stmt))
1188 {
1189 tree *lhs_ptr = gimple_call_lhs_ptr (stmt);
1190 if (!analysis_stage
1191 || !disqualify_ops_if_throwing_stmt (stmt,
1192 *lhs_ptr, NULL))
1193 {
1194 any |= scan_expr (lhs_ptr, &gsi, true, data);
1195 if (handle_ssa_defs)
1196 any |= handle_ssa_defs (stmt, data);
1197 }
1198 }
1199 break;
1200
1201 case GIMPLE_ASM:
1202 if (analysis_stage)
1203 {
1204 walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
1205 asm_visit_addr);
1206 if (final_bbs)
1207 bitmap_set_bit (final_bbs, bb->index);
1208 }
1209 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
1210 {
1211 tree *op = &TREE_VALUE (gimple_asm_input_op (stmt, i));
1212 any |= scan_expr (op, &gsi, false, data);
1213 }
1214 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
1215 {
1216 tree *op = &TREE_VALUE (gimple_asm_output_op (stmt, i));
1217 any |= scan_expr (op, &gsi, true, data);
1218 }
1219 break;
1220
1221 default:
1222 break;
1223 }
1224
1225 if (any)
1226 {
1227 ret = true;
1228
1229 if (!analysis_stage)
1230 {
1231 bb_changed = true;
1232 update_stmt (stmt);
1233 maybe_clean_eh_stmt (stmt);
1234 }
1235 }
1236 if (deleted)
1237 bb_changed = true;
1238 else
1239 {
1240 gsi_next (&gsi);
1241 ret = true;
1242 }
1243 }
1244 if (!analysis_stage && bb_changed && sra_mode == SRA_MODE_EARLY_IPA)
1245 gimple_purge_dead_eh_edges (bb);
1246 }
1247
1248 return ret;
1249 }
1250
1251 /* Helper of QSORT function. There are pointers to accesses in the array. An
1252 access is considered smaller than another if it has a smaller offset or if
1253 the offsets are the same but its size is bigger. */
1254
1255 static int
1256 compare_access_positions (const void *a, const void *b)
1257 {
1258 const access_p *fp1 = (const access_p *) a;
1259 const access_p *fp2 = (const access_p *) b;
1260 const access_p f1 = *fp1;
1261 const access_p f2 = *fp2;
1262
1263 if (f1->offset != f2->offset)
1264 return f1->offset < f2->offset ? -1 : 1;
1265
1266 if (f1->size == f2->size)
1267 {
1268 if (f1->type == f2->type)
1269 return 0;
1270 /* Put any non-aggregate type before any aggregate type. */
1271 else if (!is_gimple_reg_type (f1->type)
1272 && is_gimple_reg_type (f2->type))
1273 return 1;
1274 else if (is_gimple_reg_type (f1->type)
1275 && !is_gimple_reg_type (f2->type))
1276 return -1;
1277 /* Put any complex or vector type before any other scalar type. */
1278 else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1279 && TREE_CODE (f1->type) != VECTOR_TYPE
1280 && (TREE_CODE (f2->type) == COMPLEX_TYPE
1281 || TREE_CODE (f2->type) == VECTOR_TYPE))
1282 return 1;
1283 else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1284 || TREE_CODE (f1->type) == VECTOR_TYPE)
1285 && TREE_CODE (f2->type) != COMPLEX_TYPE
1286 && TREE_CODE (f2->type) != VECTOR_TYPE)
1287 return -1;
1288 /* Put the integral type with the bigger precision first. */
1289 else if (INTEGRAL_TYPE_P (f1->type)
1290 && INTEGRAL_TYPE_P (f2->type))
1291 return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1292 /* Put any integral type with non-full precision last. */
1293 else if (INTEGRAL_TYPE_P (f1->type)
1294 && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
1295 != TYPE_PRECISION (f1->type)))
1296 return 1;
1297 else if (INTEGRAL_TYPE_P (f2->type)
1298 && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
1299 != TYPE_PRECISION (f2->type)))
1300 return -1;
1301 /* Stabilize the sort. */
1302 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1303 }
1304
1305 /* We want the bigger accesses first, thus the opposite operator in the next
1306 line: */
1307 return f1->size > f2->size ? -1 : 1;
1308 }
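
/* To illustrate the resulting order with a hypothetical example: a 64-bit
   access, a 32-bit int access and a 32-bit access of some struct S, all at
   offset 0, plus an access at offset 32 would be sorted as

     (offset 0, size 64)            bigger size first on equal offsets
     (offset 0, size 32) int        scalar before aggregate on full ties
     (offset 0, size 32) struct S
     (offset 32, ...)               greater offsets last

   which is the order sort_and_splice_var_accesses below relies on.  */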
1309
1310
1311 /* Append the name of the declaration DECL to the name obstack. A helper
1312 function for make_fancy_name. */
1313
1314 static void
1315 make_fancy_decl_name (tree decl)
1316 {
1317 char buffer[32];
1318
1319 tree name = DECL_NAME (decl);
1320 if (name)
1321 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1322 IDENTIFIER_LENGTH (name));
1323 else
1324 {
1325 sprintf (buffer, "D%u", DECL_UID (decl));
1326 obstack_grow (&name_obstack, buffer, strlen (buffer));
1327 }
1328 }
1329
1330 /* Helper for make_fancy_name. */
1331
1332 static void
1333 make_fancy_name_1 (tree expr)
1334 {
1335 char buffer[32];
1336 tree index;
1337
1338 if (DECL_P (expr))
1339 {
1340 make_fancy_decl_name (expr);
1341 return;
1342 }
1343
1344 switch (TREE_CODE (expr))
1345 {
1346 case COMPONENT_REF:
1347 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1348 obstack_1grow (&name_obstack, '$');
1349 make_fancy_decl_name (TREE_OPERAND (expr, 1));
1350 break;
1351
1352 case ARRAY_REF:
1353 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1354 obstack_1grow (&name_obstack, '$');
1355 /* Arrays with only one element may not have a constant as their
1356 index. */
1357 index = TREE_OPERAND (expr, 1);
1358 if (TREE_CODE (index) != INTEGER_CST)
1359 break;
1360 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1361 obstack_grow (&name_obstack, buffer, strlen (buffer));
1362
1363 break;
1364
1365 case BIT_FIELD_REF:
1366 case REALPART_EXPR:
1367 case IMAGPART_EXPR:
1368 gcc_unreachable (); /* we treat these as scalars. */
1369 break;
1370 default:
1371 break;
1372 }
1373 }
1374
1375 /* Create a human readable name for a replacement variable for EXPR. */
1376
1377 static char *
1378 make_fancy_name (tree expr)
1379 {
1380 make_fancy_name_1 (expr);
1381 obstack_1grow (&name_obstack, '\0');
1382 return XOBFINISH (&name_obstack, char *);
1383 }
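
/* For example (a sketch), for expressions like p.in.c or arr[3].x the
   functions above would produce the strings "p$in$c" and "arr$3$x"
   respectively; create_access_replacement below installs such a string as
   the DECL_NAME of the replacement variable.  */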
1384
1385 /* Helper function for build_ref_for_offset. */
1386
1387 static bool
1388 build_ref_for_offset_1 (tree *res, tree type, HOST_WIDE_INT offset,
1389 tree exp_type)
1390 {
1391 while (1)
1392 {
1393 tree fld;
1394 tree tr_size, index, minidx;
1395 HOST_WIDE_INT el_size;
1396
1397 if (offset == 0 && exp_type
1398 && types_compatible_p (exp_type, type))
1399 return true;
1400
1401 switch (TREE_CODE (type))
1402 {
1403 case UNION_TYPE:
1404 case QUAL_UNION_TYPE:
1405 case RECORD_TYPE:
1406 for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
1407 {
1408 HOST_WIDE_INT pos, size;
1409 tree expr, *expr_ptr;
1410
1411 if (TREE_CODE (fld) != FIELD_DECL)
1412 continue;
1413
1414 pos = int_bit_position (fld);
1415 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1416 tr_size = DECL_SIZE (fld);
1417 if (!tr_size || !host_integerp (tr_size, 1))
1418 continue;
1419 size = tree_low_cst (tr_size, 1);
1420 if (size == 0)
1421 {
1422 if (pos != offset)
1423 continue;
1424 }
1425 else if (pos > offset || (pos + size) <= offset)
1426 continue;
1427
1428 if (res)
1429 {
1430 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1431 NULL_TREE);
1432 expr_ptr = &expr;
1433 }
1434 else
1435 expr_ptr = NULL;
1436 if (build_ref_for_offset_1 (expr_ptr, TREE_TYPE (fld),
1437 offset - pos, exp_type))
1438 {
1439 if (res)
1440 *res = expr;
1441 return true;
1442 }
1443 }
1444 return false;
1445
1446 case ARRAY_TYPE:
1447 tr_size = TYPE_SIZE (TREE_TYPE (type));
1448 if (!tr_size || !host_integerp (tr_size, 1))
1449 return false;
1450 el_size = tree_low_cst (tr_size, 1);
1451
1452 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1453 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1454 return false;
1455 if (res)
1456 {
1457 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1458 if (!integer_zerop (minidx))
1459 index = int_const_binop (PLUS_EXPR, index, minidx, 0);
1460 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1461 NULL_TREE, NULL_TREE);
1462 }
1463 offset = offset % el_size;
1464 type = TREE_TYPE (type);
1465 break;
1466
1467 default:
1468 if (offset != 0)
1469 return false;
1470
1471 if (exp_type)
1472 return false;
1473 else
1474 return true;
1475 }
1476 }
1477 }
1478
1479 /* Construct an expression that would reference a part of aggregate *EXPR of
1480 type TYPE at the given OFFSET of the type EXP_TYPE. If EXPR is NULL, the
1481 function only determines whether it can build such a reference without
1482 actually doing it, otherwise, the tree it points to is unshared first and
1483 then used as a base for further sub-references.
1484
1485 FIXME: Eventually this should be replaced with
1486 maybe_fold_offset_to_reference() from tree-ssa-ccp.c but that requires a
1487 minor rewrite of fold_stmt.
1488 */
1489
1490 bool
1491 build_ref_for_offset (tree *expr, tree type, HOST_WIDE_INT offset,
1492 tree exp_type, bool allow_ptr)
1493 {
1494 location_t loc = expr ? EXPR_LOCATION (*expr) : UNKNOWN_LOCATION;
1495
1496 if (expr)
1497 *expr = unshare_expr (*expr);
1498
1499 if (allow_ptr && POINTER_TYPE_P (type))
1500 {
1501 type = TREE_TYPE (type);
1502 if (expr)
1503 *expr = fold_build1_loc (loc, INDIRECT_REF, type, *expr);
1504 }
1505
1506 return build_ref_for_offset_1 (expr, type, offset, exp_type);
1507 }
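
/* A hypothetical use of the above: to test whether a reference to a 32-bit
   integer can be built at bit offset 32 within an aggregate variable v, one
   could write

     tree expr = v;
     bool ok = build_ref_for_offset (&expr, TREE_TYPE (v), 32,
                                     integer_type_node, false);

   and use EXPR afterwards if OK is true.  Passing NULL as the first argument
   performs only the feasibility check, which is how analyze_access_subtree
   uses it.  */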
1508
1509 /* Return true iff TYPE is stdarg va_list type. */
1510
1511 static inline bool
1512 is_va_list_type (tree type)
1513 {
1514 return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1515 }
1516
1517 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
1518 those variables whose type is suitable for scalarization. */
1519
1520 static bool
1521 find_var_candidates (void)
1522 {
1523 tree var, type;
1524 referenced_var_iterator rvi;
1525 bool ret = false;
1526
1527 FOR_EACH_REFERENCED_VAR (var, rvi)
1528 {
1529 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
1530 continue;
1531 type = TREE_TYPE (var);
1532
1533 if (!AGGREGATE_TYPE_P (type)
1534 || needs_to_live_in_memory (var)
1535 || TREE_THIS_VOLATILE (var)
1536 || !COMPLETE_TYPE_P (type)
1537 || !host_integerp (TYPE_SIZE (type), 1)
1538 || tree_low_cst (TYPE_SIZE (type), 1) == 0
1539 || type_internals_preclude_sra_p (type)
1540 /* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1541 we also want to schedule it rather late. Thus we ignore it in
1542 the early pass. */
1543 || (sra_mode == SRA_MODE_EARLY_INTRA
1544 && is_va_list_type (type)))
1545 continue;
1546
1547 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1548
1549 if (dump_file && (dump_flags & TDF_DETAILS))
1550 {
1551 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1552 print_generic_expr (dump_file, var, 0);
1553 fprintf (dump_file, "\n");
1554 }
1555 ret = true;
1556 }
1557
1558 return ret;
1559 }
1560
1561 /* Sort all accesses for the given variable, check for partial overlaps and
1562 return NULL if there are any. If there are none, pick a representative for
1563 each combination of offset and size and create a linked list out of them.
1564 Return the pointer to the first representative and make sure it is the first
1565 one in the vector of accesses. */
1566
1567 static struct access *
1568 sort_and_splice_var_accesses (tree var)
1569 {
1570 int i, j, access_count;
1571 struct access *res, **prev_acc_ptr = &res;
1572 VEC (access_p, heap) *access_vec;
1573 bool first = true;
1574 HOST_WIDE_INT low = -1, high = 0;
1575
1576 access_vec = get_base_access_vector (var);
1577 if (!access_vec)
1578 return NULL;
1579 access_count = VEC_length (access_p, access_vec);
1580
1581 /* Sort by <OFFSET, SIZE>. */
1582 qsort (VEC_address (access_p, access_vec), access_count, sizeof (access_p),
1583 compare_access_positions);
1584
1585 i = 0;
1586 while (i < access_count)
1587 {
1588 struct access *access = VEC_index (access_p, access_vec, i);
1589 bool grp_write = access->write;
1590 bool grp_read = !access->write;
1591 bool grp_assignment_read = access->grp_assignment_read;
1592 bool multiple_reads = false;
1593 bool total_scalarization = access->total_scalarization;
1594 bool grp_partial_lhs = access->grp_partial_lhs;
1595 bool first_scalar = is_gimple_reg_type (access->type);
1596 bool unscalarizable_region = access->grp_unscalarizable_region;
1597
1598 if (first || access->offset >= high)
1599 {
1600 first = false;
1601 low = access->offset;
1602 high = access->offset + access->size;
1603 }
1604 else if (access->offset > low && access->offset + access->size > high)
1605 return NULL;
1606 else
1607 gcc_assert (access->offset >= low
1608 && access->offset + access->size <= high);
1609
1610 j = i + 1;
1611 while (j < access_count)
1612 {
1613 struct access *ac2 = VEC_index (access_p, access_vec, j);
1614 if (ac2->offset != access->offset || ac2->size != access->size)
1615 break;
1616 if (ac2->write)
1617 grp_write = true;
1618 else
1619 {
1620 if (grp_read)
1621 multiple_reads = true;
1622 else
1623 grp_read = true;
1624 }
1625 grp_assignment_read |= ac2->grp_assignment_read;
1626 grp_partial_lhs |= ac2->grp_partial_lhs;
1627 unscalarizable_region |= ac2->grp_unscalarizable_region;
1628 total_scalarization |= ac2->total_scalarization;
1629 relink_to_new_repr (access, ac2);
1630
1631 /* If there are both aggregate-type and scalar-type accesses with
1632 this combination of size and offset, the comparison function
1633 should have put the scalars first. */
1634 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
1635 ac2->group_representative = access;
1636 j++;
1637 }
1638
1639 i = j;
1640
1641 access->group_representative = access;
1642 access->grp_write = grp_write;
1643 access->grp_read = grp_read;
1644 access->grp_assignment_read = grp_assignment_read;
1645 access->grp_hint = multiple_reads || total_scalarization;
1646 access->grp_partial_lhs = grp_partial_lhs;
1647 access->grp_unscalarizable_region = unscalarizable_region;
1648 if (access->first_link)
1649 add_access_to_work_queue (access);
1650
1651 *prev_acc_ptr = access;
1652 prev_acc_ptr = &access->next_grp;
1653 }
1654
1655 gcc_assert (res == VEC_index (access_p, access_vec, 0));
1656 return res;
1657 }
1658
1659 /* Create a variable for the given ACCESS which determines the type, name and a
1660 few other properties. Return the variable declaration and store it also to
1661 ACCESS->replacement_decl. */
1662
1663 static tree
1664 create_access_replacement (struct access *access)
1665 {
1666 tree repl;
1667
1668 repl = create_tmp_var (access->type, "SR");
1669 get_var_ann (repl);
1670 add_referenced_var (repl);
1671 mark_sym_for_renaming (repl);
1672
1673 if (!access->grp_partial_lhs
1674 && (TREE_CODE (access->type) == COMPLEX_TYPE
1675 || TREE_CODE (access->type) == VECTOR_TYPE))
1676 DECL_GIMPLE_REG_P (repl) = 1;
1677
1678 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
1679 DECL_ARTIFICIAL (repl) = 1;
1680 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
1681
1682 if (DECL_NAME (access->base)
1683 && !DECL_IGNORED_P (access->base)
1684 && !DECL_ARTIFICIAL (access->base))
1685 {
1686 char *pretty_name = make_fancy_name (access->expr);
1687
1688 DECL_NAME (repl) = get_identifier (pretty_name);
1689 obstack_free (&name_obstack, pretty_name);
1690
1691 SET_DECL_DEBUG_EXPR (repl, access->expr);
1692 DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
1693 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
1694 }
1695 else
1696 TREE_NO_WARNING (repl) = 1;
1697
1698 if (dump_file)
1699 {
1700 fprintf (dump_file, "Created a replacement for ");
1701 print_generic_expr (dump_file, access->base, 0);
1702 fprintf (dump_file, " offset: %u, size: %u: ",
1703 (unsigned) access->offset, (unsigned) access->size);
1704 print_generic_expr (dump_file, repl, 0);
1705 fprintf (dump_file, "\n");
1706 }
1707 sra_stats.replacements++;
1708
1709 return repl;
1710 }
1711
1712 /* Return ACCESS scalar replacement, create it if it does not exist yet. */
1713
1714 static inline tree
1715 get_access_replacement (struct access *access)
1716 {
1717 gcc_assert (access->grp_to_be_replaced);
1718
1719 if (!access->replacement_decl)
1720 access->replacement_decl = create_access_replacement (access);
1721 return access->replacement_decl;
1722 }
1723
1724 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
1725 linked list along the way. Stop when *ACCESS is NULL or the access pointed
1726 to by it is not "within" the root. */
1727
1728 static void
1729 build_access_subtree (struct access **access)
1730 {
1731 struct access *root = *access, *last_child = NULL;
1732 HOST_WIDE_INT limit = root->offset + root->size;
1733
1734 *access = (*access)->next_grp;
1735 while (*access && (*access)->offset + (*access)->size <= limit)
1736 {
1737 if (!last_child)
1738 root->first_child = *access;
1739 else
1740 last_child->next_sibling = *access;
1741 last_child = *access;
1742
1743 build_access_subtree (access);
1744 }
1745 }
1746
1747 /* Build a tree of access representatives; ACCESS is the pointer to the first
1748 one, others are linked in a list by the next_grp field. Decisions about
1749 scalar replacements are made later, in analyze_access_subtree. */
1750
1751 static void
1752 build_access_trees (struct access *access)
1753 {
1754 while (access)
1755 {
1756 struct access *root = access;
1757
1758 build_access_subtree (&access);
1759 root->next_grp = access;
1760 }
1761 }
1762
1763 /* Return true if EXPR contains some ARRAY_REFs into a variable bounded
1764 array, i.e. one whose low bound is not a compile-time constant. */
1765
1766 static bool
1767 expr_with_var_bounded_array_refs_p (tree expr)
1768 {
1769 while (handled_component_p (expr))
1770 {
1771 if (TREE_CODE (expr) == ARRAY_REF
1772 && !host_integerp (array_ref_low_bound (expr), 0))
1773 return true;
1774 expr = TREE_OPERAND (expr, 0);
1775 }
1776 return false;
1777 }
1778
1779 enum mark_read_status { SRA_MR_NOT_READ, SRA_MR_READ, SRA_MR_ASSIGN_READ};
1780
1781 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
1782 that seems beneficial and ALLOW_REPLACEMENTS allows it. Also set all sorts
1783 of access flags appropriately along the way, notably always set grp_read and
1784 grp_assignment_read according to MARK_READ and grp_write when MARK_WRITE is
1785 true. */
1786
1787 static bool
1788 analyze_access_subtree (struct access *root, bool allow_replacements,
1789 enum mark_read_status mark_read, bool mark_write)
1790 {
1791 struct access *child;
1792 HOST_WIDE_INT limit = root->offset + root->size;
1793 HOST_WIDE_INT covered_to = root->offset;
1794 bool scalar = is_gimple_reg_type (root->type);
1795 bool hole = false, sth_created = false;
1796 bool direct_read = root->grp_read;
1797
1798 if (mark_read == SRA_MR_ASSIGN_READ)
1799 {
1800 root->grp_read = 1;
1801 root->grp_assignment_read = 1;
1802 }
1803 if (mark_read == SRA_MR_READ)
1804 root->grp_read = 1;
1805 else if (root->grp_assignment_read)
1806 mark_read = SRA_MR_ASSIGN_READ;
1807 else if (root->grp_read)
1808 mark_read = SRA_MR_READ;
1809
1810 if (mark_write)
1811 root->grp_write = true;
1812 else if (root->grp_write)
1813 mark_write = true;
1814
1815 if (root->grp_unscalarizable_region)
1816 allow_replacements = false;
1817
1818 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
1819 allow_replacements = false;
1820
1821 for (child = root->first_child; child; child = child->next_sibling)
1822 {
1823 if (!hole && child->offset < covered_to)
1824 hole = true;
1825 else
1826 covered_to += child->size;
1827
1828 sth_created |= analyze_access_subtree (child, allow_replacements,
1829 mark_read, mark_write);
1830
1831 root->grp_unscalarized_data |= child->grp_unscalarized_data;
1832 hole |= !child->grp_covered;
1833 }
1834
1835 if (allow_replacements && scalar && !root->first_child
1836 && (root->grp_hint
1837 || (root->grp_write && (direct_read || root->grp_assignment_read)))
1838 /* We must not ICE later on when trying to build an access to the
1839 original data within the aggregate even when it is impossible to do in
1840 a defined way like in the PR 42703 testcase. Therefore we check
1841 pre-emptively here that we will be able to do that. */
1842 && build_ref_for_offset (NULL, TREE_TYPE (root->base), root->offset,
1843 root->type, false))
1844 {
1845 if (dump_file && (dump_flags & TDF_DETAILS))
1846 {
1847 fprintf (dump_file, "Marking ");
1848 print_generic_expr (dump_file, root->base, 0);
1849 fprintf (dump_file, " offset: %u, size: %u: ",
1850 (unsigned) root->offset, (unsigned) root->size);
1851 fprintf (dump_file, " to be replaced.\n");
1852 }
1853
1854 root->grp_to_be_replaced = 1;
1855 sth_created = true;
1856 hole = false;
1857 }
1858 else if (covered_to < limit)
1859 hole = true;
1860
1861 if (sth_created && !hole)
1862 {
1863 root->grp_covered = 1;
1864 return true;
1865 }
1866 if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
1867 root->grp_unscalarized_data = 1; /* not covered and written to */
1868 if (sth_created)
1869 return true;
1870 return false;
1871 }
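
/* Continuing the hypothetical example above: a childless access of register
   type such as s.in.a is marked grp_to_be_replaced when it carries the hint
   flag or is both written and read, whereas s.in itself is never replaced
   because it has children. If s.c were written but not scheduled for
   replacement, the tree rooted at s would contain a hole and s would keep
   grp_unscalarized_data set. */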
1872
1873 /* Analyze all access trees linked by next_grp by the means of
1874 analyze_access_subtree. */
1875 static bool
1876 analyze_access_trees (struct access *access)
1877 {
1878 bool ret = false;
1879
1880 while (access)
1881 {
1882 if (analyze_access_subtree (access, true, SRA_MR_NOT_READ, false))
1883 ret = true;
1884 access = access->next_grp;
1885 }
1886
1887 return ret;
1888 }
1889
1890 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and
1891 with size SIZE would conflict with an already existing one. If exactly such
1892 a child already exists in LACC, store a pointer to it in EXACT_MATCH. */
1893
1894 static bool
1895 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
1896 HOST_WIDE_INT size, struct access **exact_match)
1897 {
1898 struct access *child;
1899
1900 for (child = lacc->first_child; child; child = child->next_sibling)
1901 {
1902 if (child->offset == norm_offset && child->size == size)
1903 {
1904 *exact_match = child;
1905 return true;
1906 }
1907
1908 if (child->offset < norm_offset + size
1909 && child->offset + child->size > norm_offset)
1910 return true;
1911 }
1912
1913 return false;
1914 }
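
/* For example (intervals in bits, purely illustrative): an existing child
   covering [32, 64) conflicts with a proposed child at [48, 80) because the
   two partially overlap, but not with one at [64, 96); a proposed [32, 64)
   child is an exact match and is reported through *EXACT_MATCH. */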
1915
1916 /* Create a new child access of PARENT, with all properties just like MODEL
1917 except for its offset and with its grp_write false and grp_read true.
1918 Return the new access or NULL if it cannot be created. Note that this access
1919 is created long after all splicing and sorting, it's not located in any
1920 access vector and is automatically a representative of its group. */
1921
1922 static struct access *
1923 create_artificial_child_access (struct access *parent, struct access *model,
1924 HOST_WIDE_INT new_offset)
1925 {
1926 struct access *access;
1927 struct access **child;
1928 tree expr = parent->base;
1929
1930 gcc_assert (!model->grp_unscalarizable_region);
1931
1932 if (!build_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
1933 model->type, false))
1934 return NULL;
1935
1936 access = (struct access *) pool_alloc (access_pool);
1937 memset (access, 0, sizeof (struct access));
1938 access->base = parent->base;
1939 access->expr = expr;
1940 access->offset = new_offset;
1941 access->size = model->size;
1942 access->type = model->type;
1943 access->grp_write = true;
1944 access->grp_read = false;
1945
1946 child = &parent->first_child;
1947 while (*child && (*child)->offset < new_offset)
1948 child = &(*child)->next_sibling;
1949
1950 access->next_sibling = *child;
1951 *child = access;
1952
1953 return access;
1954 }
1955
1956
1957 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
1958 true if any new subaccess was created. Additionally, if RACC is a scalar
1959 access but LACC is not, change the type of the latter, if possible. */
1960
1961 static bool
1962 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
1963 {
1964 struct access *rchild;
1965 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
1966 bool ret = false;
1967
1968 if (is_gimple_reg_type (lacc->type)
1969 || lacc->grp_unscalarizable_region
1970 || racc->grp_unscalarizable_region)
1971 return false;
1972
1973 if (!lacc->first_child && !racc->first_child
1974 && is_gimple_reg_type (racc->type))
1975 {
1976 tree t = lacc->base;
1977
1978 if (build_ref_for_offset (&t, TREE_TYPE (t), lacc->offset, racc->type,
1979 false))
1980 {
1981 lacc->expr = t;
1982 lacc->type = racc->type;
1983 }
1984 return false;
1985 }
1986
1987 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
1988 {
1989 struct access *new_acc = NULL;
1990 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
1991
1992 if (rchild->grp_unscalarizable_region)
1993 continue;
1994
1995 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
1996 &new_acc))
1997 {
1998 if (new_acc)
1999 {
2000 rchild->grp_hint = 1;
2001 new_acc->grp_hint |= new_acc->grp_read;
2002 if (rchild->first_child)
2003 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2004 }
2005 continue;
2006 }
2007
2008 /* If a (part of) a union field is on the RHS of an assignment, it can
2009 have sub-accesses which do not make sense on the LHS (PR 40351).
2010 Check that this is not the case. */
2011 if (!build_ref_for_offset (NULL, TREE_TYPE (lacc->base), norm_offset,
2012 rchild->type, false))
2013 continue;
2014
2015 rchild->grp_hint = 1;
2016 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2017 if (new_acc)
2018 {
2019 ret = true;
2020 if (racc->first_child)
2021 propagate_subaccesses_across_link (new_acc, rchild);
2022 }
2023 }
2024
2025 return ret;
2026 }
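
/* A hypothetical example of this propagation: given two candidate aggregates
   and the assign link created for

     dst = src;

   where only src.in.a has a sub-access of its own (e.g. because src.in.a is
   read elsewhere), an artificial child access of dst is created at the same
   normalized offset, so that the copy can later be performed as a scalar
   assignment between replacements rather than as an aggregate copy. */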
2027
2028 /* Propagate all subaccesses across assignment links. */
2029
2030 static void
2031 propagate_all_subaccesses (void)
2032 {
2033 while (work_queue_head)
2034 {
2035 struct access *racc = pop_access_from_work_queue ();
2036 struct assign_link *link;
2037
2038 gcc_assert (racc->first_link);
2039
2040 for (link = racc->first_link; link; link = link->next)
2041 {
2042 struct access *lacc = link->lacc;
2043
2044 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2045 continue;
2046 lacc = lacc->group_representative;
2047 if (propagate_subaccesses_across_link (lacc, racc)
2048 && lacc->first_link)
2049 add_access_to_work_queue (lacc);
2050 }
2051 }
2052 }
2053
2054 /* Go through all accesses collected throughout the (intraprocedural) analysis
2055 stage, exclude overlapping ones, identify representatives and build trees
2056 out of them, making decisions about scalarization on the way. Return true
2057 iff there are any to-be-scalarized variables after this stage. */
2058
2059 static bool
2060 analyze_all_variable_accesses (void)
2061 {
2062 int res = 0;
2063 bitmap tmp = BITMAP_ALLOC (NULL);
2064 bitmap_iterator bi;
2065 unsigned i, max_total_scalarization_size;
2066
2067 max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2068 * MOVE_RATIO (optimize_function_for_speed_p (cfun));
2069
2070 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2071 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2072 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2073 {
2074 tree var = referenced_var (i);
2075
2076 if (TREE_CODE (var) == VAR_DECL
2077 && ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
2078 <= max_total_scalarization_size)
2079 && type_consists_of_records_p (TREE_TYPE (var)))
2080 {
2081 completely_scalarize_record (var, var, 0);
2082 if (dump_file && (dump_flags & TDF_DETAILS))
2083 {
2084 fprintf (dump_file, "Will attempt to totally scalarize ");
2085 print_generic_expr (dump_file, var, 0);
2086 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2087 }
2088 }
2089 }
2090
2091 bitmap_copy (tmp, candidate_bitmap);
2092 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2093 {
2094 tree var = referenced_var (i);
2095 struct access *access;
2096
2097 access = sort_and_splice_var_accesses (var);
2098 if (access)
2099 build_access_trees (access);
2100 else
2101 disqualify_candidate (var,
2102 "No or inhibitingly overlapping accesses.");
2103 }
2104
2105 propagate_all_subaccesses ();
2106
2107 bitmap_copy (tmp, candidate_bitmap);
2108 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2109 {
2110 tree var = referenced_var (i);
2111 struct access *access = get_first_repr_for_decl (var);
2112
2113 if (analyze_access_trees (access))
2114 {
2115 res++;
2116 if (dump_file && (dump_flags & TDF_DETAILS))
2117 {
2118 fprintf (dump_file, "\nAccess trees for ");
2119 print_generic_expr (dump_file, var, 0);
2120 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2121 dump_access_tree (dump_file, access);
2122 fprintf (dump_file, "\n");
2123 }
2124 }
2125 else
2126 disqualify_candidate (var, "No scalar replacements to be created.");
2127 }
2128
2129 BITMAP_FREE (tmp);
2130
2131 if (res)
2132 {
2133 statistics_counter_event (cfun, "Scalarized aggregates", res);
2134 return true;
2135 }
2136 else
2137 return false;
2138 }
2139
2140 /* Return true iff a reference statement into aggregate AGG can be built for
2141 every single to-be-replaced access that is a child of ACCESS, its sibling
2142 or a child of its sibling. TOP_OFFSET is the offset of the processed
2143 access subtree which has to be subtracted from the offset of each access. */
2144
2145 static bool
2146 ref_expr_for_all_replacements_p (struct access *access, tree agg,
2147 HOST_WIDE_INT top_offset)
2148 {
2149 do
2150 {
2151 if (access->grp_to_be_replaced
2152 && !build_ref_for_offset (NULL, TREE_TYPE (agg),
2153 access->offset - top_offset,
2154 access->type, false))
2155 return false;
2156
2157 if (access->first_child
2158 && !ref_expr_for_all_replacements_p (access->first_child, agg,
2159 top_offset))
2160 return false;
2161
2162 access = access->next_sibling;
2163 }
2164 while (access);
2165
2166 return true;
2167 }
2168
2169 /* Generate statements copying scalar replacements of accesses within a subtree
2170 into or out of AGG. ACCESS is the first child of the root of the subtree to
2171 be processed. AGG is an aggregate type expression (can be a declaration but
2172 does not have to be, it can for example also be an indirect_ref).
2173 TOP_OFFSET is the offset of the processed subtree which has to be subtracted
2174 from offsets of individual accesses to get corresponding offsets for AGG.
2175 If CHUNK_SIZE is non-zero, copy only replacements in the interval
2176 <start_offset, start_offset + chunk_size>, otherwise copy all. GSI is a
2177 statement iterator used to place the new statements. WRITE should be true
2178 when the statements should write from AGG to the replacement and false if
2179 vice versa. If INSERT_AFTER is true, new statements will be added after the
2180 current statement in GSI, they will be added before the statement
2181 otherwise. */
2182
2183 static void
2184 generate_subtree_copies (struct access *access, tree agg,
2185 HOST_WIDE_INT top_offset,
2186 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2187 gimple_stmt_iterator *gsi, bool write,
2188 bool insert_after)
2189 {
2190 do
2191 {
2192 tree expr = agg;
2193
2194 if (chunk_size && access->offset >= start_offset + chunk_size)
2195 return;
2196
2197 if (access->grp_to_be_replaced
2198 && (chunk_size == 0
2199 || access->offset + access->size > start_offset))
2200 {
2201 tree repl = get_access_replacement (access);
2202 bool ref_found;
2203 gimple stmt;
2204
2205 ref_found = build_ref_for_offset (&expr, TREE_TYPE (agg),
2206 access->offset - top_offset,
2207 access->type, false);
2208 gcc_assert (ref_found);
2209
2210 if (write)
2211 {
2212 if (access->grp_partial_lhs)
2213 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2214 !insert_after,
2215 insert_after ? GSI_NEW_STMT
2216 : GSI_SAME_STMT);
2217 stmt = gimple_build_assign (repl, expr);
2218 }
2219 else
2220 {
2221 TREE_NO_WARNING (repl) = 1;
2222 if (access->grp_partial_lhs)
2223 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2224 !insert_after,
2225 insert_after ? GSI_NEW_STMT
2226 : GSI_SAME_STMT);
2227 stmt = gimple_build_assign (expr, repl);
2228 }
2229
2230 if (insert_after)
2231 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2232 else
2233 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2234 update_stmt (stmt);
2235 sra_stats.subtree_copies++;
2236 }
2237
2238 if (access->first_child)
2239 generate_subtree_copies (access->first_child, agg, top_offset,
2240 start_offset, chunk_size, gsi,
2241 write, insert_after);
2242
2243 access = access->next_sibling;
2244 }
2245 while (access);
2246 }
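
/* A sketch of the generated statements (hypothetical replacement names): if
   s.in.a and s.in.b have scalar replacements SR.1 and SR.2 and a statement
   reads s as a whole, calling this function with WRITE false inserts

     s.in.a = SR.1;
     s.in.b = SR.2;

   before the statement, whereas after a statement that writes the whole of
   s, WRITE true re-loads the replacements with SR.1 = s.in.a and
   SR.2 = s.in.b. */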
2247
2248 /* Assign zero to all scalar replacements in an access subtree. ACCESS is
2249 the root of the subtree to be processed. GSI is the statement iterator used
2250 for inserting statements which are added after the current statement if
2251 INSERT_AFTER is true or before it otherwise. */
2252
2253 static void
2254 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2255 bool insert_after)
2256
2257 {
2258 struct access *child;
2259
2260 if (access->grp_to_be_replaced)
2261 {
2262 gimple stmt;
2263
2264 stmt = gimple_build_assign (get_access_replacement (access),
2265 fold_convert (access->type,
2266 integer_zero_node));
2267 if (insert_after)
2268 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2269 else
2270 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2271 update_stmt (stmt);
2272 }
2273
2274 for (child = access->first_child; child; child = child->next_sibling)
2275 init_subtree_with_zero (child, gsi, insert_after);
2276 }
2277
2278 /* Search for an access representative for the given expression EXPR and
2279 return it or NULL if it cannot be found. */
2280
2281 static struct access *
2282 get_access_for_expr (tree expr)
2283 {
2284 HOST_WIDE_INT offset, size, max_size;
2285 tree base;
2286
2287 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2288 a different size than the size of its argument and we need the latter
2289 one. */
2290 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2291 expr = TREE_OPERAND (expr, 0);
2292
2293 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2294 if (max_size == -1 || !DECL_P (base))
2295 return NULL;
2296
2297 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2298 return NULL;
2299
2300 return get_var_base_offset_size_access (base, offset, max_size);
2301 }
2302
2303 /* Callback for scan_function. Replace the expression EXPR with a scalar
2304 replacement if there is one and generate other statements to do type
2305 conversion or subtree copying if necessary. GSI is used to place newly
2306 created statements, WRITE is true if the expression is being written to (it
2307 is on a LHS of a statement or output in an assembly statement). */
2308
2309 static bool
2310 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write,
2311 void *data ATTRIBUTE_UNUSED)
2312 {
2313 struct access *access;
2314 tree type, bfr;
2315
2316 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2317 {
2318 bfr = *expr;
2319 expr = &TREE_OPERAND (*expr, 0);
2320 }
2321 else
2322 bfr = NULL_TREE;
2323
2324 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2325 expr = &TREE_OPERAND (*expr, 0);
2326 access = get_access_for_expr (*expr);
2327 if (!access)
2328 return false;
2329 type = TREE_TYPE (*expr);
2330
2331 if (access->grp_to_be_replaced)
2332 {
2333 tree repl = get_access_replacement (access);
2334 /* If we replace a non-register typed access simply use the original
2335 access expression to extract the scalar component afterwards.
2336 This happens if scalarizing a function return value or parameter
2337 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2338 gcc.c-torture/compile/20011217-1.c.
2339
2340 We also want to use this when accessing a complex or vector which can
2341 be accessed as a different type too, potentially creating a need for
2342 type conversion (see PR42196) and when scalarized unions are involved
2343 in assembler statements (see PR42398). */
2344 if (!useless_type_conversion_p (type, access->type))
2345 {
2346 tree ref = access->base;
2347 bool ok;
2348
2349 ok = build_ref_for_offset (&ref, TREE_TYPE (ref),
2350 access->offset, access->type, false);
2351 gcc_assert (ok);
2352
2353 if (write)
2354 {
2355 gimple stmt;
2356
2357 if (access->grp_partial_lhs)
2358 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2359 false, GSI_NEW_STMT);
2360 stmt = gimple_build_assign (repl, ref);
2361 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2362 }
2363 else
2364 {
2365 gimple stmt;
2366
2367 if (access->grp_partial_lhs)
2368 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2369 true, GSI_SAME_STMT);
2370 stmt = gimple_build_assign (ref, repl);
2371 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2372 }
2373 }
2374 else
2375 *expr = repl;
2376 sra_stats.exprs++;
2377 }
2378
2379 if (access->first_child)
2380 {
2381 HOST_WIDE_INT start_offset, chunk_size;
2382 if (bfr
2383 && host_integerp (TREE_OPERAND (bfr, 1), 1)
2384 && host_integerp (TREE_OPERAND (bfr, 2), 1))
2385 {
2386 chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2387 start_offset = access->offset
2388 + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2389 }
2390 else
2391 start_offset = chunk_size = 0;
2392
2393 generate_subtree_copies (access->first_child, access->base, 0,
2394 start_offset, chunk_size, gsi, write, write);
2395 }
2396 return true;
2397 }
2398
2399 /* Where scalar replacements of the RHS have been written to when a replacement
2400 of the LHS of an assignment cannot be directly loaded from a replacement of
2401 the RHS. */
2402 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2403 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2404 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2405
2406 /* Store all replacements in the access tree rooted in TOP_RACC to their
2407 base aggregate if there are unscalarized data or directly to LHS
2408 otherwise. */
2409
2410 static enum unscalarized_data_handling
2411 handle_unscalarized_data_in_subtree (struct access *top_racc, tree lhs,
2412 gimple_stmt_iterator *gsi)
2413 {
2414 if (top_racc->grp_unscalarized_data)
2415 {
2416 generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2417 gsi, false, false);
2418 return SRA_UDH_RIGHT;
2419 }
2420 else
2421 {
2422 generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2423 0, 0, gsi, false, false);
2424 return SRA_UDH_LEFT;
2425 }
2426 }
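
/* In other words (hypothetical example): for an assignment l = r where r
   contains data not covered by any scalar replacement, the replacements of r
   are first flushed back into r itself and the aggregate copy has to stay
   (SRA_UDH_RIGHT); otherwise they can be stored directly into l
   (SRA_UDH_LEFT) and the original copy may become redundant. */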
2427
2428
2429 /* Try to generate statements to load all sub-replacements in an access
2430 (sub)tree (LACC is the first child) from scalar replacements in the TOP_RACC
2431 (sub)tree. If that is not possible, refresh the TOP_RACC base aggregate and
2432 load the accesses from it. LEFT_OFFSET is the offset of the left whole
2433 subtree being copied, RIGHT_OFFSET is the same thing for the right subtree.
2434 GSI is stmt iterator used for statement insertions. *REFRESHED keeps track
2435 of whether the rhs top aggregate has already been refreshed by contents of
2436 its scalar reductions and is updated if this function has to do so. */
2437
2438 static void
2439 load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2440 HOST_WIDE_INT left_offset,
2441 HOST_WIDE_INT right_offset,
2442 gimple_stmt_iterator *old_gsi,
2443 gimple_stmt_iterator *new_gsi,
2444 enum unscalarized_data_handling *refreshed,
2445 tree lhs)
2446 {
2447 location_t loc = EXPR_LOCATION (lacc->expr);
2448 do
2449 {
2450 if (lacc->grp_to_be_replaced)
2451 {
2452 struct access *racc;
2453 HOST_WIDE_INT offset = lacc->offset - left_offset + right_offset;
2454 gimple stmt;
2455 tree rhs;
2456
2457 racc = find_access_in_subtree (top_racc, offset, lacc->size);
2458 if (racc && racc->grp_to_be_replaced)
2459 {
2460 rhs = get_access_replacement (racc);
2461 if (!useless_type_conversion_p (lacc->type, racc->type))
2462 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2463 }
2464 else
2465 {
2466 /* No suitable access on the right hand side, need to load from
2467 the aggregate. See if we have to update it first... */
2468 if (*refreshed == SRA_UDH_NONE)
2469 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2470 lhs, old_gsi);
2471
2472 if (*refreshed == SRA_UDH_LEFT)
2473 {
2474 bool repl_found;
2475
2476 rhs = lacc->base;
2477 repl_found = build_ref_for_offset (&rhs, TREE_TYPE (rhs),
2478 lacc->offset, lacc->type,
2479 false);
2480 gcc_assert (repl_found);
2481 }
2482 else
2483 {
2484 bool repl_found;
2485
2486 rhs = top_racc->base;
2487 repl_found = build_ref_for_offset (&rhs,
2488 TREE_TYPE (top_racc->base),
2489 offset, lacc->type, false);
2490 gcc_assert (repl_found);
2491 }
2492 }
2493
2494 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2495 gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2496 update_stmt (stmt);
2497 sra_stats.subreplacements++;
2498 }
2499 else if (*refreshed == SRA_UDH_NONE
2500 && lacc->grp_read && !lacc->grp_covered)
2501 *refreshed = handle_unscalarized_data_in_subtree (top_racc, lhs,
2502 old_gsi);
2503
2504 if (lacc->first_child)
2505 load_assign_lhs_subreplacements (lacc->first_child, top_racc,
2506 left_offset, right_offset,
2507 old_gsi, new_gsi, refreshed, lhs);
2508 lacc = lacc->next_sibling;
2509 }
2510 while (lacc);
2511 }
2512
2513 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
2514 to the assignment and GSI is the statement iterator pointing at it. Returns
2515 the same values as sra_modify_assign. */
2516
2517 static enum scan_assign_result
2518 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2519 {
2520 tree lhs = gimple_assign_lhs (*stmt);
2521 struct access *acc;
2522
2523 acc = get_access_for_expr (lhs);
2524 if (!acc)
2525 return SRA_SA_NONE;
2526
2527 if (VEC_length (constructor_elt,
2528 CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
2529 {
2530 /* I have never seen this code path trigger but if it can happen, the
2531 following should handle it gracefully. */
2532 if (access_has_children_p (acc))
2533 generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2534 true, true);
2535 return SRA_SA_PROCESSED;
2536 }
2537
2538 if (acc->grp_covered)
2539 {
2540 init_subtree_with_zero (acc, gsi, false);
2541 unlink_stmt_vdef (*stmt);
2542 gsi_remove (gsi, true);
2543 return SRA_SA_REMOVED;
2544 }
2545 else
2546 {
2547 init_subtree_with_zero (acc, gsi, true);
2548 return SRA_SA_PROCESSED;
2549 }
2550 }
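
/* For illustration (hypothetical): an empty-CONSTRUCTOR assignment such as
   one gimplified from "s = (struct S) {};" turns into assignments of zero to
   all replacements of s and is removed entirely when the replacements cover
   the whole of s; if parts of s are unscalarized, the statement is kept to
   clear them and the zeroing of the replacements is inserted after it. */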
2551
2552 /* Create a new suitable default definition SSA_NAME and replace all uses of
2553 SSA with it. */
2554
2555 static void
2556 replace_uses_with_default_def_ssa_name (tree ssa)
2557 {
2558 tree repl, decl = SSA_NAME_VAR (ssa);
2559 if (TREE_CODE (decl) == PARM_DECL)
2560 {
2561 tree tmp = create_tmp_reg (TREE_TYPE (decl), "SR");
2562
2563 get_var_ann (tmp);
2564 add_referenced_var (tmp);
2565 repl = make_ssa_name (tmp, gimple_build_nop ());
2566 set_default_def (tmp, repl);
2567 }
2568 else
2569 {
2570 repl = gimple_default_def (cfun, decl);
2571 if (!repl)
2572 {
2573 repl = make_ssa_name (decl, gimple_build_nop ());
2574 set_default_def (decl, repl);
2575 }
2576 }
2577
2578 replace_uses_by (ssa, repl);
2579 }
2580
2581 /* Callback of scan_function to process assign statements. It examines both
2582 sides of the statement, replaces them with a scalar replacement if there is
2583 one and generates copying of replacements if scalarized aggregates have been
2584 used in the assignment. STMT is a pointer to the assign statement, GSI is
2585 used to hold generated statements for type conversions and subtree
2586 copying. */
2587
2588 static enum scan_assign_result
2589 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
2590 void *data ATTRIBUTE_UNUSED)
2591 {
2592 struct access *lacc, *racc;
2593 tree lhs, rhs;
2594 bool modify_this_stmt = false;
2595 bool force_gimple_rhs = false;
2596 location_t loc = gimple_location (*stmt);
2597 gimple_stmt_iterator orig_gsi = *gsi;
2598
2599 if (!gimple_assign_single_p (*stmt))
2600 return SRA_SA_NONE;
2601 lhs = gimple_assign_lhs (*stmt);
2602 rhs = gimple_assign_rhs1 (*stmt);
2603
2604 if (TREE_CODE (rhs) == CONSTRUCTOR)
2605 return sra_modify_constructor_assign (stmt, gsi);
2606
2607 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
2608 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
2609 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
2610 {
2611 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
2612 gsi, false, data);
2613 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
2614 gsi, true, data);
2615 return modify_this_stmt ? SRA_SA_PROCESSED : SRA_SA_NONE;
2616 }
2617
2618 lacc = get_access_for_expr (lhs);
2619 racc = get_access_for_expr (rhs);
2620 if (!lacc && !racc)
2621 return SRA_SA_NONE;
2622
2623 if (lacc && lacc->grp_to_be_replaced)
2624 {
2625 lhs = get_access_replacement (lacc);
2626 gimple_assign_set_lhs (*stmt, lhs);
2627 modify_this_stmt = true;
2628 if (lacc->grp_partial_lhs)
2629 force_gimple_rhs = true;
2630 sra_stats.exprs++;
2631 }
2632
2633 if (racc && racc->grp_to_be_replaced)
2634 {
2635 rhs = get_access_replacement (racc);
2636 modify_this_stmt = true;
2637 if (racc->grp_partial_lhs)
2638 force_gimple_rhs = true;
2639 sra_stats.exprs++;
2640 }
2641
2642 if (modify_this_stmt)
2643 {
2644 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2645 {
2646 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
2647 ??? This should move to fold_stmt which we simply should
2648 call after building a VIEW_CONVERT_EXPR here. */
2649 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
2650 && !access_has_children_p (lacc))
2651 {
2652 tree expr = lhs;
2653 if (build_ref_for_offset (&expr, TREE_TYPE (lhs), 0,
2654 TREE_TYPE (rhs), false))
2655 {
2656 lhs = expr;
2657 gimple_assign_set_lhs (*stmt, expr);
2658 }
2659 }
2660 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
2661 && !access_has_children_p (racc))
2662 {
2663 tree expr = rhs;
2664 if (build_ref_for_offset (&expr, TREE_TYPE (rhs), 0,
2665 TREE_TYPE (lhs), false))
2666 rhs = expr;
2667 }
2668 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2669 {
2670 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
2671 if (is_gimple_reg_type (TREE_TYPE (lhs))
2672 && TREE_CODE (lhs) != SSA_NAME)
2673 force_gimple_rhs = true;
2674 }
2675 }
2676 }
2677
2678 /* From this point on, the function deals with assignments in between
2679 aggregates when at least one has scalar reductions of some of its
2680 components. There are three possible scenarios: 1) both the LHS and RHS
2681 have to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
2682
2683 In the first case, we would like to load the LHS components from RHS
2684 components whenever possible. If that is not possible, we would like to
2685 read it directly from the RHS (after updating it by storing in it its own
2686 components). If there are some necessary unscalarized data in the LHS,
2687 those will be loaded by the original assignment too. If neither of these
2688 cases happen, the original statement can be removed. Most of this is done
2689 by load_assign_lhs_subreplacements.
2690
2691 In the second case, we would like to store all RHS scalarized components
2692 directly into LHS and if they cover the aggregate completely, remove the
2693 statement too. In the third case, we want the LHS components to be loaded
2694 directly from the RHS (DSE will remove the original statement if it
2695 becomes redundant).
2696
2697 This is a bit complex but manageable when types match and when unions do
2698 not cause confusion in a way that we cannot really load a component of LHS
2699 from the RHS or vice versa (the access representing this level can have
2700 subaccesses that are accessible only through a different union field at a
2701 higher level - different from the one used in the examined expression).
2702 Unions are fun.
2703
2704 Therefore, I specially handle a fourth case, happening when there is a
2705 specific type cast or it is impossible to locate a scalarized subaccess on
2706 the other side of the expression. If that happens, I simply "refresh" the
2707 RHS by storing its scalarized components in it, leave the original statement
2708 there to do the copying and then load the scalar replacements of the LHS.
2709 This is what the first branch does. */
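
/* For example (hypothetical): in case 2), an assignment l = r where only r
   is scalarized and fully covered is replaced by direct stores of r's
   replacements into the components of l and then removed; in case 3), the
   replacements of l are loaded straight from the components of r and DSE
   later removes the original statement if it has become redundant. */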
2710
2711 if (gimple_has_volatile_ops (*stmt)
2712 || contains_view_convert_expr_p (rhs)
2713 || contains_view_convert_expr_p (lhs)
2714 || (access_has_children_p (racc)
2715 && !ref_expr_for_all_replacements_p (racc, lhs, racc->offset))
2716 || (access_has_children_p (lacc)
2717 && !ref_expr_for_all_replacements_p (lacc, rhs, lacc->offset)))
2718 {
2719 if (access_has_children_p (racc))
2720 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
2721 gsi, false, false);
2722 if (access_has_children_p (lacc))
2723 generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
2724 gsi, true, true);
2725 sra_stats.separate_lhs_rhs_handling++;
2726 }
2727 else
2728 {
2729 if (access_has_children_p (lacc) && access_has_children_p (racc))
2730 {
2731 gimple_stmt_iterator orig_gsi = *gsi;
2732 enum unscalarized_data_handling refreshed;
2733
2734 if (lacc->grp_read && !lacc->grp_covered)
2735 refreshed = handle_unscalarized_data_in_subtree (racc, lhs, gsi);
2736 else
2737 refreshed = SRA_UDH_NONE;
2738
2739 load_assign_lhs_subreplacements (lacc->first_child, racc,
2740 lacc->offset, racc->offset,
2741 &orig_gsi, gsi, &refreshed, lhs);
2742 if (refreshed != SRA_UDH_RIGHT)
2743 {
2744 if (*stmt == gsi_stmt (*gsi))
2745 gsi_next (gsi);
2746
2747 unlink_stmt_vdef (*stmt);
2748 gsi_remove (&orig_gsi, true);
2749 sra_stats.deleted++;
2750 return SRA_SA_REMOVED;
2751 }
2752 }
2753 else
2754 {
2755 if (racc)
2756 {
2757 if (!racc->grp_to_be_replaced && !racc->grp_unscalarized_data)
2758 {
2759 if (racc->first_child)
2760 generate_subtree_copies (racc->first_child, lhs,
2761 racc->offset, 0, 0, gsi,
2762 false, false);
2763 gcc_assert (*stmt == gsi_stmt (*gsi));
2764 if (TREE_CODE (lhs) == SSA_NAME)
2765 replace_uses_with_default_def_ssa_name (lhs);
2766
2767 unlink_stmt_vdef (*stmt);
2768 gsi_remove (gsi, true);
2769 sra_stats.deleted++;
2770 return SRA_SA_REMOVED;
2771 }
2772 else if (racc->first_child)
2773 generate_subtree_copies (racc->first_child, lhs,
2774 racc->offset, 0, 0, gsi, false, true);
2775 }
2776 if (access_has_children_p (lacc))
2777 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
2778 0, 0, gsi, true, true);
2779 }
2780 }
2781
2782 /* This gimplification must be done after generate_subtree_copies, lest we
2783 insert the subtree copies in the middle of the gimplified sequence. */
2784 if (force_gimple_rhs)
2785 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
2786 true, GSI_SAME_STMT);
2787 if (gimple_assign_rhs1 (*stmt) != rhs)
2788 {
2789 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
2790 gcc_assert (*stmt == gsi_stmt (orig_gsi));
2791 }
2792
2793 return modify_this_stmt ? SRA_SA_PROCESSED : SRA_SA_NONE;
2794 }
2795
2796 /* Generate statements initializing scalar replacements of parts of function
2797 parameters. */
2798
2799 static void
2800 initialize_parameter_reductions (void)
2801 {
2802 gimple_stmt_iterator gsi;
2803 gimple_seq seq = NULL;
2804 tree parm;
2805
2806 for (parm = DECL_ARGUMENTS (current_function_decl);
2807 parm;
2808 parm = TREE_CHAIN (parm))
2809 {
2810 VEC (access_p, heap) *access_vec;
2811 struct access *access;
2812
2813 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
2814 continue;
2815 access_vec = get_base_access_vector (parm);
2816 if (!access_vec)
2817 continue;
2818
2819 if (!seq)
2820 {
2821 seq = gimple_seq_alloc ();
2822 gsi = gsi_start (seq);
2823 }
2824
2825 for (access = VEC_index (access_p, access_vec, 0);
2826 access;
2827 access = access->next_grp)
2828 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true);
2829 }
2830
2831 if (seq)
2832 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
2833 }
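
/* A sketch of the effect (hypothetical names): for "int f (struct S s)"
   where s.in.a has a scalar replacement SR.1, the statement SR.1 = s.in.a is
   emitted on the single edge leaving the entry block, so the replacement
   starts out holding the incoming value of the parameter. */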
2834
2835 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
2836 it reveals there are components of some aggregates to be scalarized, it runs
2837 the required transformations. */
2838 static unsigned int
2839 perform_intra_sra (void)
2840 {
2841 int ret = 0;
2842 sra_initialize ();
2843
2844 if (!find_var_candidates ())
2845 goto out;
2846
2847 if (!scan_function (build_access_from_expr, build_accesses_from_assign, NULL,
2848 true, NULL))
2849 goto out;
2850
2851 if (!analyze_all_variable_accesses ())
2852 goto out;
2853
2854 scan_function (sra_modify_expr, sra_modify_assign, NULL, false, NULL);
2855 initialize_parameter_reductions ();
2856
2857 statistics_counter_event (cfun, "Scalar replacements created",
2858 sra_stats.replacements);
2859 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
2860 statistics_counter_event (cfun, "Subtree copy stmts",
2861 sra_stats.subtree_copies);
2862 statistics_counter_event (cfun, "Subreplacement stmts",
2863 sra_stats.subreplacements);
2864 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
2865 statistics_counter_event (cfun, "Separate LHS and RHS handling",
2866 sra_stats.separate_lhs_rhs_handling);
2867
2868 ret = TODO_update_ssa;
2869
2870 out:
2871 sra_deinitialize ();
2872 return ret;
2873 }
2874
2875 /* Perform early intraprocedural SRA. */
2876 static unsigned int
2877 early_intra_sra (void)
2878 {
2879 sra_mode = SRA_MODE_EARLY_INTRA;
2880 return perform_intra_sra ();
2881 }
2882
2883 /* Perform "late" intraprocedural SRA. */
2884 static unsigned int
2885 late_intra_sra (void)
2886 {
2887 sra_mode = SRA_MODE_INTRA;
2888 return perform_intra_sra ();
2889 }
2890
2891
2892 static bool
2893 gate_intra_sra (void)
2894 {
2895 return flag_tree_sra != 0;
2896 }
2897
2898
2899 struct gimple_opt_pass pass_sra_early =
2900 {
2901 {
2902 GIMPLE_PASS,
2903 "esra", /* name */
2904 gate_intra_sra, /* gate */
2905 early_intra_sra, /* execute */
2906 NULL, /* sub */
2907 NULL, /* next */
2908 0, /* static_pass_number */
2909 TV_TREE_SRA, /* tv_id */
2910 PROP_cfg | PROP_ssa, /* properties_required */
2911 0, /* properties_provided */
2912 0, /* properties_destroyed */
2913 0, /* todo_flags_start */
2914 TODO_dump_func
2915 | TODO_update_ssa
2916 | TODO_ggc_collect
2917 | TODO_verify_ssa /* todo_flags_finish */
2918 }
2919 };
2920
2921 struct gimple_opt_pass pass_sra =
2922 {
2923 {
2924 GIMPLE_PASS,
2925 "sra", /* name */
2926 gate_intra_sra, /* gate */
2927 late_intra_sra, /* execute */
2928 NULL, /* sub */
2929 NULL, /* next */
2930 0, /* static_pass_number */
2931 TV_TREE_SRA, /* tv_id */
2932 PROP_cfg | PROP_ssa, /* properties_required */
2933 0, /* properties_provided */
2934 0, /* properties_destroyed */
2935 TODO_update_address_taken, /* todo_flags_start */
2936 TODO_dump_func
2937 | TODO_update_ssa
2938 | TODO_ggc_collect
2939 | TODO_verify_ssa /* todo_flags_finish */
2940 }
2941 };
2942
2943
2944 /* Return true iff PARM (which must be a parm_decl) is an unused scalar
2945 parameter. */
2946
2947 static bool
2948 is_unused_scalar_param (tree parm)
2949 {
2950 tree name;
2951 return (is_gimple_reg (parm)
2952 && (!(name = gimple_default_def (cfun, parm))
2953 || has_zero_uses (name)));
2954 }
2955
2956 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
2957 examine whether there are any direct or otherwise infeasible ones. If so,
2958 return true, otherwise return false. PARM must be a gimple register with a
2959 non-NULL default definition. */
2960
2961 static bool
2962 ptr_parm_has_direct_uses (tree parm)
2963 {
2964 imm_use_iterator ui;
2965 gimple stmt;
2966 tree name = gimple_default_def (cfun, parm);
2967 bool ret = false;
2968
2969 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
2970 {
2971 int uses_ok = 0;
2972 use_operand_p use_p;
2973
2974 if (is_gimple_debug (stmt))
2975 continue;
2976
2977 /* Valid uses include dereferences on the lhs and the rhs. */
2978 if (gimple_has_lhs (stmt))
2979 {
2980 tree lhs = gimple_get_lhs (stmt);
2981 while (handled_component_p (lhs))
2982 lhs = TREE_OPERAND (lhs, 0);
2983 if (INDIRECT_REF_P (lhs)
2984 && TREE_OPERAND (lhs, 0) == name)
2985 uses_ok++;
2986 }
2987 if (gimple_assign_single_p (stmt))
2988 {
2989 tree rhs = gimple_assign_rhs1 (stmt);
2990 while (handled_component_p (rhs))
2991 rhs = TREE_OPERAND (rhs, 0);
2992 if (INDIRECT_REF_P (rhs)
2993 && TREE_OPERAND (rhs, 0) == name)
2994 uses_ok++;
2995 }
2996 else if (is_gimple_call (stmt))
2997 {
2998 unsigned i;
2999 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3000 {
3001 tree arg = gimple_call_arg (stmt, i);
3002 while (handled_component_p (arg))
3003 arg = TREE_OPERAND (arg, 0);
3004 if (INDIRECT_REF_P (arg)
3005 && TREE_OPERAND (arg, 0) == name)
3006 uses_ok++;
3007 }
3008 }
3009
3010 /* If the number of valid uses does not match the number of
3011 uses in this stmt there is an unhandled use. */
3012 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3013 --uses_ok;
3014
3015 if (uses_ok != 0)
3016 ret = true;
3017
3018 if (ret)
3019 BREAK_FROM_IMM_USE_STMT (ui);
3020 }
3021
3022 return ret;
3023 }
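
/* Two hypothetical examples: in "int f (int *p) { return *p + *p; }" every
   use of the default definition of p is a dereference, so this function
   returns false and p remains an IPA-SRA candidate; in
   "int f (int *p) { g (p); return *p; }" passing p itself to g is a direct
   use and true is returned. */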
3024
3025 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3026 them in candidate_bitmap. Note that these do not necessarily include
3027 parameters which are unused and thus can be removed. Return true iff any
3028 such candidate has been found. */
3029
3030 static bool
3031 find_param_candidates (void)
3032 {
3033 tree parm;
3034 int count = 0;
3035 bool ret = false;
3036
3037 for (parm = DECL_ARGUMENTS (current_function_decl);
3038 parm;
3039 parm = TREE_CHAIN (parm))
3040 {
3041 tree type = TREE_TYPE (parm);
3042
3043 count++;
3044
3045 if (TREE_THIS_VOLATILE (parm)
3046 || TREE_ADDRESSABLE (parm)
3047 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3048 continue;
3049
3050 if (is_unused_scalar_param (parm))
3051 {
3052 ret = true;
3053 continue;
3054 }
3055
3056 if (POINTER_TYPE_P (type))
3057 {
3058 type = TREE_TYPE (type);
3059
3060 if (TREE_CODE (type) == FUNCTION_TYPE
3061 || TYPE_VOLATILE (type)
3062 || !is_gimple_reg (parm)
3063 || is_va_list_type (type)
3064 || ptr_parm_has_direct_uses (parm))
3065 continue;
3066 }
3067 else if (!AGGREGATE_TYPE_P (type))
3068 continue;
3069
3070 if (!COMPLETE_TYPE_P (type)
3071 || !host_integerp (TYPE_SIZE (type), 1)
3072 || tree_low_cst (TYPE_SIZE (type), 1) == 0
3073 || (AGGREGATE_TYPE_P (type)
3074 && type_internals_preclude_sra_p (type)))
3075 continue;
3076
3077 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3078 ret = true;
3079 if (dump_file && (dump_flags & TDF_DETAILS))
3080 {
3081 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3082 print_generic_expr (dump_file, parm, 0);
3083 fprintf (dump_file, "\n");
3084 }
3085 }
3086
3087 func_param_count = count;
3088 return ret;
3089 }
3090
3091 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3092 maybe_modified. */
3093
3094 static bool
3095 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3096 void *data)
3097 {
3098 struct access *repr = (struct access *) data;
3099
3100 repr->grp_maybe_modified = 1;
3101 return true;
3102 }
3103
3104 /* Analyze what representatives (in linked lists accessible from
3105 REPRESENTATIVES) can be modified by side effects of statements in the
3106 current function. */
3107
3108 static void
3109 analyze_modified_params (VEC (access_p, heap) *representatives)
3110 {
3111 int i;
3112
3113 for (i = 0; i < func_param_count; i++)
3114 {
3115 struct access *repr;
3116
3117 for (repr = VEC_index (access_p, representatives, i);
3118 repr;
3119 repr = repr->next_grp)
3120 {
3121 struct access *access;
3122 bitmap visited;
3123 ao_ref ar;
3124
3125 if (no_accesses_p (repr))
3126 continue;
3127 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3128 || repr->grp_maybe_modified)
3129 continue;
3130
3131 ao_ref_init (&ar, repr->expr);
3132 visited = BITMAP_ALLOC (NULL);
3133 for (access = repr; access; access = access->next_sibling)
3134 {
3135 /* All accesses are read ones, otherwise grp_maybe_modified would
3136 be trivially set. */
3137 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3138 mark_maybe_modified, repr, &visited);
3139 if (repr->grp_maybe_modified)
3140 break;
3141 }
3142 BITMAP_FREE (visited);
3143 }
3144 }
3145 }
3146
3147 /* Propagate distances in bb_dereferences in the opposite direction to the
3148 control flow edges, in each step storing the maximum of the current value
3149 and the minimum of all successors. These steps are repeated until the table
3150 stabilizes. Note that BBs which might terminate the function (according to
3151 the final_bbs bitmap) are never updated in this way. */
3152
3153 static void
3154 propagate_dereference_distances (void)
3155 {
3156 VEC (basic_block, heap) *queue;
3157 basic_block bb;
3158
3159 queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
3160 VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
3161 FOR_EACH_BB (bb)
3162 {
3163 VEC_quick_push (basic_block, queue, bb);
3164 bb->aux = bb;
3165 }
3166
3167 while (!VEC_empty (basic_block, queue))
3168 {
3169 edge_iterator ei;
3170 edge e;
3171 bool change = false;
3172 int i;
3173
3174 bb = VEC_pop (basic_block, queue);
3175 bb->aux = NULL;
3176
3177 if (bitmap_bit_p (final_bbs, bb->index))
3178 continue;
3179
3180 for (i = 0; i < func_param_count; i++)
3181 {
3182 int idx = bb->index * func_param_count + i;
3183 bool first = true;
3184 HOST_WIDE_INT inh = 0;
3185
3186 FOR_EACH_EDGE (e, ei, bb->succs)
3187 {
3188 int succ_idx = e->dest->index * func_param_count + i;
3189
3190 if (e->src == EXIT_BLOCK_PTR)
3191 continue;
3192
3193 if (first)
3194 {
3195 first = false;
3196 inh = bb_dereferences [succ_idx];
3197 }
3198 else if (bb_dereferences [succ_idx] < inh)
3199 inh = bb_dereferences [succ_idx];
3200 }
3201
3202 if (!first && bb_dereferences[idx] < inh)
3203 {
3204 bb_dereferences[idx] = inh;
3205 change = true;
3206 }
3207 }
3208
3209 if (change && !bitmap_bit_p (final_bbs, bb->index))
3210 FOR_EACH_EDGE (e, ei, bb->preds)
3211 {
3212 if (e->src->aux)
3213 continue;
3214
3215 e->src->aux = e->src;
3216 VEC_quick_push (basic_block, queue, e->src);
3217 }
3218 }
3219
3220 VEC_free (basic_block, heap, queue);
3221 }
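
/* A worked example of the fixpoint above (hypothetical CFG and distances):
   for one parameter, with BB1 branching to BB2 and BB3 which join in BB4,
   and recorded dereference distances

     BB1: 0   BB2: 64   BB3: 32   BB4: 0

   BB2 and BB3 keep their values because their only successor contributes 0,
   while BB1 becomes max (0, min (64, 32)) = 32 and so does the entry block
   row, meaning only the first 32 bits are certainly dereferenced on every
   path through the function. */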
3222
3223 /* Dump a dereferences TABLE with heading STR to file F. */
3224
3225 static void
3226 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3227 {
3228 basic_block bb;
3229
3230 fprintf (f, "%s", str);
3231 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3232 {
3233 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3234 if (bb != EXIT_BLOCK_PTR)
3235 {
3236 int i;
3237 for (i = 0; i < func_param_count; i++)
3238 {
3239 int idx = bb->index * func_param_count + i;
3240 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3241 }
3242 }
3243 fprintf (f, "\n");
3244 }
3245 fprintf (f, "\n");
3246 }
3247
3248 /* Determine which (parts of) parameters passed by reference and not
3249 assigned to are not certainly dereferenced in this function and thus the
3250 dereferencing cannot be safely moved to the caller without potentially
3251 introducing a segfault. Mark such REPRESENTATIVES as
3252 grp_not_necessarilly_dereferenced.
3253
3254 The maximum dereferenced "distance," i.e. the offset + size of the accessed
3255 part, is calculated rather than a simple boolean for each pointer parameter,
3256 in order to handle cases when only a fraction of the whole aggregate is
3257 allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for an
3258 example).
3259
3260 The maximum dereference distances for each pointer parameter and BB are
3261 already stored in bb_dereferences. This routine simply propagates these
3262 values upwards by propagate_dereference_distances and then compares the
3263 distances of individual parameters in the ENTRY BB to the equivalent
3264 distances of each representative of a (fraction of a) parameter. */
3265
3266 static void
3267 analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
3268 {
3269 int i;
3270
3271 if (dump_file && (dump_flags & TDF_DETAILS))
3272 dump_dereferences_table (dump_file,
3273 "Dereference table before propagation:\n",
3274 bb_dereferences);
3275
3276 propagate_dereference_distances ();
3277
3278 if (dump_file && (dump_flags & TDF_DETAILS))
3279 dump_dereferences_table (dump_file,
3280 "Dereference table after propagation:\n",
3281 bb_dereferences);
3282
3283 for (i = 0; i < func_param_count; i++)
3284 {
3285 struct access *repr = VEC_index (access_p, representatives, i);
3286 int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
3287
3288 if (!repr || no_accesses_p (repr))
3289 continue;
3290
3291 do
3292 {
3293 if ((repr->offset + repr->size) > bb_dereferences[idx])
3294 repr->grp_not_necessarilly_dereferenced = 1;
3295 repr = repr->next_grp;
3296 }
3297 while (repr);
3298 }
3299 }
3300
3301 /* Return the representative access for the parameter declaration PARM if it is
3302 a scalar passed by reference which is not written to and the pointer value
3303 is not used directly. Thus, if it is legal to dereference it in the caller
3304 and we can rule out modifications through aliases, such a parameter should
3305 be turned into one passed by value. Return NULL otherwise. */
3306
3307 static struct access *
3308 unmodified_by_ref_scalar_representative (tree parm)
3309 {
3310 int i, access_count;
3311 struct access *repr;
3312 VEC (access_p, heap) *access_vec;
3313
3314 access_vec = get_base_access_vector (parm);
3315 gcc_assert (access_vec);
3316 repr = VEC_index (access_p, access_vec, 0);
3317 if (repr->write)
3318 return NULL;
3319 repr->group_representative = repr;
3320
3321 access_count = VEC_length (access_p, access_vec);
3322 for (i = 1; i < access_count; i++)
3323 {
3324 struct access *access = VEC_index (access_p, access_vec, i);
3325 if (access->write)
3326 return NULL;
3327 access->group_representative = repr;
3328 access->next_sibling = repr->next_sibling;
3329 repr->next_sibling = access;
3330 }
3331
3332 repr->grp_read = 1;
3333 repr->grp_scalar_ptr = 1;
3334 return repr;
3335 }
3336
3337 /* Return true iff this access precludes IPA-SRA of the parameter it is
3338 associated with. */
3339
3340 static bool
3341 access_precludes_ipa_sra_p (struct access *access)
3342 {
3343 /* Avoid issues such as the second simple testcase in PR 42025. The problem
3344 is incompatible assign in a call statement (and possibly even in asm
3345 statements). This can be relaxed by using a new temporary but only for
3346 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
3347 intraprocedural SRA we deal with this by keeping the old aggregate around,
3348 something we cannot do in IPA-SRA.) */
3349 if (access->write
3350 && (is_gimple_call (access->stmt)
3351 || gimple_code (access->stmt) == GIMPLE_ASM))
3352 return true;
3353
3354 return false;
3355 }
3356
3357
3358 /* Sort collected accesses for parameter PARM, identify representatives for
3359 each accessed region and link them together. Return NULL if there are
3360 different but overlapping accesses, return the special pointer value meaning
3361 there are no accesses for this parameter if that is the case, and return the
3362 first representative otherwise. Set *RO_GRP if there is a group of accesses
3363 with only read (i.e. no write) accesses. */
3364
3365 static struct access *
3366 splice_param_accesses (tree parm, bool *ro_grp)
3367 {
3368 int i, j, access_count, group_count;
3369 int agg_size, total_size = 0;
3370 struct access *access, *res, **prev_acc_ptr = &res;
3371 VEC (access_p, heap) *access_vec;
3372
3373 access_vec = get_base_access_vector (parm);
3374 if (!access_vec)
3375 return &no_accesses_representant;
3376 access_count = VEC_length (access_p, access_vec);
3377
3378 qsort (VEC_address (access_p, access_vec), access_count, sizeof (access_p),
3379 compare_access_positions);
3380
3381 i = 0;
3382 total_size = 0;
3383 group_count = 0;
3384 while (i < access_count)
3385 {
3386 bool modification;
3387 access = VEC_index (access_p, access_vec, i);
3388 modification = access->write;
3389 if (access_precludes_ipa_sra_p (access))
3390 return NULL;
3391
3392 /* Access is about to become group representative unless we find some
3393 nasty overlap which would preclude us from breaking this parameter
3394 apart. */
3395
3396 j = i + 1;
3397 while (j < access_count)
3398 {
3399 struct access *ac2 = VEC_index (access_p, access_vec, j);
3400 if (ac2->offset != access->offset)
3401 {
3402 /* All or nothing law for parameters. */
3403 if (access->offset + access->size > ac2->offset)
3404 return NULL;
3405 else
3406 break;
3407 }
3408 else if (ac2->size != access->size)
3409 return NULL;
3410
3411 if (access_precludes_ipa_sra_p (ac2))
3412 return NULL;
3413
3414 modification |= ac2->write;
3415 ac2->group_representative = access;
3416 ac2->next_sibling = access->next_sibling;
3417 access->next_sibling = ac2;
3418 j++;
3419 }
3420
3421 group_count++;
3422 access->grp_maybe_modified = modification;
3423 if (!modification)
3424 *ro_grp = true;
3425 *prev_acc_ptr = access;
3426 prev_acc_ptr = &access->next_grp;
3427 total_size += access->size;
3428 i = j;
3429 }
3430
3431 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3432 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3433 else
3434 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3435 if (total_size >= agg_size)
3436 return NULL;
3437
3438 gcc_assert (group_count > 0);
3439 return res;
3440 }
3441
3442 /* Decide whether parameters with representative accesses given by REPR should
3443 be reduced into components. */
3444
3445 static int
3446 decide_one_param_reduction (struct access *repr)
3447 {
3448 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
3449 bool by_ref;
3450 tree parm;
3451
3452 parm = repr->base;
3453 cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3454 gcc_assert (cur_parm_size > 0);
3455
3456 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3457 {
3458 by_ref = true;
3459 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3460 }
3461 else
3462 {
3463 by_ref = false;
3464 agg_size = cur_parm_size;
3465 }
3466
3467 if (dump_file)
3468 {
3469 struct access *acc;
3470 fprintf (dump_file, "Evaluating PARAM group sizes for ");
3471 print_generic_expr (dump_file, parm, 0);
3472 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
3473 for (acc = repr; acc; acc = acc->next_grp)
3474 dump_access (dump_file, acc, true);
3475 }
3476
3477 total_size = 0;
3478 new_param_count = 0;
3479
3480 for (; repr; repr = repr->next_grp)
3481 {
3482 gcc_assert (parm == repr->base);
3483 new_param_count++;
3484
3485 if (!by_ref || (!repr->grp_maybe_modified
3486 && !repr->grp_not_necessarilly_dereferenced))
3487 total_size += repr->size;
3488 else
3489 total_size += cur_parm_size;
3490 }
3491
3492 gcc_assert (new_param_count > 0);
3493
3494 if (optimize_function_for_size_p (cfun))
3495 parm_size_limit = cur_parm_size;
3496 else
3497 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
3498 * cur_parm_size);
3499
3500 if (total_size < agg_size
3501 && total_size <= parm_size_limit)
3502 {
3503 if (dump_file)
3504 fprintf (dump_file, " ....will be split into %i components\n",
3505 new_param_count);
3506 return new_param_count;
3507 }
3508 else
3509 return 0;
3510 }
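
/* A worked example with hypothetical numbers: for "f (struct S *p)" on a
   64-bit target where struct S is 128 bits (agg_size), p is a 64-bit pointer
   (cur_parm_size) and only two 32-bit fields are read through p, neither of
   them maybe-modified, total_size is 64. Assuming the default
   ipa-sra-ptr-growth-factor of 2, parm_size_limit is 128, so both
   64 < 128 and 64 <= 128 hold and the parameter is split into two
   components. */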
3511
3512 /* The order of the following enums is important; we need to do extra work
3513 for UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
3514 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
3515 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
3516
3517 /* Identify representatives of all accesses to all candidate parameters for
3518 IPA-SRA. Return result based on what representatives have been found. */
3519
3520 static enum ipa_splicing_result
3521 splice_all_param_accesses (VEC (access_p, heap) **representatives)
3522 {
3523 enum ipa_splicing_result result = NO_GOOD_ACCESS;
3524 tree parm;
3525 struct access *repr;
3526
3527 *representatives = VEC_alloc (access_p, heap, func_param_count);
3528
3529 for (parm = DECL_ARGUMENTS (current_function_decl);
3530 parm;
3531 parm = TREE_CHAIN (parm))
3532 {
3533 if (is_unused_scalar_param (parm))
3534 {
3535 VEC_quick_push (access_p, *representatives,
3536 &no_accesses_representant);
3537 if (result == NO_GOOD_ACCESS)
3538 result = UNUSED_PARAMS;
3539 }
3540 else if (POINTER_TYPE_P (TREE_TYPE (parm))
3541 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
3542 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3543 {
3544 repr = unmodified_by_ref_scalar_representative (parm);
3545 VEC_quick_push (access_p, *representatives, repr);
3546 if (repr)
3547 result = UNMODIF_BY_REF_ACCESSES;
3548 }
3549 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3550 {
3551 bool ro_grp = false;
3552 repr = splice_param_accesses (parm, &ro_grp);
3553 VEC_quick_push (access_p, *representatives, repr);
3554
3555 if (repr && !no_accesses_p (repr))
3556 {
3557 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3558 {
3559 if (ro_grp)
3560 result = UNMODIF_BY_REF_ACCESSES;
3561 else if (result < MODIF_BY_REF_ACCESSES)
3562 result = MODIF_BY_REF_ACCESSES;
3563 }
3564 else if (result < BY_VAL_ACCESSES)
3565 result = BY_VAL_ACCESSES;
3566 }
3567 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
3568 result = UNUSED_PARAMS;
3569 }
3570 else
3571 VEC_quick_push (access_p, *representatives, NULL);
3572 }
3573
3574 if (result == NO_GOOD_ACCESS)
3575 {
3576 VEC_free (access_p, heap, *representatives);
3577 *representatives = NULL;
3578 return NO_GOOD_ACCESS;
3579 }
3580
3581 return result;
3582 }
3583
3584 /* Return the index of BASE in PARMS. Abort if it is not found. */
3585
3586 static inline int
3587 get_param_index (tree base, VEC(tree, heap) *parms)
3588 {
3589 int i, len;
3590
3591 len = VEC_length (tree, parms);
3592 for (i = 0; i < len; i++)
3593 if (VEC_index (tree, parms, i) == base)
3594 return i;
3595 gcc_unreachable ();
3596 }
3597
3598 /* Convert the decisions made at the representative level into compact
3599 parameter adjustments. REPRESENTATIVES are pointers to the first
3600 representatives of each parameter's accesses, ADJUSTMENTS_COUNT is the expected
3601 final number of adjustments. */
3602
3603 static ipa_parm_adjustment_vec
3604 turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
3605 int adjustments_count)
3606 {
3607 VEC (tree, heap) *parms;
3608 ipa_parm_adjustment_vec adjustments;
3609 tree parm;
3610 int i;
3611
3612 gcc_assert (adjustments_count > 0);
3613 parms = ipa_get_vector_of_formal_parms (current_function_decl);
3614 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
3615 parm = DECL_ARGUMENTS (current_function_decl);
3616 for (i = 0; i < func_param_count; i++, parm = TREE_CHAIN (parm))
3617 {
3618 struct access *repr = VEC_index (access_p, representatives, i);
3619
3620 if (!repr || no_accesses_p (repr))
3621 {
3622 struct ipa_parm_adjustment *adj;
3623
3624 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
3625 memset (adj, 0, sizeof (*adj));
3626 adj->base_index = get_param_index (parm, parms);
3627 adj->base = parm;
3628 if (!repr)
3629 adj->copy_param = 1;
3630 else
3631 adj->remove_param = 1;
3632 }
3633 else
3634 {
3635 struct ipa_parm_adjustment *adj;
3636 int index = get_param_index (parm, parms);
3637
3638 for (; repr; repr = repr->next_grp)
3639 {
3640 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
3641 memset (adj, 0, sizeof (*adj));
3642 gcc_assert (repr->base == parm);
3643 adj->base_index = index;
3644 adj->base = repr->base;
3645 adj->type = repr->type;
3646 adj->offset = repr->offset;
3647 adj->by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
3648 && (repr->grp_maybe_modified
3649 || repr->grp_not_necessarilly_dereferenced));
3650
3651 }
3652 }
3653 }
3654 VEC_free (tree, heap, parms);
3655 return adjustments;
3656 }
3657
3658 /* Analyze the collected accesses and produce a plan of what to do with the
3659 parameters in the form of adjustments; NULL means nothing is to be done.  */
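
/* A sketch of the by-reference decision made below (hypothetical): for
   g (int *p)  where *p is dereferenced on every path and never modified,
   the representative survives and P can be converted to a by-value scalar;
   if *p may be modified by a callee, or is not dereferenced on some path,
   the representative is dropped and P is left as it is.  */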
3660
3661 static ipa_parm_adjustment_vec
3662 analyze_all_param_acesses (void)
3663 {
3664 enum ipa_splicing_result repr_state;
3665 bool proceed = false;
3666 int i, adjustments_count = 0;
3667 VEC (access_p, heap) *representatives;
3668 ipa_parm_adjustment_vec adjustments;
3669
3670 repr_state = splice_all_param_accesses (&representatives);
3671 if (repr_state == NO_GOOD_ACCESS)
3672 return NULL;
3673
3674 /* If there are any parameters passed by reference which are not modified
3675 directly, we need to check whether they can be modified indirectly. */
3676 if (repr_state == UNMODIF_BY_REF_ACCESSES)
3677 {
3678 analyze_caller_dereference_legality (representatives);
3679 analyze_modified_params (representatives);
3680 }
3681
3682 for (i = 0; i < func_param_count; i++)
3683 {
3684 struct access *repr = VEC_index (access_p, representatives, i);
3685
3686 if (repr && !no_accesses_p (repr))
3687 {
3688 if (repr->grp_scalar_ptr)
3689 {
3690 adjustments_count++;
3691 if (repr->grp_not_necessarilly_dereferenced
3692 || repr->grp_maybe_modified)
3693 VEC_replace (access_p, representatives, i, NULL);
3694 else
3695 {
3696 proceed = true;
3697 sra_stats.scalar_by_ref_to_by_val++;
3698 }
3699 }
3700 else
3701 {
3702 int new_components = decide_one_param_reduction (repr);
3703
3704 if (new_components == 0)
3705 {
3706 VEC_replace (access_p, representatives, i, NULL);
3707 adjustments_count++;
3708 }
3709 else
3710 {
3711 adjustments_count += new_components;
3712 sra_stats.aggregate_params_reduced++;
3713 sra_stats.param_reductions_created += new_components;
3714 proceed = true;
3715 }
3716 }
3717 }
3718 else
3719 {
3720 if (no_accesses_p (repr))
3721 {
3722 proceed = true;
3723 sra_stats.deleted_unused_parameters++;
3724 }
3725 adjustments_count++;
3726 }
3727 }
3728
3729 if (!proceed && dump_file)
3730 fprintf (dump_file, "NOT proceeding to change params.\n");
3731
3732 if (proceed)
3733 adjustments = turn_representatives_into_adjustments (representatives,
3734 adjustments_count);
3735 else
3736 adjustments = NULL;
3737
3738 VEC_free (access_p, heap, representatives);
3739 return adjustments;
3740 }
3741
3742 /* If a parameter replacement identified by ADJ does not yet exist in the form
3743 of a declaration, create and record it; otherwise return the previously
3744 created one.  */
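
/* For example (purely illustrative), when parameter VAL is being removed,
   the first call for its adjustment creates a new register variable whose
   name is derived from "val", and all subsequent calls for the same
   adjustment return that same declaration.  */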
3745
3746 static tree
3747 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
3748 {
3749 tree repl;
3750 if (!adj->new_ssa_base)
3751 {
3752 char *pretty_name = make_fancy_name (adj->base);
3753
3754 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
3755 DECL_NAME (repl) = get_identifier (pretty_name);
3756 obstack_free (&name_obstack, pretty_name);
3757
3758 get_var_ann (repl);
3759 add_referenced_var (repl);
3760 adj->new_ssa_base = repl;
3761 }
3762 else
3763 repl = adj->new_ssa_base;
3764 return repl;
3765 }
3766
3767 /* Find the first adjustment for a particular parameter BASE in a vector of
3768 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
3769 adjustment. */
3770
3771 static struct ipa_parm_adjustment *
3772 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
3773 {
3774 int i, len;
3775
3776 len = VEC_length (ipa_parm_adjustment_t, adjustments);
3777 for (i = 0; i < len; i++)
3778 {
3779 struct ipa_parm_adjustment *adj;
3780
3781 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
3782 if (!adj->copy_param && adj->base == base)
3783 return adj;
3784 }
3785
3786 return NULL;
3787 }
3788
3789 /* Callback for scan_function.  If the statement STMT defines an SSA_NAME of a
3790 parameter which is to be removed because its value is not used, replace the
3791 SSA_NAME with one relating to a newly created VAR_DECL, replace all of its
3792 uses too and return true (update_stmt is then issued for the statement by
3793 the caller).  DATA is a pointer to an adjustments vector.  */
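
/* E.g. (hypothetical GIMPLE), if parameter I is being removed, a definition

     i_4 = PHI <i_2(3), i_7(4)>

   is changed to define an SSA name of the substitute VAR_DECL instead, and
   all uses of i_4 are redirected to that new name.  */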
3794
3795 static bool
3796 replace_removed_params_ssa_names (gimple stmt, void *data)
3797 {
3798 VEC (ipa_parm_adjustment_t, heap) *adjustments;
3799 struct ipa_parm_adjustment *adj;
3800 tree lhs, decl, repl, name;
3801
3802 adjustments = (VEC (ipa_parm_adjustment_t, heap) *) data;
3803 if (gimple_code (stmt) == GIMPLE_PHI)
3804 lhs = gimple_phi_result (stmt);
3805 else if (is_gimple_assign (stmt))
3806 lhs = gimple_assign_lhs (stmt);
3807 else if (is_gimple_call (stmt))
3808 lhs = gimple_call_lhs (stmt);
3809 else
3810 gcc_unreachable ();
3811
3812 if (TREE_CODE (lhs) != SSA_NAME)
3813 return false;
3814 decl = SSA_NAME_VAR (lhs);
3815 if (TREE_CODE (decl) != PARM_DECL)
3816 return false;
3817
3818 adj = get_adjustment_for_base (adjustments, decl);
3819 if (!adj)
3820 return false;
3821
3822 repl = get_replaced_param_substitute (adj);
3823 name = make_ssa_name (repl, stmt);
3824
3825 if (dump_file)
3826 {
3827 fprintf (dump_file, "replacing an SSA name of a removed param ");
3828 print_generic_expr (dump_file, lhs, 0);
3829 fprintf (dump_file, " with ");
3830 print_generic_expr (dump_file, name, 0);
3831 fprintf (dump_file, "\n");
3832 }
3833
3834 if (is_gimple_assign (stmt))
3835 gimple_assign_set_lhs (stmt, name);
3836 else if (is_gimple_call (stmt))
3837 gimple_call_set_lhs (stmt, name);
3838 else
3839 gimple_phi_set_result (stmt, name);
3840
3841 replace_uses_by (lhs, name);
3842 return true;
3843 }
3844
3845 /* Callback for scan_function and helper to sra_ipa_modify_assign.  If the
3846 expression *EXPR should be replaced by a reduction of a parameter, do so.
3847 DATA is a pointer to a vector of adjustments.  DONT_CONVERT specifies
3848 whether the function should care about type incompatibility between the
3849 current and the new expression.  If it is true, the function leaves such
3850 incompatibility issues to the caller.
3851 
3852 When called directly by scan_function, DONT_CONVERT is true when EXPR is
3853 a write (LHS) expression.  */
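
/* For instance (hypothetical), if the adjustment of parameter P reduces it
   to a new scalar parameter standing for p->x, an occurrence of p->x in the
   function body is rewritten to that parameter directly, or to a dereference
   of it when the reduction is passed by reference; a VIEW_CONVERT_EXPR is
   added when the types are not compatible and DONT_CONVERT is false.  */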
3854
3855 static bool
3856 sra_ipa_modify_expr (tree *expr, gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
3857 bool dont_convert, void *data)
3858 {
3859 ipa_parm_adjustment_vec adjustments;
3860 int i, len;
3861 struct ipa_parm_adjustment *adj, *cand = NULL;
3862 HOST_WIDE_INT offset, size, max_size;
3863 tree base, src;
3864
3865 adjustments = (VEC (ipa_parm_adjustment_t, heap) *) data;
3866 len = VEC_length (ipa_parm_adjustment_t, adjustments);
3867
3868 if (TREE_CODE (*expr) == BIT_FIELD_REF
3869 || TREE_CODE (*expr) == IMAGPART_EXPR
3870 || TREE_CODE (*expr) == REALPART_EXPR)
3871 {
3872 expr = &TREE_OPERAND (*expr, 0);
3873 dont_convert = false;
3874 }
3875
3876 base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
3877 if (!base || size == -1 || max_size == -1)
3878 return false;
3879
3880 if (INDIRECT_REF_P (base))
3881 base = TREE_OPERAND (base, 0);
3882
3883 base = get_ssa_base_param (base);
3884 if (!base || TREE_CODE (base) != PARM_DECL)
3885 return false;
3886
3887 for (i = 0; i < len; i++)
3888 {
3889 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
3890
3891 if (adj->base == base
3892 && (adj->offset == offset || adj->remove_param))
3893 {
3894 cand = adj;
3895 break;
3896 }
3897 }
3898 if (!cand || cand->copy_param || cand->remove_param)
3899 return false;
3900
3901 if (cand->by_ref)
3902 {
3903 tree folded;
3904 src = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (cand->reduction)),
3905 cand->reduction);
3906 folded = gimple_fold_indirect_ref (src);
3907 if (folded)
3908 src = folded;
3909 }
3910 else
3911 src = cand->reduction;
3912
3913 if (dump_file && (dump_flags & TDF_DETAILS))
3914 {
3915 fprintf (dump_file, "About to replace expr ");
3916 print_generic_expr (dump_file, *expr, 0);
3917 fprintf (dump_file, " with ");
3918 print_generic_expr (dump_file, src, 0);
3919 fprintf (dump_file, "\n");
3920 }
3921
3922 if (!dont_convert
3923 && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
3924 {
3925 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
3926 *expr = vce;
3927 }
3928 else
3929 *expr = src;
3930 return true;
3931 }
3932
3933 /* Callback for scan_function to process assign statements.  Performs
3934 essentially the same job as sra_ipa_modify_expr.  */
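
/* Example (hypothetical): when both sides of an assignment are rewritten to
   replacements with incompatible types, the right hand side is wrapped in a
   VIEW_CONVERT_EXPR which is forced into a separate statement inserted
   before the assignment.  */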
3935
3936 static enum scan_assign_result
3937 sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi, void *data)
3938 {
3939 gimple stmt = *stmt_ptr;
3940 tree *lhs_p, *rhs_p;
3941 bool any;
3942
3943 if (!gimple_assign_single_p (stmt))
3944 return SRA_SA_NONE;
3945
3946 rhs_p = gimple_assign_rhs1_ptr (stmt);
3947 lhs_p = gimple_assign_lhs_ptr (stmt);
3948
3949 any = sra_ipa_modify_expr (rhs_p, gsi, true, data);
3950 any |= sra_ipa_modify_expr (lhs_p, gsi, true, data);
3951 if (any)
3952 {
3953 tree new_rhs = NULL_TREE;
3954
3955 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
3956 {
3957 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
3958 {
3959 /* V_C_Es of constructors can cause trouble (PR 42714). */
3960 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
3961 *rhs_p = fold_convert (TREE_TYPE (*lhs_p), integer_zero_node);
3962 else
3963 *rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
3964 }
3965 else
3966 new_rhs = fold_build1_loc (gimple_location (stmt),
3967 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
3968 *rhs_p);
3969 }
3970 else if (REFERENCE_CLASS_P (*rhs_p)
3971 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
3972 && !is_gimple_reg (*lhs_p))
3973 /* This can happen when an assignment in between two single field
3974 structures is turned into an assignment in between two pointers to
3975 scalars (PR 42237). */
3976 new_rhs = *rhs_p;
3977
3978 if (new_rhs)
3979 {
3980 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
3981 true, GSI_SAME_STMT);
3982
3983 gimple_assign_set_rhs_from_tree (gsi, tmp);
3984 }
3985
3986 return SRA_SA_PROCESSED;
3987 }
3988
3989 return SRA_SA_NONE;
3990 }
3991
3992 /* Call gimple_debug_bind_reset_value on all debug statements describing
3993 gimple register parameters that are being removed or replaced. */
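
/* E.g. a binding such as  # DEBUG i => i_1(D)  for a removed parameter I is
   reset so that debug information marks I as optimized away instead of
   presenting a stale value (a hypothetical example).  */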
3994
3995 static void
3996 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
3997 {
3998 int i, len;
3999
4000 len = VEC_length (ipa_parm_adjustment_t, adjustments);
4001 for (i = 0; i < len; i++)
4002 {
4003 struct ipa_parm_adjustment *adj;
4004 imm_use_iterator ui;
4005 gimple stmt;
4006 tree name;
4007
4008 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4009 if (adj->copy_param || !is_gimple_reg (adj->base))
4010 continue;
4011 name = gimple_default_def (cfun, adj->base);
4012 if (!name)
4013 continue;
4014 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4015 {
4016 /* All other users must have been removed by scan_function. */
4017 gcc_assert (is_gimple_debug (stmt));
4018 gimple_debug_bind_reset_value (stmt);
4019 update_stmt (stmt);
4020 }
4021 }
4022 }
4023
4024 /* Return true iff all callers have at least as many actual arguments as there
4025 are formal parameters in the current function. */
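
/* A caller can pass fewer actual arguments than there are formals when the
   call goes through an unprototyped (old-style) declaration, e.g.

     extern int f ();
     int g (void) { return f (1); }

   while f is in fact defined with two parameters.  Rewriting the parameters
   of such an F would be unsafe, hence this check (the snippet above is a
   hypothetical illustration).  */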
4026
4027 static bool
4028 all_callers_have_enough_arguments_p (struct cgraph_node *node)
4029 {
4030 struct cgraph_edge *cs;
4031 for (cs = node->callers; cs; cs = cs->next_caller)
4032 if (!callsite_has_enough_arguments_p (cs->call_stmt))
4033 return false;
4034
4035 return true;
4036 }
4037
4038
4039 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
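
/* E.g. (hypothetical) a call  f (i, &s)  is rewritten to  f (s.x)  when the
   first parameter of F has been removed and the second has been reduced to
   the scalar field s.x.  */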
4040
4041 static void
4042 convert_callers (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4043 {
4044 tree old_cur_fndecl = current_function_decl;
4045 struct cgraph_edge *cs;
4046 basic_block this_block;
4047 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4048
4049 for (cs = node->callers; cs; cs = cs->next_caller)
4050 {
4051 current_function_decl = cs->caller->decl;
4052 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
4053
4054 if (dump_file)
4055 fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
4056 cs->caller->uid, cs->callee->uid,
4057 cgraph_node_name (cs->caller),
4058 cgraph_node_name (cs->callee));
4059
4060 ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);
4061
4062 pop_cfun ();
4063 }
4064
4065 for (cs = node->callers; cs; cs = cs->next_caller)
4066 if (!bitmap_bit_p (recomputed_callers, cs->caller->uid))
4067 {
4068 compute_inline_parameters (cs->caller);
4069 bitmap_set_bit (recomputed_callers, cs->caller->uid);
4070 }
4071 BITMAP_FREE (recomputed_callers);
4072
4073 current_function_decl = old_cur_fndecl;
4074
4075 if (!encountered_recursive_call)
4076 return;
4077
4078 FOR_EACH_BB (this_block)
4079 {
4080 gimple_stmt_iterator gsi;
4081
4082 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4083 {
4084 gimple stmt = gsi_stmt (gsi);
4085 tree call_fndecl;
4086 if (gimple_code (stmt) != GIMPLE_CALL)
4087 continue;
4088 call_fndecl = gimple_call_fndecl (stmt);
4089 if (call_fndecl && cgraph_get_node (call_fndecl) == node)
4090 {
4091 if (dump_file)
4092 fprintf (dump_file, "Adjusting recursive call.\n");
4093 ipa_modify_call_arguments (NULL, stmt, adjustments);
4094 }
4095 }
4096 }
4097
4098 return;
4099 }
4100
4101 /* Create an abstract origin declaration for OLD_DECL and make it the abstract
4102 origin of OLD_DECL, so that the original parameters are preserved for debug
4103 information.  */
4104
4105 static void
4106 create_abstract_origin (tree old_decl)
4107 {
4108 if (!DECL_ABSTRACT_ORIGIN (old_decl))
4109 {
4110 tree new_decl = copy_node (old_decl);
4111
4112 DECL_ABSTRACT (new_decl) = 1;
4113 SET_DECL_ASSEMBLER_NAME (new_decl, NULL_TREE);
4114 SET_DECL_RTL (new_decl, NULL);
4115 DECL_STRUCT_FUNCTION (new_decl) = NULL;
4116 DECL_ARTIFICIAL (old_decl) = 1;
4117 DECL_ABSTRACT_ORIGIN (old_decl) = new_decl;
4118 }
4119 }
4120
4121 /* Perform all the modifications required in IPA-SRA for NODE to have parameters
4122 as given in ADJUSTMENTS.  */
4123
4124 static void
4125 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4126 {
4127 struct cgraph_node *alias;
4128 for (alias = node->same_body; alias; alias = alias->next)
4129 ipa_modify_formal_parameters (alias->decl, adjustments, "ISRA");
4130 /* current_function_decl must be handled last, after same_body aliases,
4131 as the functions called afterwards use what it computes.  */
4132 create_abstract_origin (current_function_decl);
4133 ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
4134 scan_function (sra_ipa_modify_expr, sra_ipa_modify_assign,
4135 replace_removed_params_ssa_names, false, adjustments);
4136 sra_ipa_reset_debug_stmts (adjustments);
4137 convert_callers (node, adjustments);
4138 cgraph_make_node_local (node);
4139 return;
4140 }
4141
4142 /* Return false if the function is apparently unsuitable for IPA-SRA based on
4143 its attributes; return true otherwise.  NODE is the cgraph node of the
4144 current function.  */
4145
4146 static bool
4147 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
4148 {
4149 if (!cgraph_node_can_be_local_p (node))
4150 {
4151 if (dump_file)
4152 fprintf (dump_file, "Function not local to this compilation unit.\n");
4153 return false;
4154 }
4155
4156 if (DECL_VIRTUAL_P (current_function_decl))
4157 {
4158 if (dump_file)
4159 fprintf (dump_file, "Function is a virtual method.\n");
4160 return false;
4161 }
4162
4163 if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
4164 && node->global.size >= MAX_INLINE_INSNS_AUTO)
4165 {
4166 if (dump_file)
4167 fprintf (dump_file, "Function too big to be made truly local.\n");
4168 return false;
4169 }
4170
4171 if (!node->callers)
4172 {
4173 if (dump_file)
4174 fprintf (dump_file,
4175 "Function has no callers in this compilation unit.\n");
4176 return false;
4177 }
4178
4179 if (cfun->stdarg)
4180 {
4181 if (dump_file)
4182 fprintf (dump_file, "Function uses stdarg.\n");
4183 return false;
4184 }
4185
4186 if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
4187 return false;
4188
4189 return true;
4190 }
4191
4192 /* Perform early interprocedural SRA. */
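
/* The overall effect, sketched on a hypothetical translation unit:

     static int f (int unused, int *p) { return *p; }
     int g (void) { int i = 1; return f (0, &i); }

   F can be rewritten as  static int f (int p) { return p; }  and the call
   in G as  f (i),  removing UNUSED and passing *p by value.  */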
4193
4194 static unsigned int
4195 ipa_early_sra (void)
4196 {
4197 struct cgraph_node *node = cgraph_node (current_function_decl);
4198 ipa_parm_adjustment_vec adjustments;
4199 int ret = 0;
4200
4201 if (!ipa_sra_preliminary_function_checks (node))
4202 return 0;
4203
4204 sra_initialize ();
4205 sra_mode = SRA_MODE_EARLY_IPA;
4206
4207 if (!find_param_candidates ())
4208 {
4209 if (dump_file)
4210 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
4211 goto simple_out;
4212 }
4213
4214 if (!all_callers_have_enough_arguments_p (node))
4215 {
4216 if (dump_file)
4217 fprintf (dump_file, "There are callers with insufficient number of "
4218 "arguments.\n");
4219 goto simple_out;
4220 }
4221
4222 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
4223 func_param_count
4224 * last_basic_block_for_function (cfun));
4225 final_bbs = BITMAP_ALLOC (NULL);
4226
4227 scan_function (build_access_from_expr, build_accesses_from_assign,
4228 NULL, true, NULL);
4229 if (encountered_apply_args)
4230 {
4231 if (dump_file)
4232 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
4233 goto out;
4234 }
4235
4236 if (encountered_unchangable_recursive_call)
4237 {
4238 if (dump_file)
4239 fprintf (dump_file, "Function calls itself with insufficient "
4240 "number of arguments.\n");
4241 goto out;
4242 }
4243
4244 adjustments = analyze_all_param_acesses ();
4245 if (!adjustments)
4246 goto out;
4247 if (dump_file)
4248 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
4249
4250 modify_function (node, adjustments);
4251 VEC_free (ipa_parm_adjustment_t, heap, adjustments);
4252 ret = TODO_update_ssa;
4253
4254 statistics_counter_event (cfun, "Unused parameters deleted",
4255 sra_stats.deleted_unused_parameters);
4256 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
4257 sra_stats.scalar_by_ref_to_by_val);
4258 statistics_counter_event (cfun, "Aggregate parameters broken up",
4259 sra_stats.aggregate_params_reduced);
4260 statistics_counter_event (cfun, "Aggregate parameter components created",
4261 sra_stats.param_reductions_created);
4262
4263 out:
4264 BITMAP_FREE (final_bbs);
4265 free (bb_dereferences);
4266 simple_out:
4267 sra_deinitialize ();
4268 return ret;
4269 }
4270
4271 /* Return true iff early IPA-SRA shall be performed.  */
4272 static bool
4273 ipa_early_sra_gate (void)
4274 {
4275 return flag_ipa_sra;
4276 }
4277
4278 struct gimple_opt_pass pass_early_ipa_sra =
4279 {
4280 {
4281 GIMPLE_PASS,
4282 "eipa_sra", /* name */
4283 ipa_early_sra_gate, /* gate */
4284 ipa_early_sra, /* execute */
4285 NULL, /* sub */
4286 NULL, /* next */
4287 0, /* static_pass_number */
4288 TV_IPA_SRA, /* tv_id */
4289 0, /* properties_required */
4290 0, /* properties_provided */
4291 0, /* properties_destroyed */
4292 0, /* todo_flags_start */
4293 TODO_dump_func | TODO_dump_cgraph /* todo_flags_finish */
4294 }
4295 };
4296
4297