1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2008, 2009, 2010 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* This file implements Scalar Replacement of Aggregates (SRA). SRA is run
24 twice, once in the early stages of compilation (early SRA) and once in the
25 late stages (late SRA). The aim of both is to turn references to scalar
26 parts of aggregates into uses of independent scalar variables.
27
28 The two passes are nearly identical; the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement because together with inlining this can lead to weird type
31 conversions.
32
33 Both passes operate in four stages:
34
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
38
39 2. The function body is scanned. In the process, declarations which are
40 used in a manner that prevents their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
46
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
49 accesses.
50
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
55
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
60
61 Then accesses are propagated whenever possible (i.e. in cases when it
62 does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
64
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
67
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from respective parameter aggregates. */
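
/* As a hypothetical illustration of the whole pipeline (the declarations
   and the resulting code below are made up; assuming 32-bit ints):

     struct S { int i; int j; };

     struct S s;
     s.i = 1;          // write access: base s, offset 0, size 32
     s.j = 2;          // write access: base s, offset 32, size 32
     use (s.i + s.j);  // read accesses at offsets 0 and 32, size 32

   Both regions of s are accessed only as scalars and do not overlap
   partially, so the pass may create replacements SR_i and SR_j and
   rewrite the statements roughly as

     SR_i = 1;
     SR_j = 2;
     use (SR_i + SR_j);  */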
73
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "alloc-pool.h"
78 #include "tm.h"
79 #include "tree.h"
80 #include "gimple.h"
81 #include "cgraph.h"
82 #include "tree-flow.h"
83 #include "ipa-prop.h"
84 #include "diagnostic.h"
85 #include "statistics.h"
86 #include "tree-dump.h"
87 #include "timevar.h"
88 #include "params.h"
89 #include "target.h"
90 #include "flags.h"
91
92 /* Enumeration of all aggregate reductions we can do. */
93 enum sra_mode { SRA_MODE_EARLY_IPA, /* early call regularization */
94 SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
95 SRA_MODE_INTRA }; /* late intraprocedural SRA */
96
97 /* Global variable describing which aggregate reduction we are performing at
98 the moment. */
99 static enum sra_mode sra_mode;
100
101 struct assign_link;
102
103 /* ACCESS represents each access to an aggregate variable (as a whole or a
104 part). It can also represent a group of accesses that refer to exactly the
105 same fragment of an aggregate (i.e. those that have exactly the same offset
106 and size). Such representatives for a single aggregate, once determined,
107 are linked in a linked list and have the group fields set.
108
109 Moreover, when doing intraprocedural SRA, a tree is built from those
110 representatives (by means of first_child and next_sibling pointers), in
111 which all items in a subtree are "within" the root, i.e. their offset is
112 greater or equal to offset of the root and offset+size is smaller or equal
113 to offset+size of the root. Children of an access are sorted by offset.
114
115 Note that accesses to parts of vector and complex number types are always
116 represented by an access to the whole complex number or vector. It is a
117 duty of the modifying functions to replace them appropriately. */
118
119 struct access
120 {
121 /* Values returned by `get_ref_base_and_extent' for each component reference.
122 If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
123 `SIZE = TYPE_SIZE (TREE_TYPE (expr))'. */
124 HOST_WIDE_INT offset;
125 HOST_WIDE_INT size;
126 tree base;
127
128 /* Expression. It is context dependent so do not use it to create new
129 expressions to access the original aggregate. See PR 42154 for a
130 testcase. */
131 tree expr;
132 /* Type. */
133 tree type;
134
135 /* The statement this access belongs to. */
136 gimple stmt;
137
138 /* Next group representative for this aggregate. */
139 struct access *next_grp;
140
141 /* Pointer to the group representative. Pointer to itself if the struct is
142 the representative. */
143 struct access *group_representative;
144
145 /* If this access has any children (in terms of the definition above), this
146 points to the first one. */
147 struct access *first_child;
148
149 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
150 described above. In IPA-SRA this is a pointer to the next access
151 belonging to the same group (having the same representative). */
152 struct access *next_sibling;
153
154 /* Pointers to the first and last element in the linked list of assign
155 links. */
156 struct assign_link *first_link, *last_link;
157
158 /* Pointer to the next access in the work queue. */
159 struct access *next_queued;
160
161 /* Replacement variable for this access "region." Never to be accessed
162 directly, always only by means of get_access_replacement() and only
163 when grp_to_be_replaced flag is set. */
164 tree replacement_decl;
165
166 /* Is this particular access a write access? */
167 unsigned write : 1;
168
169 /* Is this access currently in the work queue? */
170 unsigned grp_queued : 1;
171
172 /* Does this group contain a write access? This flag is propagated down the
173 access tree. */
174 unsigned grp_write : 1;
175
176 /* Does this group contain a read access? This flag is propagated down the
177 access tree. */
178 unsigned grp_read : 1;
179
180 /* Other passes of the analysis use this bit to make function
181 analyze_access_subtree create scalar replacements for this group if
182 possible. */
183 unsigned grp_hint : 1;
184
185 /* Is the subtree rooted in this access fully covered by scalar
186 replacements? */
187 unsigned grp_covered : 1;
188
189 /* If set to true, this access and all below it in an access tree must not be
190 scalarized. */
191 unsigned grp_unscalarizable_region : 1;
192
193 /* Whether data have been written to parts of the aggregate covered by this
194 access which are not to be scalarized. This flag is propagated up in the
195 access tree. */
196 unsigned grp_unscalarized_data : 1;
197
198 /* Does this access and/or group contain a write access through a
199 BIT_FIELD_REF? */
200 unsigned grp_partial_lhs : 1;
201
202 /* Does this group contain accesses to different types (i.e. through a union
203 or a similar mechanism)? */
204 unsigned grp_different_types : 1;
205
206 /* Set when a scalar replacement should be created for this variable. The
207 decision and the creation happen at different places because create_tmp_var
208 cannot be called from within FOR_EACH_REFERENCED_VAR. */
209 unsigned grp_to_be_replaced : 1;
210
211 /* Is it possible that the group refers to data which might be (directly or
212 otherwise) modified? */
213 unsigned grp_maybe_modified : 1;
214
215 /* Set when this is a representative of a pointer to scalar (i.e. by
216 reference) parameter which we consider for turning into a plain scalar
217 (i.e. a by value parameter). */
218 unsigned grp_scalar_ptr : 1;
219
220 /* Set when we discover that this pointer is not safe to dereference in the
221 caller. */
222 unsigned grp_not_necessarilly_dereferenced : 1;
223 };
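
/* For example (hypothetical layout, assuming 32-bit ints), group
   representatives for

     struct { struct { int a; int b; } in; int c; } s;

   accessed as s.in, s.in.a, s.in.b and s.c could form the trees

     (offset 0, size 64)  s.in
       (offset 0, size 32)  s.in.a
       (offset 32, size 32) s.in.b
     (offset 64, size 32) s.c

   where s.in.a and s.in.b are linked through next_sibling, s.in points to
   s.in.a through first_child, and the roots s.in and s.c are linked
   through next_grp. */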
224
225 typedef struct access *access_p;
226
227 DEF_VEC_P (access_p);
228 DEF_VEC_ALLOC_P (access_p, heap);
229
230 /* Alloc pool for allocating access structures. */
231 static alloc_pool access_pool;
232
233 /* A structure linking lhs and rhs accesses from an aggregate assignment. They
234 are used to propagate subaccesses from rhs to lhs as long as they don't
235 conflict with what is already there. */
236 struct assign_link
237 {
238 struct access *lacc, *racc;
239 struct assign_link *next;
240 };
241
242 /* Alloc pool for allocating assign link structures. */
243 static alloc_pool link_pool;
244
245 /* Base (tree) -> Vector (VEC(access_p,heap) *) map. */
246 static struct pointer_map_t *base_access_vec;
247
248 /* Bitmap of candidates. */
249 static bitmap candidate_bitmap;
250
251 /* Obstack for creation of fancy names. */
252 static struct obstack name_obstack;
253
254 /* Head of a linked list of accesses that need to have their subaccesses
255 propagated to their assignment counterparts. */
256 static struct access *work_queue_head;
257
258 /* Number of parameters of the analyzed function when doing early ipa SRA. */
259 static int func_param_count;
260
261 /* scan_function sets the following to true if it encounters a call to
262 __builtin_apply_args. */
263 static bool encountered_apply_args;
264
265 /* This is a table recording, for each basic block and each parameter, the
266 largest distance (offset + size) into that parameter which is known to be
267 dereferenced and accessed in that BB. */
268 static HOST_WIDE_INT *bb_dereferences;
269 /* Bitmap of BBs that can cause the function to "stop" progressing by
270 returning, throwing externally, looping infinitely or calling a function
271 which might abort, etc. */
272 static bitmap final_bbs;
273
274 /* Representative of no accesses at all. */
275 static struct access no_accesses_representant;
276
277 /* Predicate to test the special value. */
278
279 static inline bool
280 no_accesses_p (struct access *access)
281 {
282 return access == &no_accesses_representant;
283 }
284
285 /* Statistics gathered during a run of one of the SRA passes. */
288
289 static struct
290 {
291 /* Number of processed aggregates is readily available in
292 analyze_all_variable_accesses and so is not stored here. */
293
294 /* Number of created scalar replacements. */
295 int replacements;
296
297 /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
298 expression. */
299 int exprs;
300
301 /* Number of statements created by generate_subtree_copies. */
302 int subtree_copies;
303
304 /* Number of statements created by load_assign_lhs_subreplacements. */
305 int subreplacements;
306
307 /* Number of times sra_modify_assign has deleted a statement. */
308 int deleted;
309
310 /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
311 RHS separately due to type conversions or nonexistent matching
312 references. */
313 int separate_lhs_rhs_handling;
314
315 /* Number of parameters that were removed because they were unused. */
316 int deleted_unused_parameters;
317
318 /* Number of scalars passed as parameters by reference that have been
319 converted to be passed by value. */
320 int scalar_by_ref_to_by_val;
321
322 /* Number of aggregate parameters that were replaced by one or more of their
323 components. */
324 int aggregate_params_reduced;
325
326 /* Number of components created when splitting aggregate parameters. */
327 int param_reductions_created;
328 } sra_stats;
329
/* Dump contents of ACCESS to file F in a human friendly way. If GRP is true,
representative fields are dumped, otherwise those which only describe the
individual access are. */
330 static void
331 dump_access (FILE *f, struct access *access, bool grp)
332 {
333 fprintf (f, "access { ");
334 fprintf (f, "base = (%d)'", DECL_UID (access->base));
335 print_generic_expr (f, access->base, 0);
336 fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
337 fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
338 fprintf (f, ", expr = ");
339 print_generic_expr (f, access->expr, 0);
340 fprintf (f, ", type = ");
341 print_generic_expr (f, access->type, 0);
342 if (grp)
343 fprintf (f, ", grp_write = %d, grp_read = %d, grp_hint = %d, "
344 "grp_covered = %d, grp_unscalarizable_region = %d, "
345 "grp_unscalarized_data = %d, grp_partial_lhs = %d, "
346 "grp_different_types = %d, grp_to_be_replaced = %d, "
347 "grp_maybe_modified = %d, "
348 "grp_not_necessarilly_dereferenced = %d\n",
349 access->grp_write, access->grp_read, access->grp_hint,
350 access->grp_covered, access->grp_unscalarizable_region,
351 access->grp_unscalarized_data, access->grp_partial_lhs,
352 access->grp_different_types, access->grp_to_be_replaced,
353 access->grp_maybe_modified,
354 access->grp_not_necessarilly_dereferenced);
355 else
356 fprintf (f, ", write = %d, grp_partial_lhs = %d\n", access->write,
357 access->grp_partial_lhs);
358 }
359
360 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
361
362 static void
363 dump_access_tree_1 (FILE *f, struct access *access, int level)
364 {
365 do
366 {
367 int i;
368
369 for (i = 0; i < level; i++)
370 fputs ("* ", f);
371
372 dump_access (f, access, true);
373
374 if (access->first_child)
375 dump_access_tree_1 (f, access->first_child, level + 1);
376
377 access = access->next_sibling;
378 }
379 while (access);
380 }
381
382 /* Dump all access trees for a variable, given the pointer to the first root in
383 ACCESS. */
384
385 static void
386 dump_access_tree (FILE *f, struct access *access)
387 {
388 for (; access; access = access->next_grp)
389 dump_access_tree_1 (f, access, 0);
390 }
391
392 /* Return true iff ACC is non-NULL and has subaccesses. */
393
394 static inline bool
395 access_has_children_p (struct access *acc)
396 {
397 return acc && acc->first_child;
398 }
399
400 /* Return a vector of pointers to accesses for the variable given in BASE or
401 NULL if there is none. */
402
403 static VEC (access_p, heap) *
404 get_base_access_vector (tree base)
405 {
406 void **slot;
407
408 slot = pointer_map_contains (base_access_vec, base);
409 if (!slot)
410 return NULL;
411 else
412 return *(VEC (access_p, heap) **) slot;
413 }
414
415 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
416 in ACCESS. Return NULL if it cannot be found. */
417
418 static struct access *
419 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
420 HOST_WIDE_INT size)
421 {
422 while (access && (access->offset != offset || access->size != size))
423 {
424 struct access *child = access->first_child;
425
426 while (child && (child->offset + child->size <= offset))
427 child = child->next_sibling;
428 access = child;
429 }
430
431 return access;
432 }
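
/* For instance, in a hypothetical tree with a root at offset 0 of size 64
   and children at offsets 0 and 32, both of size 32, looking up offset 32
   and size 32 does not match the root, skips the first child because
   0 + 32 <= 32, and returns the second child. */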
433
434 /* Return the first group representative for DECL or NULL if none exists. */
435
436 static struct access *
437 get_first_repr_for_decl (tree base)
438 {
439 VEC (access_p, heap) *access_vec;
440
441 access_vec = get_base_access_vector (base);
442 if (!access_vec)
443 return NULL;
444
445 return VEC_index (access_p, access_vec, 0);
446 }
447
448 /* Find an access representative for the variable BASE and given OFFSET and
449 SIZE. Requires that access trees have already been built. Return NULL if
450 it cannot be found. */
451
452 static struct access *
453 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
454 HOST_WIDE_INT size)
455 {
456 struct access *access;
457
458 access = get_first_repr_for_decl (base);
459 while (access && (access->offset + access->size <= offset))
460 access = access->next_grp;
461 if (!access)
462 return NULL;
463
464 return find_access_in_subtree (access, offset, size);
465 }
466
467 /* Add LINK to the linked list of assign links of RACC. */
468 static void
469 add_link_to_rhs (struct access *racc, struct assign_link *link)
470 {
471 gcc_assert (link->racc == racc);
472
473 if (!racc->first_link)
474 {
475 gcc_assert (!racc->last_link);
476 racc->first_link = link;
477 }
478 else
479 racc->last_link->next = link;
480
481 racc->last_link = link;
482 link->next = NULL;
483 }
484
485 /* Move all link structures in their linked list in OLD_RACC to the linked list
486 in NEW_RACC. */
487 static void
488 relink_to_new_repr (struct access *new_racc, struct access *old_racc)
489 {
490 if (!old_racc->first_link)
491 {
492 gcc_assert (!old_racc->last_link);
493 return;
494 }
495
496 if (new_racc->first_link)
497 {
498 gcc_assert (!new_racc->last_link->next);
499 gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
500
501 new_racc->last_link->next = old_racc->first_link;
502 new_racc->last_link = old_racc->last_link;
503 }
504 else
505 {
506 gcc_assert (!new_racc->last_link);
507
508 new_racc->first_link = old_racc->first_link;
509 new_racc->last_link = old_racc->last_link;
510 }
511 old_racc->first_link = old_racc->last_link = NULL;
512 }
513
514 /* Add ACCESS to the work queue (which is actually a stack). */
515
516 static void
517 add_access_to_work_queue (struct access *access)
518 {
519 if (!access->grp_queued)
520 {
521 gcc_assert (!access->next_queued);
522 access->next_queued = work_queue_head;
523 access->grp_queued = 1;
524 work_queue_head = access;
525 }
526 }
527
528 /* Pop an access from the work queue, and return it, assuming there is one. */
529
530 static struct access *
531 pop_access_from_work_queue (void)
532 {
533 struct access *access = work_queue_head;
534
535 work_queue_head = access->next_queued;
536 access->next_queued = NULL;
537 access->grp_queued = 0;
538 return access;
539 }
540
541
542 /* Allocate necessary structures. */
543
544 static void
545 sra_initialize (void)
546 {
547 candidate_bitmap = BITMAP_ALLOC (NULL);
548 gcc_obstack_init (&name_obstack);
549 access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
550 link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
551 base_access_vec = pointer_map_create ();
552 memset (&sra_stats, 0, sizeof (sra_stats));
553 encountered_apply_args = false;
554 }
555
556 /* Hook fed to pointer_map_traverse, deallocate stored vectors. */
557
558 static bool
559 delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
560 void *data ATTRIBUTE_UNUSED)
561 {
562 VEC (access_p, heap) *access_vec;
563 access_vec = (VEC (access_p, heap) *) *value;
564 VEC_free (access_p, heap, access_vec);
565
566 return true;
567 }
568
569 /* Deallocate all general structures. */
570
571 static void
572 sra_deinitialize (void)
573 {
574 BITMAP_FREE (candidate_bitmap);
575 free_alloc_pool (access_pool);
576 free_alloc_pool (link_pool);
577 obstack_free (&name_obstack, NULL);
578
579 pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
580 pointer_map_destroy (base_access_vec);
581 }
582
583 /* Remove DECL from candidates for SRA and write REASON to the dump file if
584 there is one. */
585 static void
586 disqualify_candidate (tree decl, const char *reason)
587 {
588 bitmap_clear_bit (candidate_bitmap, DECL_UID (decl));
589
590 if (dump_file && (dump_flags & TDF_DETAILS))
591 {
592 fprintf (dump_file, "! Disqualifying ");
593 print_generic_expr (dump_file, decl, 0);
594 fprintf (dump_file, " - %s\n", reason);
595 }
596 }
597
598 /* Return true iff the type contains a field or an element which does not allow
599 scalarization. */
600
601 static bool
602 type_internals_preclude_sra_p (tree type)
603 {
604 tree fld;
605 tree et;
606
607 switch (TREE_CODE (type))
608 {
609 case RECORD_TYPE:
610 case UNION_TYPE:
611 case QUAL_UNION_TYPE:
612 for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
613 if (TREE_CODE (fld) == FIELD_DECL)
614 {
615 tree ft = TREE_TYPE (fld);
616
617 if (TREE_THIS_VOLATILE (fld)
618 || !DECL_FIELD_OFFSET (fld) || !DECL_SIZE (fld)
619 || !host_integerp (DECL_FIELD_OFFSET (fld), 1)
620 || !host_integerp (DECL_SIZE (fld), 1))
621 return true;
622
623 if (AGGREGATE_TYPE_P (ft)
624 && type_internals_preclude_sra_p (ft))
625 return true;
626 }
627
628 return false;
629
630 case ARRAY_TYPE:
631 et = TREE_TYPE (type);
632
633 if (AGGREGATE_TYPE_P (et))
634 return type_internals_preclude_sra_p (et);
635 else
636 return false;
637
638 default:
639 return false;
640 }
641 }
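
/* For instance, a hypothetical type such as

     struct no_sra { int ok; volatile int v; };

   is rejected because of the volatile field, whereas nested aggregates of
   constant-sized, non-volatile fields are checked recursively and
   accepted. */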
642
643 /* If T is an SSA_NAME, return its base variable if it is a default
644 definition and NULL otherwise. Return T itself if it is not an SSA_NAME. */
645
646 static tree
647 get_ssa_base_param (tree t)
648 {
649 if (TREE_CODE (t) == SSA_NAME)
650 {
651 if (SSA_NAME_IS_DEFAULT_DEF (t))
652 return SSA_NAME_VAR (t);
653 else
654 return NULL_TREE;
655 }
656 return t;
657 }
658
659 /* Mark a dereference of BASE of distance DIST in the basic block that STMT
660 belongs to, unless the BB has already been marked as potentially
661 final. */
662
663 static void
664 mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
665 {
666 basic_block bb = gimple_bb (stmt);
667 int idx, parm_index = 0;
668 tree parm;
669
670 if (bitmap_bit_p (final_bbs, bb->index))
671 return;
672
673 for (parm = DECL_ARGUMENTS (current_function_decl);
674 parm && parm != base;
675 parm = TREE_CHAIN (parm))
676 parm_index++;
677
678 gcc_assert (parm_index < func_param_count);
679
680 idx = bb->index * func_param_count + parm_index;
681 if (bb_dereferences[idx] < dist)
682 bb_dereferences[idx] = dist;
683 }
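
/* For example, with func_param_count == 3, a dereference of the second
   parameter (parm_index == 1) in basic block number 4 is recorded in
   bb_dereferences[4 * 3 + 1], i.e. at index 13, and only if the new
   distance is larger than the one already stored there. */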
684
685 /* Create and insert access for EXPR. Return the created access, or NULL if
686 that is not possible. */
687
688 static struct access *
689 create_access (tree expr, gimple stmt, bool write)
690 {
691 struct access *access;
692 void **slot;
693 VEC (access_p,heap) *vec;
694 HOST_WIDE_INT offset, size, max_size;
695 tree base = expr;
696 bool ptr, unscalarizable_region = false;
697
698 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
699
700 if (sra_mode == SRA_MODE_EARLY_IPA && INDIRECT_REF_P (base))
701 {
702 base = get_ssa_base_param (TREE_OPERAND (base, 0));
703 if (!base)
704 return NULL;
705 ptr = true;
706 }
707 else
708 ptr = false;
709
710 if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
711 return NULL;
712
713 if (sra_mode == SRA_MODE_EARLY_IPA)
714 {
715 if (size < 0 || size != max_size)
716 {
717 disqualify_candidate (base, "Encountered a variable sized access.");
718 return NULL;
719 }
720 if ((offset % BITS_PER_UNIT) != 0 || (size % BITS_PER_UNIT) != 0)
721 {
722 disqualify_candidate (base,
723 "Encountered an acces not aligned to a byte.");
724 return NULL;
725 }
726
727 if (ptr)
728 mark_parm_dereference (base, offset + size, stmt);
729 }
730 else
731 {
732 if (size != max_size)
733 {
734 size = max_size;
735 unscalarizable_region = true;
736 }
737 if (size < 0)
738 {
739 disqualify_candidate (base, "Encountered an unconstrained access.");
740 return NULL;
741 }
742 }
743
744 access = (struct access *) pool_alloc (access_pool);
745 memset (access, 0, sizeof (struct access));
746
747 access->base = base;
748 access->offset = offset;
749 access->size = size;
750 access->expr = expr;
751 access->type = TREE_TYPE (expr);
752 access->write = write;
753 access->grp_unscalarizable_region = unscalarizable_region;
754 access->stmt = stmt;
755
756 slot = pointer_map_contains (base_access_vec, base);
757 if (slot)
758 vec = (VEC (access_p, heap) *) *slot;
759 else
760 vec = VEC_alloc (access_p, heap, 32);
761
762 VEC_safe_push (access_p, heap, vec, access);
763
764 *((struct VEC (access_p,heap) **)
765 pointer_map_insert (base_access_vec, base)) = vec;
766
767 return access;
768 }
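
/* For instance (hypothetical declarations, assuming 32-bit ints), given

     struct S { int i; int j; } s;
     ... = s.j;

   get_ref_base_and_extent returns base s, offset 32 and size 32, and the
   recorded access gets type int and expr s.j. */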
769
770
771 /* Search the given tree T for the underlying declaration by skipping handled
772 components, and exclude that declaration from the candidates. */
773
774 static void
775 disqualify_base_of_expr (tree t, const char *reason)
776 {
777 while (handled_component_p (t))
778 t = TREE_OPERAND (t, 0);
779
780 if (sra_mode == SRA_MODE_EARLY_IPA)
781 {
782 if (INDIRECT_REF_P (t))
783 t = TREE_OPERAND (t, 0);
784 t = get_ssa_base_param (t);
785 }
786
787 if (t && DECL_P (t))
788 disqualify_candidate (t, reason);
789 }
790
791 /* Scan expression EXPR and create access structures for all accesses to
792 candidates for scalarization. Return the created access or NULL if none is
793 created. */
794
795 static struct access *
796 build_access_from_expr_1 (tree *expr_ptr, gimple stmt, bool write)
797 {
798 struct access *ret = NULL;
799 tree expr = *expr_ptr;
800 bool partial_ref;
801
802 if (TREE_CODE (expr) == BIT_FIELD_REF
803 || TREE_CODE (expr) == IMAGPART_EXPR
804 || TREE_CODE (expr) == REALPART_EXPR)
805 {
806 expr = TREE_OPERAND (expr, 0);
807 partial_ref = true;
808 }
809 else
810 partial_ref = false;
811
812 /* We need to dive through V_C_Es in order to get the size of the converted
813 operand and not the result type. Ada produces such statements. We are also
814 capable of handling the topmost V_C_E but not any of those buried in other
815 handled components. */
816 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
817 expr = TREE_OPERAND (expr, 0);
818
819 if (contains_view_convert_expr_p (expr))
820 {
821 disqualify_base_of_expr (expr, "V_C_E under a different handled "
822 "component.");
823 return NULL;
824 }
825
826 switch (TREE_CODE (expr))
827 {
828 case INDIRECT_REF:
829 if (sra_mode != SRA_MODE_EARLY_IPA)
830 return NULL;
831 /* fall through */
832 case VAR_DECL:
833 case PARM_DECL:
834 case RESULT_DECL:
835 case COMPONENT_REF:
836 case ARRAY_REF:
837 case ARRAY_RANGE_REF:
838 ret = create_access (expr, stmt, write);
839 break;
840
841 default:
842 break;
843 }
844
845 if (write && partial_ref && ret)
846 ret->grp_partial_lhs = 1;
847
848 return ret;
849 }
850
851 /* Callback of scan_function. Scan expression EXPR and create access
852 structures for all accesses to candidates for scalarization. Return true if
853 any access has been inserted. */
854
855 static bool
856 build_access_from_expr (tree *expr_ptr,
857 gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED, bool write,
858 void *data ATTRIBUTE_UNUSED)
859 {
860 return build_access_from_expr_1 (expr_ptr, gsi_stmt (*gsi), write) != NULL;
861 }
862
863 /* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
864 modes in which it matters, return true iff they have been disqualified. RHS
865 may be NULL, in which case it is ignored. If we scalarize an aggregate in
866 intra-SRA we may need to add statements after each statement. This is not
867 possible if a statement unconditionally has to end the basic block. */
868 static bool
869 disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
870 {
871 if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
872 && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
873 {
874 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
875 if (rhs)
876 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
877 return true;
878 }
879 return false;
880 }
881
882
883 /* Result code for scan_assign callback for scan_function. */
884 enum scan_assign_result { SRA_SA_NONE, /* nothing done for the stmt */
885 SRA_SA_PROCESSED, /* stmt analyzed/changed */
886 SRA_SA_REMOVED }; /* stmt redundant and eliminated */
887
888
889 /* Callback of scan_function. Scan expressions occurring in the statement
890 pointed to by STMT_EXPR, create access structures for all accesses to
891 candidates for scalarization and remove those candidates which occur in
892 statements or expressions that prevent them from being split apart. Return
893 true if any access has been inserted. */
894
895 static enum scan_assign_result
896 build_accesses_from_assign (gimple *stmt_ptr,
897 gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
898 void *data ATTRIBUTE_UNUSED)
899 {
900 gimple stmt = *stmt_ptr;
901 tree *lhs_ptr, *rhs_ptr;
902 struct access *lacc, *racc;
903
904 if (!gimple_assign_single_p (stmt))
905 return SRA_SA_NONE;
906
907 lhs_ptr = gimple_assign_lhs_ptr (stmt);
908 rhs_ptr = gimple_assign_rhs1_ptr (stmt);
909
910 if (disqualify_ops_if_throwing_stmt (stmt, *lhs_ptr, *rhs_ptr))
911 return SRA_SA_NONE;
912
913 racc = build_access_from_expr_1 (rhs_ptr, stmt, false);
914 lacc = build_access_from_expr_1 (lhs_ptr, stmt, true);
915
916 if (lacc && racc
917 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
918 && !lacc->grp_unscalarizable_region
919 && !racc->grp_unscalarizable_region
920 && AGGREGATE_TYPE_P (TREE_TYPE (*lhs_ptr))
921 /* FIXME: Turn the following line into an assert after PR 40058 is
922 fixed. */
923 && lacc->size == racc->size
924 && useless_type_conversion_p (lacc->type, racc->type))
925 {
926 struct assign_link *link;
927
928 link = (struct assign_link *) pool_alloc (link_pool);
929 memset (link, 0, sizeof (struct assign_link));
930
931 link->lacc = lacc;
932 link->racc = racc;
933
934 add_link_to_rhs (racc, link);
935 }
936
937 return (lacc || racc) ? SRA_SA_PROCESSED : SRA_SA_NONE;
938 }
939
940 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
941 GIMPLE_ASM operands with memory constraints which cannot be scalarized. */
942
943 static bool
944 asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
945 void *data ATTRIBUTE_UNUSED)
946 {
947 if (DECL_P (op))
948 disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
949
950 return false;
951 }
952
953
954 /* Scan function and look for interesting statements. Return true if any has
955 been found or processed, as indicated by callbacks. SCAN_EXPR is a callback
956 called on all expressions within statements except assign statements and
957 those deemed entirely unsuitable for some reason (all operands in such
958 statements and expression are removed from candidate_bitmap). SCAN_ASSIGN
959 is a callback called on all assign statements, HANDLE_SSA_DEFS is a callback
960 called on assign statements and those call statements which have a lhs, it
961 can be NULL. ANALYSIS_STAGE is true when running in the analysis stage of a
962 pass and thus no statement is being modified. DATA is a pointer passed to
963 all callbacks. If any single callback returns true, this function also
964 returns true, otherwise it returns false. */
965
966 static bool
967 scan_function (bool (*scan_expr) (tree *, gimple_stmt_iterator *, bool, void *),
968 enum scan_assign_result (*scan_assign) (gimple *,
969 gimple_stmt_iterator *,
970 void *),
971 bool (*handle_ssa_defs)(gimple, void *),
972 bool analysis_stage, void *data)
973 {
974 gimple_stmt_iterator gsi;
975 basic_block bb;
976 unsigned i;
977 tree *t;
978 bool ret = false;
979
980 FOR_EACH_BB (bb)
981 {
982 bool bb_changed = false;
983
984 if (handle_ssa_defs)
985 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
986 ret |= handle_ssa_defs (gsi_stmt (gsi), data);
987
988 gsi = gsi_start_bb (bb);
989 while (!gsi_end_p (gsi))
990 {
991 gimple stmt = gsi_stmt (gsi);
992 enum scan_assign_result assign_result;
993 bool any = false, deleted = false;
994
995 if (analysis_stage && final_bbs && stmt_can_throw_external (stmt))
996 bitmap_set_bit (final_bbs, bb->index);
997 switch (gimple_code (stmt))
998 {
999 case GIMPLE_RETURN:
1000 t = gimple_return_retval_ptr (stmt);
1001 if (*t != NULL_TREE)
1002 any |= scan_expr (t, &gsi, false, data);
1003 if (analysis_stage && final_bbs)
1004 bitmap_set_bit (final_bbs, bb->index);
1005 break;
1006
1007 case GIMPLE_ASSIGN:
1008 assign_result = scan_assign (&stmt, &gsi, data);
1009 any |= assign_result == SRA_SA_PROCESSED;
1010 deleted = assign_result == SRA_SA_REMOVED;
1011 if (handle_ssa_defs && assign_result != SRA_SA_REMOVED)
1012 any |= handle_ssa_defs (stmt, data);
1013 break;
1014
1015 case GIMPLE_CALL:
1016 /* Operands must be processed before the lhs. */
1017 for (i = 0; i < gimple_call_num_args (stmt); i++)
1018 {
1019 tree *argp = gimple_call_arg_ptr (stmt, i);
1020 any |= scan_expr (argp, &gsi, false, data);
1021 }
1022
1023 if (analysis_stage)
1024 {
1025 tree dest = gimple_call_fndecl (stmt);
1026 int flags = gimple_call_flags (stmt);
1027
1028 if (dest
1029 && DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1030 && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1031 encountered_apply_args = true;
1032
1033 if (final_bbs
1034 && (flags & (ECF_CONST | ECF_PURE)) == 0)
1035 bitmap_set_bit (final_bbs, bb->index);
1036 }
1037
1038 if (gimple_call_lhs (stmt))
1039 {
1040 tree *lhs_ptr = gimple_call_lhs_ptr (stmt);
1041 if (!analysis_stage
1042 || !disqualify_ops_if_throwing_stmt (stmt,
1043 *lhs_ptr, NULL))
1044 {
1045 any |= scan_expr (lhs_ptr, &gsi, true, data);
1046 if (handle_ssa_defs)
1047 any |= handle_ssa_defs (stmt, data);
1048 }
1049 }
1050 break;
1051
1052 case GIMPLE_ASM:
1053 if (analysis_stage)
1054 {
1055 walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
1056 asm_visit_addr);
1057 if (final_bbs)
1058 bitmap_set_bit (final_bbs, bb->index);
1059 }
1060 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
1061 {
1062 tree *op = &TREE_VALUE (gimple_asm_input_op (stmt, i));
1063 any |= scan_expr (op, &gsi, false, data);
1064 }
1065 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
1066 {
1067 tree *op = &TREE_VALUE (gimple_asm_output_op (stmt, i));
1068 any |= scan_expr (op, &gsi, true, data);
1069 }
1070 break;
1071
1072 default:
1073 break;
1074 }
1075
1076 if (any)
1077 {
1078 ret = true;
1079
1080 if (!analysis_stage)
1081 {
1082 bb_changed = true;
1083 update_stmt (stmt);
1084 maybe_clean_eh_stmt (stmt);
1085 }
1086 }
1087 if (deleted)
1088 bb_changed = true;
1089 else
1090 gsi_next (&gsi);
1094 }
1095 if (!analysis_stage && bb_changed && sra_mode == SRA_MODE_EARLY_IPA)
1096 gimple_purge_dead_eh_edges (bb);
1097 }
1098
1099 return ret;
1100 }
1101
1102 /* Comparison function for qsort. The array contains pointers to accesses. An
1103 access is considered smaller than another if it has a smaller offset or if the
1104 offsets are the same but its size is bigger. */
1105
1106 static int
1107 compare_access_positions (const void *a, const void *b)
1108 {
1109 const access_p *fp1 = (const access_p *) a;
1110 const access_p *fp2 = (const access_p *) b;
1111 const access_p f1 = *fp1;
1112 const access_p f2 = *fp2;
1113
1114 if (f1->offset != f2->offset)
1115 return f1->offset < f2->offset ? -1 : 1;
1116
1117 if (f1->size == f2->size)
1118 {
1119 /* Put any non-aggregate type before any aggregate type. */
1120 if (!is_gimple_reg_type (f1->type)
1121 && is_gimple_reg_type (f2->type))
1122 return 1;
1123 else if (is_gimple_reg_type (f1->type)
1124 && !is_gimple_reg_type (f2->type))
1125 return -1;
1126 /* Put any complex or vector type before any other scalar type. */
1127 else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1128 && TREE_CODE (f1->type) != VECTOR_TYPE
1129 && (TREE_CODE (f2->type) == COMPLEX_TYPE
1130 || TREE_CODE (f2->type) == VECTOR_TYPE))
1131 return 1;
1132 else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1133 || TREE_CODE (f1->type) == VECTOR_TYPE)
1134 && TREE_CODE (f2->type) != COMPLEX_TYPE
1135 && TREE_CODE (f2->type) != VECTOR_TYPE)
1136 return -1;
1137 /* Put the integral type with the bigger precision first. */
1138 else if (INTEGRAL_TYPE_P (f1->type)
1139 && INTEGRAL_TYPE_P (f2->type))
1140 return TYPE_PRECISION (f1->type) > TYPE_PRECISION (f2->type) ? -1 : 1;
1141 /* Put any integral type with non-full precision last. */
1142 else if (INTEGRAL_TYPE_P (f1->type)
1143 && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
1144 != TYPE_PRECISION (f1->type)))
1145 return 1;
1146 else if (INTEGRAL_TYPE_P (f2->type)
1147 && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
1148 != TYPE_PRECISION (f2->type)))
1149 return -1;
1150 /* Stabilize the sort. */
1151 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1152 }
1153
1154 /* We want the bigger accesses first, thus the opposite operator in the next
1155 line: */
1156 return f1->size > f2->size ? -1 : 1;
1157 }
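
/* As an example of the resulting order (hypothetical accesses into one
   aggregate; offsets and sizes in bits):

     (offset 0, size 64, double)    scalars precede aggregates of the
     (offset 0, size 64, struct)    same offset and size,
     (offset 0, size 32, int)       bigger sizes precede smaller ones,
     (offset 32, size 32, int)      smaller offsets come first. */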
1158
1159
1160 /* Append a name of the declaration to the name obstack. A helper function for
1161 make_fancy_name. */
1162
1163 static void
1164 make_fancy_decl_name (tree decl)
1165 {
1166 char buffer[32];
1167
1168 tree name = DECL_NAME (decl);
1169 if (name)
1170 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1171 IDENTIFIER_LENGTH (name));
1172 else
1173 {
1174 sprintf (buffer, "D%u", DECL_UID (decl));
1175 obstack_grow (&name_obstack, buffer, strlen (buffer));
1176 }
1177 }
1178
1179 /* Helper for make_fancy_name. */
1180
1181 static void
1182 make_fancy_name_1 (tree expr)
1183 {
1184 char buffer[32];
1185 tree index;
1186
1187 if (DECL_P (expr))
1188 {
1189 make_fancy_decl_name (expr);
1190 return;
1191 }
1192
1193 switch (TREE_CODE (expr))
1194 {
1195 case COMPONENT_REF:
1196 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1197 obstack_1grow (&name_obstack, '$');
1198 make_fancy_decl_name (TREE_OPERAND (expr, 1));
1199 break;
1200
1201 case ARRAY_REF:
1202 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1203 obstack_1grow (&name_obstack, '$');
1204 /* Arrays with only one element may not have a constant as their
1205 index. */
1206 index = TREE_OPERAND (expr, 1);
1207 if (TREE_CODE (index) != INTEGER_CST)
1208 break;
1209 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1210 obstack_grow (&name_obstack, buffer, strlen (buffer));
1211
1212 break;
1213
1214 case BIT_FIELD_REF:
1215 case REALPART_EXPR:
1216 case IMAGPART_EXPR:
1217 gcc_unreachable (); /* we treat these as scalars. */
1218 break;
1219 default:
1220 break;
1221 }
1222 }
1223
1224 /* Create a human readable name for the replacement variable for EXPR. */
1225
1226 static char *
1227 make_fancy_name (tree expr)
1228 {
1229 make_fancy_name_1 (expr);
1230 obstack_1grow (&name_obstack, '\0');
1231 return XOBFINISH (&name_obstack, char *);
1232 }
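
/* For example, for a hypothetical expression s.in.b[3] the functions above
   produce the name "s$in$b$3", which create_access_replacement below uses
   to name the replacement variable. */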
1233
1234 /* Helper function for build_ref_for_offset. */
1235
1236 static bool
1237 build_ref_for_offset_1 (tree *res, tree type, HOST_WIDE_INT offset,
1238 tree exp_type)
1239 {
1240 while (1)
1241 {
1242 tree fld;
1243 tree tr_size, index, minidx;
1244 HOST_WIDE_INT el_size;
1245
1246 if (offset == 0 && exp_type
1247 && types_compatible_p (exp_type, type))
1248 return true;
1249
1250 switch (TREE_CODE (type))
1251 {
1252 case UNION_TYPE:
1253 case QUAL_UNION_TYPE:
1254 case RECORD_TYPE:
1255 for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
1256 {
1257 HOST_WIDE_INT pos, size;
1258 tree expr, *expr_ptr;
1259
1260 if (TREE_CODE (fld) != FIELD_DECL)
1261 continue;
1262
1263 pos = int_bit_position (fld);
1264 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1265 tr_size = DECL_SIZE (fld);
1266 if (!tr_size || !host_integerp (tr_size, 1))
1267 continue;
1268 size = tree_low_cst (tr_size, 1);
1269 if (pos > offset || (pos + size) <= offset)
1270 continue;
1271
1272 if (res)
1273 {
1274 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1275 NULL_TREE);
1276 expr_ptr = &expr;
1277 }
1278 else
1279 expr_ptr = NULL;
1280 if (build_ref_for_offset_1 (expr_ptr, TREE_TYPE (fld),
1281 offset - pos, exp_type))
1282 {
1283 if (res)
1284 *res = expr;
1285 return true;
1286 }
1287 }
1288 return false;
1289
1290 case ARRAY_TYPE:
1291 tr_size = TYPE_SIZE (TREE_TYPE (type));
1292 if (!tr_size || !host_integerp (tr_size, 1))
1293 return false;
1294 el_size = tree_low_cst (tr_size, 1);
1295
1296 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1297 if (TREE_CODE (minidx) != INTEGER_CST)
1298 return false;
1299 if (res)
1300 {
1301 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1302 if (!integer_zerop (minidx))
1303 index = int_const_binop (PLUS_EXPR, index, minidx, 0);
1304 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1305 NULL_TREE, NULL_TREE);
1306 }
1307 offset = offset % el_size;
1308 type = TREE_TYPE (type);
1309 break;
1310
1311 default:
1312 if (offset != 0)
1313 return false;
1314
1315 if (exp_type)
1316 return false;
1317 else
1318 return true;
1319 }
1320 }
1321 }
1322
1323 /* Construct an expression that would reference a part of aggregate *EXPR of
1324 type TYPE at the given OFFSET of the type EXP_TYPE. If EXPR is NULL, the
1325 function only determines whether it can build such a reference without
1326 actually doing it, otherwise, the tree it points to is unshared first and
1327 then used as a base for further sub-references.
1328
1329 FIXME: Eventually this should be replaced with
1330 maybe_fold_offset_to_reference() from tree-ssa-ccp.c but that requires a
1331 minor rewrite of fold_stmt.
1332 */
1333
1334 bool
1335 build_ref_for_offset (tree *expr, tree type, HOST_WIDE_INT offset,
1336 tree exp_type, bool allow_ptr)
1337 {
1338 location_t loc = expr ? EXPR_LOCATION (*expr) : UNKNOWN_LOCATION;
1339
1340 if (expr)
1341 *expr = unshare_expr (*expr);
1342
1343 if (allow_ptr && POINTER_TYPE_P (type))
1344 {
1345 type = TREE_TYPE (type);
1346 if (expr)
1347 *expr = fold_build1_loc (loc, INDIRECT_REF, type, *expr);
1348 }
1349
1350 return build_ref_for_offset_1 (expr, type, offset, exp_type);
1351 }
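
/* For example (hypothetical types, assuming 32-bit ints), given

     struct T { int x; int y; };
     struct S { int a; struct T t; } s;

   and *EXPR referring to s, calling this function with OFFSET 64, the
   integer type as EXP_TYPE and ALLOW_PTR false descends through field t
   and sets *EXPR to s.t.y. With EXPR NULL the same walk serves purely as
   a feasibility check. */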
1352
1353 /* Return true iff TYPE is stdarg va_list type. */
1354
1355 static inline bool
1356 is_va_list_type (tree type)
1357 {
1358 return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1359 }
1360
1361 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
1362 the variables whose type is suitable for scalarization. */
1363
1364 static bool
1365 find_var_candidates (void)
1366 {
1367 tree var, type;
1368 referenced_var_iterator rvi;
1369 bool ret = false;
1370
1371 FOR_EACH_REFERENCED_VAR (var, rvi)
1372 {
1373 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
1374 continue;
1375 type = TREE_TYPE (var);
1376
1377 if (!AGGREGATE_TYPE_P (type)
1378 || needs_to_live_in_memory (var)
1379 || TREE_THIS_VOLATILE (var)
1380 || !COMPLETE_TYPE_P (type)
1381 || !host_integerp (TYPE_SIZE (type), 1)
1382 || tree_low_cst (TYPE_SIZE (type), 1) == 0
1383 || type_internals_preclude_sra_p (type)
1384 /* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1385 we also want to schedule it rather late. Thus we ignore it in
1386 the early pass. */
1387 || (sra_mode == SRA_MODE_EARLY_INTRA
1388 && is_va_list_type (type)))
1389 continue;
1390
1391 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1392
1393 if (dump_file && (dump_flags & TDF_DETAILS))
1394 {
1395 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1396 print_generic_expr (dump_file, var, 0);
1397 fprintf (dump_file, "\n");
1398 }
1399 ret = true;
1400 }
1401
1402 return ret;
1403 }
1404
1405 /* Sort all accesses for the given variable, check for partial overlaps and
1406 return NULL if there are any. If there are none, pick a representative for
1407 each combination of offset and size and create a linked list out of them.
1408 Return the pointer to the first representative and make sure it is the first
1409 one in the vector of accesses. */
1410
1411 static struct access *
1412 sort_and_splice_var_accesses (tree var)
1413 {
1414 int i, j, access_count;
1415 struct access *res, **prev_acc_ptr = &res;
1416 VEC (access_p, heap) *access_vec;
1417 bool first = true;
1418 HOST_WIDE_INT low = -1, high = 0;
1419
1420 access_vec = get_base_access_vector (var);
1421 if (!access_vec)
1422 return NULL;
1423 access_count = VEC_length (access_p, access_vec);
1424
1425 /* Sort by <OFFSET, SIZE>. */
1426 qsort (VEC_address (access_p, access_vec), access_count, sizeof (access_p),
1427 compare_access_positions);
1428
1429 i = 0;
1430 while (i < access_count)
1431 {
1432 struct access *access = VEC_index (access_p, access_vec, i);
1433 bool grp_write = access->write;
1434 bool grp_read = !access->write;
1435 bool multiple_reads = false;
1436 bool grp_partial_lhs = access->grp_partial_lhs;
1437 bool grp_different_types = false;
1438 bool first_scalar = is_gimple_reg_type (access->type);
1439 bool unscalarizable_region = access->grp_unscalarizable_region;
1440
1441 if (first || access->offset >= high)
1442 {
1443 first = false;
1444 low = access->offset;
1445 high = access->offset + access->size;
1446 }
1447 else if (access->offset > low && access->offset + access->size > high)
1448 return NULL;
1449 else
1450 gcc_assert (access->offset >= low
1451 && access->offset + access->size <= high);
1452
1453 j = i + 1;
1454 while (j < access_count)
1455 {
1456 struct access *ac2 = VEC_index (access_p, access_vec, j);
1457 if (ac2->offset != access->offset || ac2->size != access->size)
1458 break;
1459 if (ac2->write)
1460 grp_write = true;
1461 else
1462 {
1463 if (grp_read)
1464 multiple_reads = true;
1465 else
1466 grp_read = true;
1467 }
1468 grp_partial_lhs |= ac2->grp_partial_lhs;
1469 grp_different_types |= !types_compatible_p (access->type, ac2->type);
1470 unscalarizable_region |= ac2->grp_unscalarizable_region;
1471 relink_to_new_repr (access, ac2);
1472
1473 /* If there are both aggregate-type and scalar-type accesses with
1474 this combination of size and offset, the comparison function
1475 should have put the scalars first. */
1476 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
1477 ac2->group_representative = access;
1478 j++;
1479 }
1480
1481 i = j;
1482
1483 access->group_representative = access;
1484 access->grp_write = grp_write;
1485 access->grp_read = grp_read;
1486 access->grp_hint = multiple_reads;
1487 access->grp_partial_lhs = grp_partial_lhs;
1488 access->grp_different_types = grp_different_types;
1489 access->grp_unscalarizable_region = unscalarizable_region;
1490 if (access->first_link)
1491 add_access_to_work_queue (access);
1492
1493 *prev_acc_ptr = access;
1494 prev_acc_ptr = &access->next_grp;
1495 }
1496
1497 gcc_assert (res == VEC_index (access_p, access_vec, 0));
1498 return res;
1499 }
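
/* For instance, three hypothetical accesses to one variable sorted as

     (offset 0, size 32, write)
     (offset 0, size 32, read)
     (offset 32, size 32, read)

   are spliced into two group representatives, the first of which has both
   grp_write and grp_read set, and the two are linked through next_grp. */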
1500
1501 /* Create a variable for the given ACCESS which determines the type, name and a
1502 few other properties. Return the variable declaration and store it also to
1503 ACCESS->replacement_decl. */
1504
1505 static tree
1506 create_access_replacement (struct access *access)
1507 {
1508 tree repl;
1509
1510 repl = create_tmp_var (access->type, "SR");
1511 get_var_ann (repl);
1512 add_referenced_var (repl);
1513 mark_sym_for_renaming (repl);
1514
1515 if (!access->grp_partial_lhs
1516 && (TREE_CODE (access->type) == COMPLEX_TYPE
1517 || TREE_CODE (access->type) == VECTOR_TYPE))
1518 DECL_GIMPLE_REG_P (repl) = 1;
1519
1520 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
1521 DECL_ARTIFICIAL (repl) = 1;
1522
1523 if (DECL_NAME (access->base)
1524 && !DECL_IGNORED_P (access->base)
1525 && !DECL_ARTIFICIAL (access->base))
1526 {
1527 char *pretty_name = make_fancy_name (access->expr);
1528
1529 DECL_NAME (repl) = get_identifier (pretty_name);
1530 obstack_free (&name_obstack, pretty_name);
1531
1532 SET_DECL_DEBUG_EXPR (repl, access->expr);
1533 DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
1534 DECL_IGNORED_P (repl) = 0;
1535 }
1536
1537 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
1538 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
1539
1540 if (dump_file)
1541 {
1542 fprintf (dump_file, "Created a replacement for ");
1543 print_generic_expr (dump_file, access->base, 0);
1544 fprintf (dump_file, " offset: %u, size: %u: ",
1545 (unsigned) access->offset, (unsigned) access->size);
1546 print_generic_expr (dump_file, repl, 0);
1547 fprintf (dump_file, "\n");
1548 }
1549 sra_stats.replacements++;
1550
1551 return repl;
1552 }
1553
1554 /* Return ACCESS scalar replacement, create it if it does not exist yet. */
1555
1556 static inline tree
1557 get_access_replacement (struct access *access)
1558 {
1559 gcc_assert (access->grp_to_be_replaced);
1560
1561 if (!access->replacement_decl)
1562 access->replacement_decl = create_access_replacement (access);
1563 return access->replacement_decl;
1564 }
1565
1566 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
1567 linked list along the way. Stop when *ACCESS is NULL or the access pointed
1568 to by it is not "within" the root. */
1569
1570 static void
1571 build_access_subtree (struct access **access)
1572 {
1573 struct access *root = *access, *last_child = NULL;
1574 HOST_WIDE_INT limit = root->offset + root->size;
1575
1576 *access = (*access)->next_grp;
1577 while (*access && (*access)->offset + (*access)->size <= limit)
1578 {
1579 if (!last_child)
1580 root->first_child = *access;
1581 else
1582 last_child->next_sibling = *access;
1583 last_child = *access;
1584
1585 build_access_subtree (access);
1586 }
1587 }
1588
1589 /* Build a tree of access representatives. ACCESS is the pointer to the first
1590 one, others are linked in a list by the next_grp field. */
1592
1593 static void
1594 build_access_trees (struct access *access)
1595 {
1596 while (access)
1597 {
1598 struct access *root = access;
1599
1600 build_access_subtree (&access);
1601 root->next_grp = access;
1602 }
1603 }
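
/* For example, hypothetical representatives sorted as

     (offset 0, size 64), (offset 0, size 32),
     (offset 32, size 32), (offset 64, size 32)

   become a forest of two trees: the first representative becomes a root
   with the following two as its children, and (offset 64, size 32)
   becomes the next root reachable through the next_grp field of the
   first one. */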
1604
1605 /* Return true if EXPR contains some ARRAY_REFs into an array with a
1606 non-constant low bound. */
1607
1608 static bool
1609 expr_with_var_bounded_array_refs_p (tree expr)
1610 {
1611 while (handled_component_p (expr))
1612 {
1613 if (TREE_CODE (expr) == ARRAY_REF
1614 && !host_integerp (array_ref_low_bound (expr), 0))
1615 return true;
1616 expr = TREE_OPERAND (expr, 0);
1617 }
1618 return false;
1619 }
1620
1621 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
1622 that seems beneficial and when ALLOW_REPLACEMENTS allows it. Also set
1623 all sorts of access flags appropriately along the way, notably always set
1624 grp_read when MARK_READ is true and grp_write when MARK_WRITE is true. */
1625
1626 static bool
1627 analyze_access_subtree (struct access *root, bool allow_replacements,
1628 bool mark_read, bool mark_write)
1629 {
1630 struct access *child;
1631 HOST_WIDE_INT limit = root->offset + root->size;
1632 HOST_WIDE_INT covered_to = root->offset;
1633 bool scalar = is_gimple_reg_type (root->type);
1634 bool hole = false, sth_created = false;
1635 bool direct_read = root->grp_read;
1636
1637 if (mark_read)
1638 root->grp_read = true;
1639 else if (root->grp_read)
1640 mark_read = true;
1641
1642 if (mark_write)
1643 root->grp_write = true;
1644 else if (root->grp_write)
1645 mark_write = true;
1646
1647 if (root->grp_unscalarizable_region)
1648 allow_replacements = false;
1649
1650 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
1651 allow_replacements = false;
1652
1653 for (child = root->first_child; child; child = child->next_sibling)
1654 {
1655 if (!hole && child->offset < covered_to)
1656 hole = true;
1657 else
1658 covered_to += child->size;
1659
1660 sth_created |= analyze_access_subtree (child, allow_replacements,
1661 mark_read, mark_write);
1662
1663 root->grp_unscalarized_data |= child->grp_unscalarized_data;
1664 hole |= !child->grp_covered;
1665 }
1666
1667 if (allow_replacements && scalar && !root->first_child
1668 && (root->grp_hint
1669 || (direct_read && root->grp_write)))
1670 {
1671 if (dump_file && (dump_flags & TDF_DETAILS))
1672 {
1673 fprintf (dump_file, "Marking ");
1674 print_generic_expr (dump_file, root->base, 0);
1675 fprintf (dump_file, " offset: %u, size: %u: ",
1676 (unsigned) root->offset, (unsigned) root->size);
1677 fprintf (dump_file, " to be replaced.\n");
1678 }
1679
1680 root->grp_to_be_replaced = 1;
1681 sth_created = true;
1682 hole = false;
1683 }
1684 else if (covered_to < limit)
1685 hole = true;
1686
1687 if (sth_created && !hole)
1688 {
1689 root->grp_covered = 1;
1690 return true;
1691 }
1692 if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
1693 root->grp_unscalarized_data = 1; /* not covered and written to */
1694 if (sth_created)
1695 return true;
1696 return false;
1697 }
1698
1699 /* Analyze all access trees linked by next_grp by means of
1700 analyze_access_subtree. */
1701 static bool
1702 analyze_access_trees (struct access *access)
1703 {
1704 bool ret = false;
1705
1706 while (access)
1707 {
1708 if (analyze_access_subtree (access, true, false, false))
1709 ret = true;
1710 access = access->next_grp;
1711 }
1712
1713 return ret;
1714 }
1715
1716 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and
1717 with size SIZE would conflict with an already existing one. If exactly such
1718 a child already exists in LACC, store a pointer to it in EXACT_MATCH. */
1719
1720 static bool
1721 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
1722 HOST_WIDE_INT size, struct access **exact_match)
1723 {
1724 struct access *child;
1725
1726 for (child = lacc->first_child; child; child = child->next_sibling)
1727 {
1728 if (child->offset == norm_offset && child->size == size)
1729 {
1730 *exact_match = child;
1731 return true;
1732 }
1733
1734 if (child->offset < norm_offset + size
1735 && child->offset + child->size > norm_offset)
1736 return true;
1737 }
1738
1739 return false;
1740 }
1741
1742 /* Create a new child access of PARENT, with all properties just like MODEL
1743 except for its offset and with its grp_write true and grp_read false.
1744 Return the new access or NULL if it cannot be created. Note that this access
1745 is created long after all splicing and sorting; it's not located in any
1746 access vector and is automatically a representative of its group. */
1747
1748 static struct access *
1749 create_artificial_child_access (struct access *parent, struct access *model,
1750 HOST_WIDE_INT new_offset)
1751 {
1752 struct access *access;
1753 struct access **child;
1754 tree expr = parent->base;
1755
1756 gcc_assert (!model->grp_unscalarizable_region);
1757
1758 if (!build_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
1759 model->type, false))
1760 return NULL;
1761
1762 access = (struct access *) pool_alloc (access_pool);
1763 memset (access, 0, sizeof (struct access));
1764 access->base = parent->base;
1765 access->expr = expr;
1766 access->offset = new_offset;
1767 access->size = model->size;
1768 access->type = model->type;
1769 access->grp_write = true;
1770 access->grp_read = false;
1771
1772 child = &parent->first_child;
1773 while (*child && (*child)->offset < new_offset)
1774 child = &(*child)->next_sibling;
1775
1776 access->next_sibling = *child;
1777 *child = access;
1778
1779 return access;
1780 }
1781
1782
1783 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
1784 true if any new subaccess was created. Additionally, if RACC is a scalar
1785 access but LACC is not, change the type of the latter, if possible. */
1786
1787 static bool
1788 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
1789 {
1790 struct access *rchild;
1791 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
1792 bool ret = false;
1793
1794 if (is_gimple_reg_type (lacc->type)
1795 || lacc->grp_unscalarizable_region
1796 || racc->grp_unscalarizable_region)
1797 return false;
1798
1799 if (!lacc->first_child && !racc->first_child
1800 && is_gimple_reg_type (racc->type))
1801 {
1802 tree t = lacc->base;
1803
1804 if (build_ref_for_offset (&t, TREE_TYPE (t), lacc->offset, racc->type,
1805 false))
1806 {
1807 lacc->expr = t;
1808 lacc->type = racc->type;
1809 }
1810 return false;
1811 }
1812
1813 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
1814 {
1815 struct access *new_acc = NULL;
1816 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
1817
1818 if (rchild->grp_unscalarizable_region)
1819 continue;
1820
1821 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
1822 &new_acc))
1823 {
1824 if (new_acc)
1825 {
1826 rchild->grp_hint = 1;
1827 new_acc->grp_hint |= new_acc->grp_read;
1828 if (rchild->first_child)
1829 ret |= propagate_subaccesses_across_link (new_acc, rchild);
1830 }
1831 continue;
1832 }
1833
1834 /* If a union field (or a part of one) is on the RHS of an assignment, it
1835 can have sub-accesses which do not make sense on the LHS (PR 40351).
1836 Check that this is not the case. */
1837 if (!build_ref_for_offset (NULL, TREE_TYPE (lacc->base), norm_offset,
1838 rchild->type, false))
1839 continue;
1840
1841 rchild->grp_hint = 1;
1842 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
1843 if (new_acc)
1844 {
1845 ret = true;
1846 if (racc->first_child)
1847 propagate_subaccesses_across_link (new_acc, rchild);
1848 }
1849 }
1850
1851 return ret;
1852 }
1853
1854 /* Propagate all subaccesses across assignment links. */
1855
1856 static void
1857 propagate_all_subaccesses (void)
1858 {
1859 while (work_queue_head)
1860 {
1861 struct access *racc = pop_access_from_work_queue ();
1862 struct assign_link *link;
1863
1864 gcc_assert (racc->first_link);
1865
1866 for (link = racc->first_link; link; link = link->next)
1867 {
1868 struct access *lacc = link->lacc;
1869
1870 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
1871 continue;
1872 lacc = lacc->group_representative;
1873 if (propagate_subaccesses_across_link (lacc, racc)
1874 && lacc->first_link)
1875 add_access_to_work_queue (lacc);
1876 }
1877 }
1878 }
1879
1880 /* Go through all accesses collected throughout the (intraprocedural) analysis
1881 stage, exclude overlapping ones, identify representatives and build trees
1882 out of them, making decisions about scalarization on the way. Return true
1883 iff there are any to-be-scalarized variables after this stage. */
1884
1885 static bool
1886 analyze_all_variable_accesses (void)
1887 {
1888 int res = 0;
1889 bitmap tmp = BITMAP_ALLOC (NULL);
1890 bitmap_iterator bi;
1891 unsigned i;
1892
1893 bitmap_copy (tmp, candidate_bitmap);
1894 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
1895 {
1896 tree var = referenced_var (i);
1897 struct access *access;
1898
1899 access = sort_and_splice_var_accesses (var);
1900 if (access)
1901 build_access_trees (access);
1902 else
1903 disqualify_candidate (var,
1904 "No or inhibitingly overlapping accesses.");
1905 }
1906
1907 propagate_all_subaccesses ();
1908
1909 bitmap_copy (tmp, candidate_bitmap);
1910 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
1911 {
1912 tree var = referenced_var (i);
1913 struct access *access = get_first_repr_for_decl (var);
1914
1915 if (analyze_access_trees (access))
1916 {
1917 res++;
1918 if (dump_file && (dump_flags & TDF_DETAILS))
1919 {
1920 fprintf (dump_file, "\nAccess trees for ");
1921 print_generic_expr (dump_file, var, 0);
1922 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
1923 dump_access_tree (dump_file, access);
1924 fprintf (dump_file, "\n");
1925 }
1926 }
1927 else
1928 disqualify_candidate (var, "No scalar replacements to be created.");
1929 }
1930
1931 BITMAP_FREE (tmp);
1932
1933 if (res)
1934 {
1935 statistics_counter_event (cfun, "Scalarized aggregates", res);
1936 return true;
1937 }
1938 else
1939 return false;
1940 }
1941
1942 /* Return true iff a reference expression into aggregate AGG can be built for
1943 every single to-be-replaced access that is a child of ACCESS, one of its
1944 siblings or a child of one of its siblings. TOP_OFFSET is the offset of the
1945 processed access subtree which has to be subtracted from each access's offset. */
1946
1947 static bool
1948 ref_expr_for_all_replacements_p (struct access *access, tree agg,
1949 HOST_WIDE_INT top_offset)
1950 {
1951 do
1952 {
1953 if (access->grp_to_be_replaced
1954 && !build_ref_for_offset (NULL, TREE_TYPE (agg),
1955 access->offset - top_offset,
1956 access->type, false))
1957 return false;
1958
1959 if (access->first_child
1960 && !ref_expr_for_all_replacements_p (access->first_child, agg,
1961 top_offset))
1962 return false;
1963
1964 access = access->next_sibling;
1965 }
1966 while (access);
1967
1968 return true;
1969 }
1970
1971 /* Generate statements copying scalar replacements of accesses within a subtree
1972 into or out of AGG. ACCESS is the first child of the root of the subtree to
1973 be processed. AGG is an aggregate type expression (it can be a declaration
1974 but does not have to be; it can for example also be an indirect_ref).
1975 TOP_OFFSET is the offset of the processed subtree which has to be subtracted
1976 from offsets of individual accesses to get corresponding offsets for AGG.
1977 If CHUNK_SIZE is non-zero, copy only replacements in the interval
1978 <start_offset, start_offset + chunk_size>, otherwise copy all. GSI is a
1979 statement iterator used to place the new statements. WRITE should be true
1980 when the statements should write from AGG to the replacement and false if
1981 vice versa. If INSERT_AFTER is true, new statements will be added after the
1982 current statement in GSI, otherwise they will be added before the current
1983 statement. */
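/* As a hypothetical illustration (the replacement names are made up): for

     struct S { int a; float f; } s;

   with scalar replacements s$a and s$f, calling this function with WRITE
   true loads the replacements from the aggregate:

     s$a = s.a;
     s$f = s.f;

   whereas WRITE false stores them back:

     s.a = s$a;
     s.f = s$f;  */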
1984
1985 static void
1986 generate_subtree_copies (struct access *access, tree agg,
1987 HOST_WIDE_INT top_offset,
1988 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
1989 gimple_stmt_iterator *gsi, bool write,
1990 bool insert_after)
1991 {
1992 do
1993 {
1994 tree expr = agg;
1995
1996 if (chunk_size && access->offset >= start_offset + chunk_size)
1997 return;
1998
1999 if (access->grp_to_be_replaced
2000 && (chunk_size == 0
2001 || access->offset + access->size > start_offset))
2002 {
2003 tree repl = get_access_replacement (access);
2004 bool ref_found;
2005 gimple stmt;
2006
2007 ref_found = build_ref_for_offset (&expr, TREE_TYPE (agg),
2008 access->offset - top_offset,
2009 access->type, false);
2010 gcc_assert (ref_found);
2011
2012 if (write)
2013 {
2014 if (access->grp_partial_lhs)
2015 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2016 !insert_after,
2017 insert_after ? GSI_NEW_STMT
2018 : GSI_SAME_STMT);
2019 stmt = gimple_build_assign (repl, expr);
2020 }
2021 else
2022 {
2023 TREE_NO_WARNING (repl) = 1;
2024 if (access->grp_partial_lhs)
2025 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2026 !insert_after,
2027 insert_after ? GSI_NEW_STMT
2028 : GSI_SAME_STMT);
2029 stmt = gimple_build_assign (expr, repl);
2030 }
2031
2032 if (insert_after)
2033 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2034 else
2035 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2036 update_stmt (stmt);
2037 sra_stats.subtree_copies++;
2038 }
2039
2040 if (access->first_child)
2041 generate_subtree_copies (access->first_child, agg, top_offset,
2042 start_offset, chunk_size, gsi,
2043 write, insert_after);
2044
2045 access = access->next_sibling;
2046 }
2047 while (access);
2048 }
2049
2050 /* Assign zero to all scalar replacements in an access subtree. ACCESS is
2051 the root of the subtree to be processed. GSI is the statement iterator used
2052 for inserting statements which are added after the current statement if
2053 INSERT_AFTER is true or before it otherwise. */
2054
2055 static void
2056 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2057 bool insert_after)
2058
2059 {
2060 struct access *child;
2061
2062 if (access->grp_to_be_replaced)
2063 {
2064 gimple stmt;
2065
2066 stmt = gimple_build_assign (get_access_replacement (access),
2067 fold_convert (access->type,
2068 integer_zero_node));
2069 if (insert_after)
2070 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2071 else
2072 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2073 update_stmt (stmt);
2074 }
2075
2076 for (child = access->first_child; child; child = child->next_sibling)
2077 init_subtree_with_zero (child, gsi, insert_after);
2078 }
2079
2080 /* Search for an access representative for the given expression EXPR and
2081 return it or NULL if it cannot be found. */
2082
2083 static struct access *
2084 get_access_for_expr (tree expr)
2085 {
2086 HOST_WIDE_INT offset, size, max_size;
2087 tree base;
2088
2089 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2090 a different size than the size of their argument and we need the latter
2091 one. */
2092 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2093 expr = TREE_OPERAND (expr, 0);
2094
2095 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2096 if (max_size == -1 || !DECL_P (base))
2097 return NULL;
2098
2099 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2100 return NULL;
2101
2102 return get_var_base_offset_size_access (base, offset, max_size);
2103 }
2104
2105 /* Callback for scan_function. Replace the expression EXPR with a scalar
2106 replacement if there is one and generate other statements to do type
2107 conversion or subtree copying if necessary. GSI is used to place newly
2108 created statements; WRITE is true if the expression is being written to (it
2109 is on the LHS of a statement or an output in an assembly statement). */
2110
2111 static bool
2112 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write,
2113 void *data ATTRIBUTE_UNUSED)
2114 {
2115 struct access *access;
2116 tree type, bfr;
2117
2118 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2119 {
2120 bfr = *expr;
2121 expr = &TREE_OPERAND (*expr, 0);
2122 }
2123 else
2124 bfr = NULL_TREE;
2125
2126 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2127 expr = &TREE_OPERAND (*expr, 0);
2128 access = get_access_for_expr (*expr);
2129 if (!access)
2130 return false;
2131 type = TREE_TYPE (*expr);
2132
2133 if (access->grp_to_be_replaced)
2134 {
2135 tree repl = get_access_replacement (access);
2136 /* If we replace a non-register typed access simply use the original
2137 access expression to extract the scalar component afterwards.
2138 This happens if scalarizing a function return value or parameter
2139 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2140 gcc.c-torture/compile/20011217-1.c.
2141
2142 We also want to use this when accessing a complex or vector which can
2143 be accessed as a different type too, potentially creating a need for
2144 type conversion (see PR42196). */
2145 if (!is_gimple_reg_type (type)
2146 || (access->grp_different_types
2147 && (TREE_CODE (type) == COMPLEX_TYPE
2148 || TREE_CODE (type) == VECTOR_TYPE)))
2149 {
2150 tree ref = access->base;
2151 bool ok;
2152
2153 ok = build_ref_for_offset (&ref, TREE_TYPE (ref),
2154 access->offset, access->type, false);
2155 gcc_assert (ok);
2156
2157 if (write)
2158 {
2159 gimple stmt;
2160
2161 if (access->grp_partial_lhs)
2162 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2163 false, GSI_NEW_STMT);
2164 stmt = gimple_build_assign (repl, ref);
2165 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2166 }
2167 else
2168 {
2169 gimple stmt;
2170
2171 if (access->grp_partial_lhs)
2172 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2173 true, GSI_SAME_STMT);
2174 stmt = gimple_build_assign (ref, repl);
2175 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2176 }
2177 }
2178 else
2179 {
2180 gcc_assert (useless_type_conversion_p (type, access->type));
2181 *expr = repl;
2182 }
2183 sra_stats.exprs++;
2184 }
2185
2186 if (access->first_child)
2187 {
2188 HOST_WIDE_INT start_offset, chunk_size;
2189 if (bfr
2190 && host_integerp (TREE_OPERAND (bfr, 1), 1)
2191 && host_integerp (TREE_OPERAND (bfr, 2), 1))
2192 {
2193 chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2194 start_offset = access->offset
2195 + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2196 }
2197 else
2198 start_offset = chunk_size = 0;
2199
2200 generate_subtree_copies (access->first_child, access->base, 0,
2201 start_offset, chunk_size, gsi, write, write);
2202 }
2203 return true;
2204 }
2205
2206 /* Where scalar replacements of the RHS have been written to when a replacement
2207 of the LHS of an assignment cannot be directly loaded from a replacement of
2208 the RHS. */
2209 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2210 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2211 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2212
2213 /* Store all replacements in the access tree rooted in TOP_RACC either to their
2214 base aggregate if there are unscalarized data or directly to LHS
2215 otherwise. */
2216
2217 static enum unscalarized_data_handling
2218 handle_unscalarized_data_in_subtree (struct access *top_racc, tree lhs,
2219 gimple_stmt_iterator *gsi)
2220 {
2221 if (top_racc->grp_unscalarized_data)
2222 {
2223 generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2224 gsi, false, false);
2225 return SRA_UDH_RIGHT;
2226 }
2227 else
2228 {
2229 generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2230 0, 0, gsi, false, false);
2231 return SRA_UDH_LEFT;
2232 }
2233 }
2234
2235
2236 /* Try to generate statements to load all sub-replacements in an access
2237 (sub)tree (LACC is the first child) from scalar replacements in the TOP_RACC
2238 (sub)tree. If that is not possible, refresh the TOP_RACC base aggregate and
2239 load the accesses from it. LEFT_OFFSET is the offset of the left whole
2240 subtree being copied, RIGHT_OFFSET is the same thing for the right subtree.
2241 OLD_GSI and NEW_GSI are stmt iterators for insertions before and after the
2242 processed assignment, respectively. *REFRESHED tracks whether the RHS top
2243 aggregate has already been refreshed from its scalar reductions; it is updated when this function has to do that. */
2244
2245 static void
2246 load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2247 HOST_WIDE_INT left_offset,
2248 HOST_WIDE_INT right_offset,
2249 gimple_stmt_iterator *old_gsi,
2250 gimple_stmt_iterator *new_gsi,
2251 enum unscalarized_data_handling *refreshed,
2252 tree lhs)
2253 {
2254 location_t loc = EXPR_LOCATION (lacc->expr);
2255 do
2256 {
2257 if (lacc->grp_to_be_replaced)
2258 {
2259 struct access *racc;
2260 HOST_WIDE_INT offset = lacc->offset - left_offset + right_offset;
2261 gimple stmt;
2262 tree rhs;
2263
2264 racc = find_access_in_subtree (top_racc, offset, lacc->size);
2265 if (racc && racc->grp_to_be_replaced)
2266 {
2267 rhs = get_access_replacement (racc);
2268 if (!useless_type_conversion_p (lacc->type, racc->type))
2269 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2270 }
2271 else
2272 {
2273 /* No suitable access on the right hand side, need to load from
2274 the aggregate. See if we have to update it first... */
2275 if (*refreshed == SRA_UDH_NONE)
2276 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2277 lhs, old_gsi);
2278
2279 if (*refreshed == SRA_UDH_LEFT)
2280 {
2281 bool repl_found;
2282
2283 rhs = lacc->base;
2284 repl_found = build_ref_for_offset (&rhs, TREE_TYPE (rhs),
2285 lacc->offset, lacc->type,
2286 false);
2287 gcc_assert (repl_found);
2288 }
2289 else
2290 {
2291 bool repl_found;
2292
2293 rhs = top_racc->base;
2294 repl_found = build_ref_for_offset (&rhs,
2295 TREE_TYPE (top_racc->base),
2296 offset, lacc->type, false);
2297 gcc_assert (repl_found);
2298 }
2299 }
2300
2301 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2302 gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2303 update_stmt (stmt);
2304 sra_stats.subreplacements++;
2305 }
2306 else if (*refreshed == SRA_UDH_NONE
2307 && lacc->grp_read && !lacc->grp_covered)
2308 *refreshed = handle_unscalarized_data_in_subtree (top_racc, lhs,
2309 old_gsi);
2310
2311 if (lacc->first_child)
2312 load_assign_lhs_subreplacements (lacc->first_child, top_racc,
2313 left_offset, right_offset,
2314 old_gsi, new_gsi, refreshed, lhs);
2315 lacc = lacc->next_sibling;
2316 }
2317 while (lacc);
2318 }
2319
2320 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
2321 to the assignment and GSI is the statement iterator pointing at it. Returns
2322 the same values as sra_modify_assign. */
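/* For instance (illustrative only), given an assignment from an empty
   CONSTRUCTOR to an aggregate

     struct S { int a; int b; } s;

   whose fields both have scalar replacements (called s$a and s$b here just
   for the example), the zeroing branch below emits

     s$a = 0;
     s$b = 0;

   and removes the original statement when the access tree covers the whole
   of s.  */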
2323
2324 static enum scan_assign_result
2325 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2326 {
2327 tree lhs = gimple_assign_lhs (*stmt);
2328 struct access *acc;
2329
2330 acc = get_access_for_expr (lhs);
2331 if (!acc)
2332 return SRA_SA_NONE;
2333
2334 if (VEC_length (constructor_elt,
2335 CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
2336 {
2337 /* I have never seen this code path trigger but if it can happen the
2338 following should handle it gracefully. */
2339 if (access_has_children_p (acc))
2340 generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2341 true, true);
2342 return SRA_SA_PROCESSED;
2343 }
2344
2345 if (acc->grp_covered)
2346 {
2347 init_subtree_with_zero (acc, gsi, false);
2348 unlink_stmt_vdef (*stmt);
2349 gsi_remove (gsi, true);
2350 return SRA_SA_REMOVED;
2351 }
2352 else
2353 {
2354 init_subtree_with_zero (acc, gsi, true);
2355 return SRA_SA_PROCESSED;
2356 }
2357 }
2358
2359
2360 /* Callback of scan_function to process assign statements. It examines both
2361 sides of the statement, replaces them with a scalar replacement if there is
2362 one, and generates copying of replacements if scalarized aggregates have
2363 been used in the assignment. STMT is a pointer to the assign statement; GSI
2364 is used to hold generated statements for type conversions and subtree
2365 copying. */
2366
2367 static enum scan_assign_result
2368 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
2369 void *data ATTRIBUTE_UNUSED)
2370 {
2371 struct access *lacc, *racc;
2372 tree lhs, rhs;
2373 bool modify_this_stmt = false;
2374 bool force_gimple_rhs = false;
2375 location_t loc = gimple_location (*stmt);
2376
2377 if (!gimple_assign_single_p (*stmt))
2378 return SRA_SA_NONE;
2379 lhs = gimple_assign_lhs (*stmt);
2380 rhs = gimple_assign_rhs1 (*stmt);
2381
2382 if (TREE_CODE (rhs) == CONSTRUCTOR)
2383 return sra_modify_constructor_assign (stmt, gsi);
2384
2385 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
2386 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
2387 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
2388 {
2389 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
2390 gsi, false, data);
2391 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
2392 gsi, true, data);
2393 return modify_this_stmt ? SRA_SA_PROCESSED : SRA_SA_NONE;
2394 }
2395
2396 lacc = get_access_for_expr (lhs);
2397 racc = get_access_for_expr (rhs);
2398 if (!lacc && !racc)
2399 return SRA_SA_NONE;
2400
2401 if (lacc && lacc->grp_to_be_replaced)
2402 {
2403 lhs = get_access_replacement (lacc);
2404 gimple_assign_set_lhs (*stmt, lhs);
2405 modify_this_stmt = true;
2406 if (lacc->grp_partial_lhs)
2407 force_gimple_rhs = true;
2408 sra_stats.exprs++;
2409 }
2410
2411 if (racc && racc->grp_to_be_replaced)
2412 {
2413 rhs = get_access_replacement (racc);
2414 modify_this_stmt = true;
2415 if (racc->grp_partial_lhs)
2416 force_gimple_rhs = true;
2417 sra_stats.exprs++;
2418 }
2419
2420 if (modify_this_stmt)
2421 {
2422 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2423 {
2424 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
2425 ??? This should move to fold_stmt which we simply should
2426 call after building a VIEW_CONVERT_EXPR here. */
2427 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
2428 && !access_has_children_p (lacc))
2429 {
2430 tree expr = lhs;
2431 if (build_ref_for_offset (&expr, TREE_TYPE (lhs), 0,
2432 TREE_TYPE (rhs), false))
2433 {
2434 lhs = expr;
2435 gimple_assign_set_lhs (*stmt, expr);
2436 }
2437 }
2438 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
2439 && !access_has_children_p (racc))
2440 {
2441 tree expr = rhs;
2442 if (build_ref_for_offset (&expr, TREE_TYPE (rhs), 0,
2443 TREE_TYPE (lhs), false))
2444 rhs = expr;
2445 }
2446 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2447 {
2448 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
2449 if (is_gimple_reg_type (TREE_TYPE (lhs))
2450 && TREE_CODE (lhs) != SSA_NAME)
2451 force_gimple_rhs = true;
2452 }
2453 }
2454
2455 if (force_gimple_rhs)
2456 rhs = force_gimple_operand_gsi (gsi, rhs, true, NULL_TREE,
2457 true, GSI_SAME_STMT);
2458 if (gimple_assign_rhs1 (*stmt) != rhs)
2459 {
2460 gimple_assign_set_rhs_from_tree (gsi, rhs);
2461 gcc_assert (*stmt == gsi_stmt (*gsi));
2462 }
2463 }
2464
2465 /* From this point on, the function deals with assignments in between
2466 aggregates when at least one has scalar reductions of some of its
2467 components. There are three possible scenarios: 1) both the LHS and the RHS
2468 have to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
2469
2470 In the first case, we would like to load the LHS components from RHS
2471 components whenever possible. If that is not possible, we would like to
2472 read it directly from the RHS (after updating it by storing in it its own
2473 components). If there are some necessary unscalarized data in the LHS,
2474 those will be loaded by the original assignment too. If neither of these
2475 cases happen, the original statement can be removed. Most of this is done
2476 by load_assign_lhs_subreplacements.
2477
2478 In the second case, we would like to store all RHS scalarized components
2479 directly into LHS and if they cover the aggregate completely, remove the
2480 statement too. In the third case, we want the LHS components to be loaded
2481 directly from the RHS (DSE will remove the original statement if it
2482 becomes redundant).
2483
2484 This is a bit complex but manageable when types match and when unions do
2485 not cause confusion in a way that we cannot really load a component of LHS
2486 from the RHS or vice versa (the access representing this level can have
2487 subaccesses that are accessible only through a different union field at a
2488 higher level - different from the one used in the examined expression).
2489 Unions are fun.
2490
2491 Therefore, I specially handle a fourth case, happening when there is a
2492 specific type cast or it is impossible to locate a scalarized subaccess on
2493 the other side of the expression. If that happens, I simply "refresh" the
2494 RHS by storing its scalarized components in it, leave the original statement
2495 there to do the copying and then load the scalar replacements of the LHS.
2496 This is what the first branch does. */
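/* A made-up example of the first scenario: for

     struct S { int a; int b; } x, y;
     ...
     x = y;

   with replacements x$a, x$b, y$a and y$b (names invented for this comment),
   the loads generated below become

     x$a = y$a;
     x$b = y$b;

   and, unless unscalarized data had to be flushed to the RHS first, the
   original aggregate assignment is removed.  */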
2497
2498 if (contains_view_convert_expr_p (rhs) || contains_view_convert_expr_p (lhs)
2499 || (access_has_children_p (racc)
2500 && !ref_expr_for_all_replacements_p (racc, lhs, racc->offset))
2501 || (access_has_children_p (lacc)
2502 && !ref_expr_for_all_replacements_p (lacc, rhs, lacc->offset)))
2503 {
2504 if (access_has_children_p (racc))
2505 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
2506 gsi, false, false);
2507 if (access_has_children_p (lacc))
2508 generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
2509 gsi, true, true);
2510 sra_stats.separate_lhs_rhs_handling++;
2511 }
2512 else
2513 {
2514 if (access_has_children_p (lacc) && access_has_children_p (racc))
2515 {
2516 gimple_stmt_iterator orig_gsi = *gsi;
2517 enum unscalarized_data_handling refreshed;
2518
2519 if (lacc->grp_read && !lacc->grp_covered)
2520 refreshed = handle_unscalarized_data_in_subtree (racc, lhs, gsi);
2521 else
2522 refreshed = SRA_UDH_NONE;
2523
2524 load_assign_lhs_subreplacements (lacc->first_child, racc,
2525 lacc->offset, racc->offset,
2526 &orig_gsi, gsi, &refreshed, lhs);
2527 if (refreshed != SRA_UDH_RIGHT)
2528 {
2529 if (*stmt == gsi_stmt (*gsi))
2530 gsi_next (gsi);
2531
2532 unlink_stmt_vdef (*stmt);
2533 gsi_remove (&orig_gsi, true);
2534 sra_stats.deleted++;
2535 return SRA_SA_REMOVED;
2536 }
2537 }
2538 else
2539 {
2540 if (access_has_children_p (racc))
2541 {
2542 if (!racc->grp_unscalarized_data)
2543 {
2544 generate_subtree_copies (racc->first_child, lhs,
2545 racc->offset, 0, 0, gsi,
2546 false, false);
2547 gcc_assert (*stmt == gsi_stmt (*gsi));
2548 unlink_stmt_vdef (*stmt);
2549 gsi_remove (gsi, true);
2550 sra_stats.deleted++;
2551 return SRA_SA_REMOVED;
2552 }
2553 else
2554 generate_subtree_copies (racc->first_child, lhs,
2555 racc->offset, 0, 0, gsi, false, true);
2556 }
2557 else if (access_has_children_p (lacc))
2558 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
2559 0, 0, gsi, true, true);
2560 }
2561 }
2562 return modify_this_stmt ? SRA_SA_PROCESSED : SRA_SA_NONE;
2563 }
2564
2565 /* Generate statements initializing scalar replacements of parts of function
2566 parameters. */
2567
2568 static void
2569 initialize_parameter_reductions (void)
2570 {
2571 gimple_stmt_iterator gsi;
2572 gimple_seq seq = NULL;
2573 tree parm;
2574
2575 for (parm = DECL_ARGUMENTS (current_function_decl);
2576 parm;
2577 parm = TREE_CHAIN (parm))
2578 {
2579 VEC (access_p, heap) *access_vec;
2580 struct access *access;
2581
2582 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
2583 continue;
2584 access_vec = get_base_access_vector (parm);
2585 if (!access_vec)
2586 continue;
2587
2588 if (!seq)
2589 {
2590 seq = gimple_seq_alloc ();
2591 gsi = gsi_start (seq);
2592 }
2593
2594 for (access = VEC_index (access_p, access_vec, 0);
2595 access;
2596 access = access->next_grp)
2597 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true);
2598 }
2599
2600 if (seq)
2601 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
2602 }
2603
2604 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
2605 it reveals there are components of some aggregates to be scalarized, it runs
2606 the required transformations. */
2607 static unsigned int
2608 perform_intra_sra (void)
2609 {
2610 int ret = 0;
2611 sra_initialize ();
2612
2613 if (!find_var_candidates ())
2614 goto out;
2615
2616 if (!scan_function (build_access_from_expr, build_accesses_from_assign, NULL,
2617 true, NULL))
2618 goto out;
2619
2620 if (!analyze_all_variable_accesses ())
2621 goto out;
2622
2623 scan_function (sra_modify_expr, sra_modify_assign, NULL, false, NULL);
2624 initialize_parameter_reductions ();
2625
2626 statistics_counter_event (cfun, "Scalar replacements created",
2627 sra_stats.replacements);
2628 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
2629 statistics_counter_event (cfun, "Subtree copy stmts",
2630 sra_stats.subtree_copies);
2631 statistics_counter_event (cfun, "Subreplacement stmts",
2632 sra_stats.subreplacements);
2633 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
2634 statistics_counter_event (cfun, "Separate LHS and RHS handling",
2635 sra_stats.separate_lhs_rhs_handling);
2636
2637 ret = TODO_update_ssa;
2638
2639 out:
2640 sra_deinitialize ();
2641 return ret;
2642 }
2643
2644 /* Perform early intraprocedural SRA. */
2645 static unsigned int
2646 early_intra_sra (void)
2647 {
2648 sra_mode = SRA_MODE_EARLY_INTRA;
2649 return perform_intra_sra ();
2650 }
2651
2652 /* Perform "late" intraprocedural SRA. */
2653 static unsigned int
2654 late_intra_sra (void)
2655 {
2656 sra_mode = SRA_MODE_INTRA;
2657 return perform_intra_sra ();
2658 }
2659
2660
2661 static bool
2662 gate_intra_sra (void)
2663 {
2664 return flag_tree_sra != 0;
2665 }
2666
2667
2668 struct gimple_opt_pass pass_sra_early =
2669 {
2670 {
2671 GIMPLE_PASS,
2672 "esra", /* name */
2673 gate_intra_sra, /* gate */
2674 early_intra_sra, /* execute */
2675 NULL, /* sub */
2676 NULL, /* next */
2677 0, /* static_pass_number */
2678 TV_TREE_SRA, /* tv_id */
2679 PROP_cfg | PROP_ssa, /* properties_required */
2680 0, /* properties_provided */
2681 0, /* properties_destroyed */
2682 0, /* todo_flags_start */
2683 TODO_dump_func
2684 | TODO_update_ssa
2685 | TODO_ggc_collect
2686 | TODO_verify_ssa /* todo_flags_finish */
2687 }
2688 };
2689
2690 struct gimple_opt_pass pass_sra =
2691 {
2692 {
2693 GIMPLE_PASS,
2694 "sra", /* name */
2695 gate_intra_sra, /* gate */
2696 late_intra_sra, /* execute */
2697 NULL, /* sub */
2698 NULL, /* next */
2699 0, /* static_pass_number */
2700 TV_TREE_SRA, /* tv_id */
2701 PROP_cfg | PROP_ssa, /* properties_required */
2702 0, /* properties_provided */
2703 0, /* properties_destroyed */
2704 TODO_update_address_taken, /* todo_flags_start */
2705 TODO_dump_func
2706 | TODO_update_ssa
2707 | TODO_ggc_collect
2708 | TODO_verify_ssa /* todo_flags_finish */
2709 }
2710 };
2711
2712
2713 /* Return true iff PARM (which must be a parm_decl) is an unused scalar
2714 parameter. */
2715
2716 static bool
2717 is_unused_scalar_param (tree parm)
2718 {
2719 tree name;
2720 return (is_gimple_reg (parm)
2721 && (!(name = gimple_default_def (cfun, parm))
2722 || has_zero_uses (name)));
2723 }
2724
2725 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
2726 examine whether there are any direct or otherwise infeasible ones. If so,
2727 return true, otherwise return false. PARM must be a gimple register with a
2728 non-NULL default definition. */
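/* Direct uses that disqualify the parameter include, for instance
   (illustrative only),

     int *f (int *p) { return p; }     p escapes through the return value
     void g (int *p) { h (p); }        p is passed on as a call argument

   whereas a mere dereference such as *p or p->field is fine.  */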
2729
2730 static bool
2731 ptr_parm_has_direct_uses (tree parm)
2732 {
2733 imm_use_iterator ui;
2734 gimple stmt;
2735 tree name = gimple_default_def (cfun, parm);
2736 bool ret = false;
2737
2738 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
2739 {
2740 if (gimple_assign_single_p (stmt))
2741 {
2742 tree rhs = gimple_assign_rhs1 (stmt);
2743 if (rhs == name)
2744 ret = true;
2745 else if (TREE_CODE (rhs) == ADDR_EXPR)
2746 {
2747 do
2748 {
2749 rhs = TREE_OPERAND (rhs, 0);
2750 }
2751 while (handled_component_p (rhs));
2752 if (INDIRECT_REF_P (rhs) && TREE_OPERAND (rhs, 0) == name)
2753 ret = true;
2754 }
2755 }
2756 else if (gimple_code (stmt) == GIMPLE_RETURN)
2757 {
2758 tree t = gimple_return_retval (stmt);
2759 if (t == name)
2760 ret = true;
2761 }
2762 else if (is_gimple_call (stmt))
2763 {
2764 unsigned i;
2765 for (i = 0; i < gimple_call_num_args (stmt); i++)
2766 {
2767 tree arg = gimple_call_arg (stmt, i);
2768 if (arg == name)
2769 {
2770 ret = true;
2771 break;
2772 }
2773 }
2774 }
2775 else if (!is_gimple_debug (stmt))
2776 ret = true;
2777
2778 if (ret)
2779 BREAK_FROM_IMM_USE_STMT (ui);
2780 }
2781
2782 return ret;
2783 }
2784
2785 /* Identify candidates for reduction for IPA-SRA based on their type and mark
2786 them in candidate_bitmap. Note that these do not necessarily include
2787 parameters which are unused and thus can be removed. Return true iff any
2788 such candidate has been found. */
2789
2790 static bool
2791 find_param_candidates (void)
2792 {
2793 tree parm;
2794 int count = 0;
2795 bool ret = false;
2796
2797 for (parm = DECL_ARGUMENTS (current_function_decl);
2798 parm;
2799 parm = TREE_CHAIN (parm))
2800 {
2801 tree type = TREE_TYPE (parm);
2802
2803 count++;
2804
2805 if (TREE_THIS_VOLATILE (parm)
2806 || TREE_ADDRESSABLE (parm)
2807 || is_va_list_type (type))
2808 continue;
2809
2810 if (is_unused_scalar_param (parm))
2811 {
2812 ret = true;
2813 continue;
2814 }
2815
2816 if (POINTER_TYPE_P (type))
2817 {
2818 type = TREE_TYPE (type);
2819
2820 if (TREE_CODE (type) == FUNCTION_TYPE
2821 || TYPE_VOLATILE (type)
2822 || !is_gimple_reg (parm)
2823 || is_va_list_type (type)
2824 || ptr_parm_has_direct_uses (parm))
2825 continue;
2826 }
2827 else if (!AGGREGATE_TYPE_P (type))
2828 continue;
2829
2830 if (!COMPLETE_TYPE_P (type)
2831 || !host_integerp (TYPE_SIZE (type), 1)
2832 || tree_low_cst (TYPE_SIZE (type), 1) == 0
2833 || (AGGREGATE_TYPE_P (type)
2834 && type_internals_preclude_sra_p (type)))
2835 continue;
2836
2837 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
2838 ret = true;
2839 if (dump_file && (dump_flags & TDF_DETAILS))
2840 {
2841 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
2842 print_generic_expr (dump_file, parm, 0);
2843 fprintf (dump_file, "\n");
2844 }
2845 }
2846
2847 func_param_count = count;
2848 return ret;
2849 }
2850
2851 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
2852 maybe_modified. */
2853
2854 static bool
2855 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
2856 void *data)
2857 {
2858 struct access *repr = (struct access *) data;
2859
2860 repr->grp_maybe_modified = 1;
2861 return true;
2862 }
2863
2864 /* Analyze what representatives (in linked lists accessible from
2865 REPRESENTATIVES) can be modified by side effects of statements in the
2866 current function. */
2867
2868 static void
2869 analyze_modified_params (VEC (access_p, heap) *representatives)
2870 {
2871 int i;
2872
2873 for (i = 0; i < func_param_count; i++)
2874 {
2875 struct access *repr;
2876
2877 for (repr = VEC_index (access_p, representatives, i);
2878 repr;
2879 repr = repr->next_grp)
2880 {
2881 struct access *access;
2882 bitmap visited;
2883 ao_ref ar;
2884
2885 if (no_accesses_p (repr))
2886 continue;
2887 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
2888 || repr->grp_maybe_modified)
2889 continue;
2890
2891 ao_ref_init (&ar, repr->expr);
2892 visited = BITMAP_ALLOC (NULL);
2893 for (access = repr; access; access = access->next_sibling)
2894 {
2895 /* All accesses are read ones, otherwise grp_maybe_modified would
2896 be trivially set. */
2897 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
2898 mark_maybe_modified, repr, &visited);
2899 if (repr->grp_maybe_modified)
2900 break;
2901 }
2902 BITMAP_FREE (visited);
2903 }
2904 }
2905 }
2906
2907 /* Propagate distances in bb_dereferences in the direction opposite to that of
2908 the control flow edges, in each step storing the maximum of the current
2909 value and the minimum of all successors. These steps are repeated until the
2910 table stabilizes. Note that BBs which might terminate the function
2911 (according to the final_bbs bitmap) are never updated in this way. */
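/* A small invented example: with a single pointer parameter and BB2 and BB3
   being the successors of BB1, where

     bb_dereferences[BB1] == 0, bb_dereferences[BB2] == 64,
     bb_dereferences[BB3] == 32

   one propagation step sets bb_dereferences[BB1] to
   MAX (0, MIN (64, 32)) == 32, because only the part certainly dereferenced
   on every path from BB1 may be counted.  */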
2912
2913 static void
2914 propagate_dereference_distances (void)
2915 {
2916 VEC (basic_block, heap) *queue;
2917 basic_block bb;
2918
2919 queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
2920 VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
2921 FOR_EACH_BB (bb)
2922 {
2923 VEC_quick_push (basic_block, queue, bb);
2924 bb->aux = bb;
2925 }
2926
2927 while (!VEC_empty (basic_block, queue))
2928 {
2929 edge_iterator ei;
2930 edge e;
2931 bool change = false;
2932 int i;
2933
2934 bb = VEC_pop (basic_block, queue);
2935 bb->aux = NULL;
2936
2937 if (bitmap_bit_p (final_bbs, bb->index))
2938 continue;
2939
2940 for (i = 0; i < func_param_count; i++)
2941 {
2942 int idx = bb->index * func_param_count + i;
2943 bool first = true;
2944 HOST_WIDE_INT inh = 0;
2945
2946 FOR_EACH_EDGE (e, ei, bb->succs)
2947 {
2948 int succ_idx = e->dest->index * func_param_count + i;
2949
2950 if (e->dest == EXIT_BLOCK_PTR)
2951 continue;
2952
2953 if (first)
2954 {
2955 first = false;
2956 inh = bb_dereferences [succ_idx];
2957 }
2958 else if (bb_dereferences [succ_idx] < inh)
2959 inh = bb_dereferences [succ_idx];
2960 }
2961
2962 if (!first && bb_dereferences[idx] < inh)
2963 {
2964 bb_dereferences[idx] = inh;
2965 change = true;
2966 }
2967 }
2968
2969 if (change && !bitmap_bit_p (final_bbs, bb->index))
2970 FOR_EACH_EDGE (e, ei, bb->preds)
2971 {
2972 if (e->src->aux)
2973 continue;
2974
2975 e->src->aux = e->src;
2976 VEC_quick_push (basic_block, queue, e->src);
2977 }
2978 }
2979
2980 VEC_free (basic_block, heap, queue);
2981 }
2982
2983 /* Dump a dereferences TABLE with heading STR to file F. */
2984
2985 static void
2986 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
2987 {
2988 basic_block bb;
2989
2990 fprintf (f, "%s", str);
2991 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
2992 {
2993 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
2994 if (bb != EXIT_BLOCK_PTR)
2995 {
2996 int i;
2997 for (i = 0; i < func_param_count; i++)
2998 {
2999 int idx = bb->index * func_param_count + i;
3000 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3001 }
3002 }
3003 fprintf (f, "\n");
3004 }
3005 fprintf (f, "\n");
3006 }
3007
3008 /* Determine which (parts of) parameters passed by reference and not assigned
3009 to are not certainly dereferenced in this function, meaning the
3010 dereferencing cannot be safely moved to the caller without potentially
3011 introducing a segfault. Mark such REPRESENTATIVES as
3012 grp_not_necessarilly_dereferenced.
3013
3014 The maximum dereference "distance," i.e. the offset + size of the accessed
3015 part, is calculated for each pointer parameter rather than a simple boolean,
3016 in order to handle cases when only a fraction of the whole
3017 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3018 an example).
3019
3020 The maximum dereference distances for each pointer parameter and BB are
3021 already stored in bb_dereferences. This routine simply propagates these
3022 values upwards by propagate_dereference_distances and then compares the
3023 distances of individual parameters in the ENTRY BB to the equivalent
3024 distances of each representative of a (fraction of a) parameter. */
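/* For example (hypothetical), for a parameter P of type int * that is
   dereferenced only inside one branch of a condition, the minimum over the
   ENTRY block's successors stays 0 after propagation, so a representative
   with offset 0 and size 32 satisfies 0 + 32 > 0 and is marked
   grp_not_necessarilly_dereferenced, preventing the caller from
   dereferencing P unconditionally.  */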
3025
3026 static void
3027 analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
3028 {
3029 int i;
3030
3031 if (dump_file && (dump_flags & TDF_DETAILS))
3032 dump_dereferences_table (dump_file,
3033 "Dereference table before propagation:\n",
3034 bb_dereferences);
3035
3036 propagate_dereference_distances ();
3037
3038 if (dump_file && (dump_flags & TDF_DETAILS))
3039 dump_dereferences_table (dump_file,
3040 "Dereference table after propagation:\n",
3041 bb_dereferences);
3042
3043 for (i = 0; i < func_param_count; i++)
3044 {
3045 struct access *repr = VEC_index (access_p, representatives, i);
3046 int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
3047
3048 if (!repr || no_accesses_p (repr))
3049 continue;
3050
3051 do
3052 {
3053 if ((repr->offset + repr->size) > bb_dereferences[idx])
3054 repr->grp_not_necessarilly_dereferenced = 1;
3055 repr = repr->next_grp;
3056 }
3057 while (repr);
3058 }
3059 }
3060
3061 /* Return the representative access for the parameter declaration PARM if it is
3062 a scalar passed by reference which is not written to and the pointer value
3063 is not used directly. Thus, if it is legal to dereference it in the caller
3064 and we can rule out modifications through aliases, such a parameter should be
3065 turned into one passed by value. Return NULL otherwise. */
3066
3067 static struct access *
3068 unmodified_by_ref_scalar_representative (tree parm)
3069 {
3070 int i, access_count;
3071 struct access *repr;
3072 VEC (access_p, heap) *access_vec;
3073
3074 access_vec = get_base_access_vector (parm);
3075 gcc_assert (access_vec);
3076 repr = VEC_index (access_p, access_vec, 0);
3077 if (repr->write)
3078 return NULL;
3079 repr->group_representative = repr;
3080
3081 access_count = VEC_length (access_p, access_vec);
3082 for (i = 1; i < access_count; i++)
3083 {
3084 struct access *access = VEC_index (access_p, access_vec, i);
3085 if (access->write)
3086 return NULL;
3087 access->group_representative = repr;
3088 access->next_sibling = repr->next_sibling;
3089 repr->next_sibling = access;
3090 }
3091
3092 repr->grp_read = 1;
3093 repr->grp_scalar_ptr = 1;
3094 return repr;
3095 }
3096
3097 /* Return true iff this access precludes IPA-SRA of the parameter it is
3098 associated with. */
3099
3100 static bool
3101 access_precludes_ipa_sra_p (struct access *access)
3102 {
3103 /* Avoid issues such as the second simple testcase in PR 42025. The problem
3104 is an incompatible assignment in a call statement (and possibly even in asm
3105 statements). This can be relaxed by using a new temporary but only for
3106 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
3107 intraprocedural SRA we deal with this by keeping the old aggregate around,
3108 something we cannot do in IPA-SRA.) */
3109 if (access->write
3110 && (is_gimple_call (access->stmt)
3111 || gimple_code (access->stmt) == GIMPLE_ASM))
3112 return true;
3113
3114 return false;
3115 }
3116
3117
3118 /* Sort collected accesses for parameter PARM, identify representatives for
3119 each accessed region and link them together. Return NULL if there are
3120 different but overlapping accesses, return the special pointer value meaning
3121 there are no accesses for this parameter if that is the case, and return the
3122 first representative otherwise. Set *RO_GRP if there is a group of accesses
3123 with only read (i.e. no write) accesses. */
3124
3125 static struct access *
3126 splice_param_accesses (tree parm, bool *ro_grp)
3127 {
3128 int i, j, access_count, group_count;
3129 int agg_size, total_size = 0;
3130 struct access *access, *res, **prev_acc_ptr = &res;
3131 VEC (access_p, heap) *access_vec;
3132
3133 access_vec = get_base_access_vector (parm);
3134 if (!access_vec)
3135 return &no_accesses_representant;
3136 access_count = VEC_length (access_p, access_vec);
3137
3138 qsort (VEC_address (access_p, access_vec), access_count, sizeof (access_p),
3139 compare_access_positions);
3140
3141 i = 0;
3142 total_size = 0;
3143 group_count = 0;
3144 while (i < access_count)
3145 {
3146 bool modification;
3147 access = VEC_index (access_p, access_vec, i);
3148 modification = access->write;
3149 if (access_precludes_ipa_sra_p (access))
3150 return NULL;
3151
3152 /* Access is about to become group representative unless we find some
3153 nasty overlap which would preclude us from breaking this parameter
3154 apart. */
3155
3156 j = i + 1;
3157 while (j < access_count)
3158 {
3159 struct access *ac2 = VEC_index (access_p, access_vec, j);
3160 if (ac2->offset != access->offset)
3161 {
3162 /* All or nothing law for parameters. */
3163 if (access->offset + access->size > ac2->offset)
3164 return NULL;
3165 else
3166 break;
3167 }
3168 else if (ac2->size != access->size)
3169 return NULL;
3170
3171 if (access_precludes_ipa_sra_p (ac2))
3172 return NULL;
3173
3174 modification |= ac2->write;
3175 ac2->group_representative = access;
3176 ac2->next_sibling = access->next_sibling;
3177 access->next_sibling = ac2;
3178 j++;
3179 }
3180
3181 group_count++;
3182 access->grp_maybe_modified = modification;
3183 if (!modification)
3184 *ro_grp = true;
3185 *prev_acc_ptr = access;
3186 prev_acc_ptr = &access->next_grp;
3187 total_size += access->size;
3188 i = j;
3189 }
3190
3191 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3192 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3193 else
3194 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3195 if (total_size >= agg_size)
3196 return NULL;
3197
3198 gcc_assert (group_count > 0);
3199 return res;
3200 }
3201
3202 /* Decide whether parameters with representative accesses given by REPR should
3203 be reduced into components. */
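/* An invented example of the heuristics below: a by-reference parameter

     struct S { int a; int b; int c; } *p;   (32-bit ints assumed)

   of which only p->a and p->c are read gives total_size == 64 while
   agg_size == 96, so, provided 64 does not exceed
   PARAM_IPA_SRA_PTR_GROWTH_FACTOR times the size of the pointer itself, the
   parameter is split into two scalar components.  */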
3204
3205 static int
3206 decide_one_param_reduction (struct access *repr)
3207 {
3208 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
3209 bool by_ref;
3210 tree parm;
3211
3212 parm = repr->base;
3213 cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3214 gcc_assert (cur_parm_size > 0);
3215
3216 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3217 {
3218 by_ref = true;
3219 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3220 }
3221 else
3222 {
3223 by_ref = false;
3224 agg_size = cur_parm_size;
3225 }
3226
3227 if (dump_file)
3228 {
3229 struct access *acc;
3230 fprintf (dump_file, "Evaluating PARAM group sizes for ");
3231 print_generic_expr (dump_file, parm, 0);
3232 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
3233 for (acc = repr; acc; acc = acc->next_grp)
3234 dump_access (dump_file, acc, true);
3235 }
3236
3237 total_size = 0;
3238 new_param_count = 0;
3239
3240 for (; repr; repr = repr->next_grp)
3241 {
3242 gcc_assert (parm == repr->base);
3243 new_param_count++;
3244
3245 if (!by_ref || (!repr->grp_maybe_modified
3246 && !repr->grp_not_necessarilly_dereferenced))
3247 total_size += repr->size;
3248 else
3249 total_size += cur_parm_size;
3250 }
3251
3252 gcc_assert (new_param_count > 0);
3253
3254 if (optimize_function_for_size_p (cfun))
3255 parm_size_limit = cur_parm_size;
3256 else
3257 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
3258 * cur_parm_size);
3259
3260 if (total_size < agg_size
3261 && total_size <= parm_size_limit)
3262 {
3263 if (dump_file)
3264 fprintf (dump_file, " ....will be split into %i components\n",
3265 new_param_count);
3266 return new_param_count;
3267 }
3268 else
3269 return 0;
3270 }
3271
3272 /* The order of the following enums is important; we need to do extra work for
3273 UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
3274 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
3275 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
3276
3277 /* Identify representatives of all accesses to all candidate parameters for
3278 IPA-SRA. Return result based on what representatives have been found. */
3279
3280 static enum ipa_splicing_result
3281 splice_all_param_accesses (VEC (access_p, heap) **representatives)
3282 {
3283 enum ipa_splicing_result result = NO_GOOD_ACCESS;
3284 tree parm;
3285 struct access *repr;
3286
3287 *representatives = VEC_alloc (access_p, heap, func_param_count);
3288
3289 for (parm = DECL_ARGUMENTS (current_function_decl);
3290 parm;
3291 parm = TREE_CHAIN (parm))
3292 {
3293 if (is_unused_scalar_param (parm))
3294 {
3295 VEC_quick_push (access_p, *representatives,
3296 &no_accesses_representant);
3297 if (result == NO_GOOD_ACCESS)
3298 result = UNUSED_PARAMS;
3299 }
3300 else if (POINTER_TYPE_P (TREE_TYPE (parm))
3301 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
3302 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3303 {
3304 repr = unmodified_by_ref_scalar_representative (parm);
3305 VEC_quick_push (access_p, *representatives, repr);
3306 if (repr)
3307 result = UNMODIF_BY_REF_ACCESSES;
3308 }
3309 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3310 {
3311 bool ro_grp = false;
3312 repr = splice_param_accesses (parm, &ro_grp);
3313 VEC_quick_push (access_p, *representatives, repr);
3314
3315 if (repr && !no_accesses_p (repr))
3316 {
3317 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3318 {
3319 if (ro_grp)
3320 result = UNMODIF_BY_REF_ACCESSES;
3321 else if (result < MODIF_BY_REF_ACCESSES)
3322 result = MODIF_BY_REF_ACCESSES;
3323 }
3324 else if (result < BY_VAL_ACCESSES)
3325 result = BY_VAL_ACCESSES;
3326 }
3327 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
3328 result = UNUSED_PARAMS;
3329 }
3330 else
3331 VEC_quick_push (access_p, *representatives, NULL);
3332 }
3333
3334 if (result == NO_GOOD_ACCESS)
3335 {
3336 VEC_free (access_p, heap, *representatives);
3337 *representatives = NULL;
3338 return NO_GOOD_ACCESS;
3339 }
3340
3341 return result;
3342 }
3343
3344 /* Return the index of BASE in PARMS. Abort if it is not found. */
3345
3346 static inline int
3347 get_param_index (tree base, VEC(tree, heap) *parms)
3348 {
3349 int i, len;
3350
3351 len = VEC_length (tree, parms);
3352 for (i = 0; i < len; i++)
3353 if (VEC_index (tree, parms, i) == base)
3354 return i;
3355 gcc_unreachable ();
3356 }
3357
3358 /* Convert the decisions made at the representative level into compact
3359 parameter adjustments. REPRESENTATIVES are pointers to the first
3360 representatives of each parameter's accesses; ADJUSTMENTS_COUNT is the expected
3361 final number of adjustments. */
3362
3363 static ipa_parm_adjustment_vec
3364 turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
3365 int adjustments_count)
3366 {
3367 VEC (tree, heap) *parms;
3368 ipa_parm_adjustment_vec adjustments;
3369 tree parm;
3370 int i;
3371
3372 gcc_assert (adjustments_count > 0);
3373 parms = ipa_get_vector_of_formal_parms (current_function_decl);
3374 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
3375 parm = DECL_ARGUMENTS (current_function_decl);
3376 for (i = 0; i < func_param_count; i++, parm = TREE_CHAIN (parm))
3377 {
3378 struct access *repr = VEC_index (access_p, representatives, i);
3379
3380 if (!repr || no_accesses_p (repr))
3381 {
3382 struct ipa_parm_adjustment *adj;
3383
3384 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
3385 memset (adj, 0, sizeof (*adj));
3386 adj->base_index = get_param_index (parm, parms);
3387 adj->base = parm;
3388 if (!repr)
3389 adj->copy_param = 1;
3390 else
3391 adj->remove_param = 1;
3392 }
3393 else
3394 {
3395 struct ipa_parm_adjustment *adj;
3396 int index = get_param_index (parm, parms);
3397
3398 for (; repr; repr = repr->next_grp)
3399 {
3400 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
3401 memset (adj, 0, sizeof (*adj));
3402 gcc_assert (repr->base == parm);
3403 adj->base_index = index;
3404 adj->base = repr->base;
3405 adj->type = repr->type;
3406 adj->offset = repr->offset;
3407 adj->by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
3408 && (repr->grp_maybe_modified
3409 || repr->grp_not_necessarilly_dereferenced));
3410
3411 }
3412 }
3413 }
3414 VEC_free (tree, heap, parms);
3415 return adjustments;
3416 }
3417
3418 /* Analyze the collected accesses and produce a plan for what to do with the
3419 parameters, in the form of adjustments; NULL means nothing is to be done. */
3420
3421 static ipa_parm_adjustment_vec
3422 analyze_all_param_acesses (void)
3423 {
3424 enum ipa_splicing_result repr_state;
3425 bool proceed = false;
3426 int i, adjustments_count = 0;
3427 VEC (access_p, heap) *representatives;
3428 ipa_parm_adjustment_vec adjustments;
3429
3430 repr_state = splice_all_param_accesses (&representatives);
3431 if (repr_state == NO_GOOD_ACCESS)
3432 return NULL;
3433
3434 /* If there are any parameters passed by reference which are not modified
3435 directly, we need to check whether they can be modified indirectly. */
3436 if (repr_state == UNMODIF_BY_REF_ACCESSES)
3437 {
3438 analyze_caller_dereference_legality (representatives);
3439 analyze_modified_params (representatives);
3440 }
3441
3442 for (i = 0; i < func_param_count; i++)
3443 {
3444 struct access *repr = VEC_index (access_p, representatives, i);
3445
3446 if (repr && !no_accesses_p (repr))
3447 {
3448 if (repr->grp_scalar_ptr)
3449 {
3450 adjustments_count++;
3451 if (repr->grp_not_necessarilly_dereferenced
3452 || repr->grp_maybe_modified)
3453 VEC_replace (access_p, representatives, i, NULL);
3454 else
3455 {
3456 proceed = true;
3457 sra_stats.scalar_by_ref_to_by_val++;
3458 }
3459 }
3460 else
3461 {
3462 int new_components = decide_one_param_reduction (repr);
3463
3464 if (new_components == 0)
3465 {
3466 VEC_replace (access_p, representatives, i, NULL);
3467 adjustments_count++;
3468 }
3469 else
3470 {
3471 adjustments_count += new_components;
3472 sra_stats.aggregate_params_reduced++;
3473 sra_stats.param_reductions_created += new_components;
3474 proceed = true;
3475 }
3476 }
3477 }
3478 else
3479 {
3480 if (no_accesses_p (repr))
3481 {
3482 proceed = true;
3483 sra_stats.deleted_unused_parameters++;
3484 }
3485 adjustments_count++;
3486 }
3487 }
3488
3489 if (!proceed && dump_file)
3490 fprintf (dump_file, "NOT proceeding to change params.\n");
3491
3492 if (proceed)
3493 adjustments = turn_representatives_into_adjustments (representatives,
3494 adjustments_count);
3495 else
3496 adjustments = NULL;
3497
3498 VEC_free (access_p, heap, representatives);
3499 return adjustments;
3500 }
3501
3502 /* If a parameter replacement identified by ADJ does not yet exist in the form
3503 of a declaration, create it and record it; otherwise return the previously
3504 created one. */
3505
3506 static tree
3507 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
3508 {
3509 tree repl;
3510 if (!adj->new_ssa_base)
3511 {
3512 char *pretty_name = make_fancy_name (adj->base);
3513
3514 repl = create_tmp_var (TREE_TYPE (adj->base), "ISR");
3515 if (TREE_CODE (TREE_TYPE (repl)) == COMPLEX_TYPE
3516 || TREE_CODE (TREE_TYPE (repl)) == VECTOR_TYPE)
3517 DECL_GIMPLE_REG_P (repl) = 1;
3518 DECL_NAME (repl) = get_identifier (pretty_name);
3519 obstack_free (&name_obstack, pretty_name);
3520
3521 get_var_ann (repl);
3522 add_referenced_var (repl);
3523 adj->new_ssa_base = repl;
3524 }
3525 else
3526 repl = adj->new_ssa_base;
3527 return repl;
3528 }
3529
3530 /* Find the first adjustment for a particular parameter BASE in a vector of
3531 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
3532 adjustment. */
3533
3534 static struct ipa_parm_adjustment *
3535 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
3536 {
3537 int i, len;
3538
3539 len = VEC_length (ipa_parm_adjustment_t, adjustments);
3540 for (i = 0; i < len; i++)
3541 {
3542 struct ipa_parm_adjustment *adj;
3543
3544 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
3545 if (!adj->copy_param && adj->base == base)
3546 return adj;
3547 }
3548
3549 return NULL;
3550 }
3551
3552 /* Callback for scan_function. If the statement STMT defines an SSA_NAME of a
3553 parameter which is to be removed because its value is not used, replace the
3554 SSA_NAME with one relating to a created VAR_DECL, replace all of its
3555 uses too and return true (update_stmt is then issued for the statement by
3556 the caller). DATA is a pointer to an adjustments vector. */
3557
3558 static bool
3559 replace_removed_params_ssa_names (gimple stmt, void *data)
3560 {
3561 VEC (ipa_parm_adjustment_t, heap) *adjustments;
3562 struct ipa_parm_adjustment *adj;
3563 tree lhs, decl, repl, name;
3564
3565 adjustments = (VEC (ipa_parm_adjustment_t, heap) *) data;
3566 if (gimple_code (stmt) == GIMPLE_PHI)
3567 lhs = gimple_phi_result (stmt);
3568 else if (is_gimple_assign (stmt))
3569 lhs = gimple_assign_lhs (stmt);
3570 else if (is_gimple_call (stmt))
3571 lhs = gimple_call_lhs (stmt);
3572 else
3573 gcc_unreachable ();
3574
3575 if (TREE_CODE (lhs) != SSA_NAME)
3576 return false;
3577 decl = SSA_NAME_VAR (lhs);
3578 if (TREE_CODE (decl) != PARM_DECL)
3579 return false;
3580
3581 adj = get_adjustment_for_base (adjustments, decl);
3582 if (!adj)
3583 return false;
3584
3585 repl = get_replaced_param_substitute (adj);
3586 name = make_ssa_name (repl, stmt);
3587
3588 if (dump_file)
3589 {
3590 fprintf (dump_file, "replacing an SSA name of a removed param ");
3591 print_generic_expr (dump_file, lhs, 0);
3592 fprintf (dump_file, " with ");
3593 print_generic_expr (dump_file, name, 0);
3594 fprintf (dump_file, "\n");
3595 }
3596
3597 if (is_gimple_assign (stmt))
3598 gimple_assign_set_lhs (stmt, name);
3599 else if (is_gimple_call (stmt))
3600 gimple_call_set_lhs (stmt, name);
3601 else
3602 gimple_phi_set_result (stmt, name);
3603
3604 replace_uses_by (lhs, name);
3605 return true;
3606 }
3607
3608 /* Callback for scan_function and helper to sra_ipa_modify_assign. If the
3609 expression *EXPR should be replaced by a reduction of a parameter, do so.
3610 DATA is a pointer to a vector of adjustments. DONT_CONVERT specifies
3611 whether the function should care about type incompatibility between the
3612 current and new expressions. If it is true, the function will leave
3613 incompatibility issues to the caller.
3614
3615 When called directly by scan_function, DONT_CONVERT is true when the EXPR is
3616 a write (LHS) expression. */
3617
3618 static bool
3619 sra_ipa_modify_expr (tree *expr, gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
3620 bool dont_convert, void *data)
3621 {
3622 ipa_parm_adjustment_vec adjustments;
3623 int i, len;
3624 struct ipa_parm_adjustment *adj, *cand = NULL;
3625 HOST_WIDE_INT offset, size, max_size;
3626 tree base, src;
3627
3628 adjustments = (VEC (ipa_parm_adjustment_t, heap) *) data;
3629 len = VEC_length (ipa_parm_adjustment_t, adjustments);
3630
3631 if (TREE_CODE (*expr) == BIT_FIELD_REF
3632 || TREE_CODE (*expr) == IMAGPART_EXPR
3633 || TREE_CODE (*expr) == REALPART_EXPR)
3634 {
3635 expr = &TREE_OPERAND (*expr, 0);
3636 dont_convert = false;
3637 }
3638
3639 base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
3640 if (!base || size == -1 || max_size == -1)
3641 return false;
3642
3643 if (INDIRECT_REF_P (base))
3644 base = TREE_OPERAND (base, 0);
3645
3646 base = get_ssa_base_param (base);
3647 if (!base || TREE_CODE (base) != PARM_DECL)
3648 return false;
3649
3650 for (i = 0; i < len; i++)
3651 {
3652 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
3653
3654 if (adj->base == base
3655 && (adj->offset == offset || adj->remove_param))
3656 {
3657 cand = adj;
3658 break;
3659 }
3660 }
3661 if (!cand || cand->copy_param || cand->remove_param)
3662 return false;
3663
3664 if (cand->by_ref)
3665 {
3666 tree folded;
3667 src = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (cand->reduction)),
3668 cand->reduction);
3669 folded = gimple_fold_indirect_ref (src);
3670 if (folded)
3671 src = folded;
3672 }
3673 else
3674 src = cand->reduction;
3675
3676 if (dump_file && (dump_flags & TDF_DETAILS))
3677 {
3678 fprintf (dump_file, "About to replace expr ");
3679 print_generic_expr (dump_file, *expr, 0);
3680 fprintf (dump_file, " with ");
3681 print_generic_expr (dump_file, src, 0);
3682 fprintf (dump_file, "\n");
3683 }
3684
3685 if (!dont_convert
3686 && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
3687 {
3688 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
3689 *expr = vce;
3690 }
3691 else
3692 *expr = src;
3693 return true;
3694 }
3695
/* Callback for scan_function to process assign statements.  Performs
   essentially the same function as sra_ipa_modify_expr.  */
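
/* As a hypothetical illustration, an assignment copying one single-field
   structure into another,

     *p = *q;

   can end up, once both operands have been replaced by their scalar
   reductions, as an assignment between two scalars,

     ISRA.5 = ISRA.6;

   with the right-hand side view-converted or forced into a separate
   temporary whenever the new operands would not otherwise form valid
   GIMPLE.  */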

static enum scan_assign_result
sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi, void *data)
{
  gimple stmt = *stmt_ptr;
  tree *lhs_p, *rhs_p;
  bool any;

  if (!gimple_assign_single_p (stmt))
    return SRA_SA_NONE;

  rhs_p = gimple_assign_rhs1_ptr (stmt);
  lhs_p = gimple_assign_lhs_ptr (stmt);

  any = sra_ipa_modify_expr (rhs_p, gsi, true, data);
  any |= sra_ipa_modify_expr (lhs_p, gsi, true, data);
  if (any)
    {
      tree new_rhs = NULL_TREE;

      if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
        new_rhs = fold_build1_loc (gimple_location (stmt), VIEW_CONVERT_EXPR,
                                   TREE_TYPE (*lhs_p), *rhs_p);
      else if (REFERENCE_CLASS_P (*rhs_p)
               && is_gimple_reg_type (TREE_TYPE (*lhs_p))
               && !is_gimple_reg (*lhs_p))
        /* This can happen when an assignment between two single-field
           structures is turned into an assignment between two pointers to
           scalars (PR 42237).  */
        new_rhs = *rhs_p;

      if (new_rhs)
        {
          tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
                                               true, GSI_SAME_STMT);

          gimple_assign_set_rhs_from_tree (gsi, tmp);
        }

      return SRA_SA_PROCESSED;
    }

  return SRA_SA_NONE;
}

/* Call gimple_debug_bind_reset_value on all debug statements describing
   gimple register parameters that are being removed or replaced.  */
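
/* As a sketch (the dump syntax below is only approximate), a debug bind
   statement still referring to the default definition of a removed
   parameter,

     # DEBUG parm => parm_1(D)

   gets its value reset so that it no longer mentions the removed name.  */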
3745
3746 static void
3747 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
3748 {
3749 int i, len;
3750
3751 len = VEC_length (ipa_parm_adjustment_t, adjustments);
3752 for (i = 0; i < len; i++)
3753 {
3754 struct ipa_parm_adjustment *adj;
3755 imm_use_iterator ui;
3756 gimple stmt;
3757 tree name;
3758
3759 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
3760 if (adj->copy_param || !is_gimple_reg (adj->base))
3761 continue;
3762 name = gimple_default_def (cfun, adj->base);
3763 if (!name)
3764 continue;
3765 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3766 {
3767 /* All other users must have been removed by scan_function. */
3768 gcc_assert (is_gimple_debug (stmt));
3769 gimple_debug_bind_reset_value (stmt);
3770 update_stmt (stmt);
3771 }
3772 }
3773 }
3774
/* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.  */
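
/* To illustrate with made-up identifiers: if a parameter of foo has been
   reduced from a by-reference aggregate to the single scalar component the
   function actually uses, a statement such as

     foo (&s);

   in each caller is rewritten by ipa_modify_call_arguments to pass the
   component directly, along the lines of

     foo (s.fld);  */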

static void
convert_callers (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
{
  tree old_cur_fndecl = current_function_decl;
  struct cgraph_edge *cs;
  basic_block this_block;
  bitmap recomputed_callers = BITMAP_ALLOC (NULL);

  for (cs = node->callers; cs; cs = cs->next_caller)
    {
      current_function_decl = cs->caller->decl;
      push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));

      if (dump_file)
        fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
                 cs->caller->uid, cs->callee->uid,
                 cgraph_node_name (cs->caller),
                 cgraph_node_name (cs->callee));

      ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);

      pop_cfun ();
    }

  for (cs = node->callers; cs; cs = cs->next_caller)
    if (!bitmap_bit_p (recomputed_callers, cs->caller->uid))
      {
        compute_inline_parameters (cs->caller);
        bitmap_set_bit (recomputed_callers, cs->caller->uid);
      }
  BITMAP_FREE (recomputed_callers);

  current_function_decl = old_cur_fndecl;
  FOR_EACH_BB (this_block)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          if (gimple_code (stmt) == GIMPLE_CALL
              && gimple_call_fndecl (stmt) == node->decl)
            {
              if (dump_file)
                fprintf (dump_file, "Adjusting recursive call\n");
              ipa_modify_call_arguments (NULL, stmt, adjustments);
            }
        }
    }
}


/* Perform all the modifications required in IPA-SRA for NODE to have
   parameters as given in ADJUSTMENTS.  */

static void
modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
{
  ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
  scan_function (sra_ipa_modify_expr, sra_ipa_modify_assign,
                 replace_removed_params_ssa_names, false, adjustments);
  sra_ipa_reset_debug_stmts (adjustments);
  convert_callers (node, adjustments);
  cgraph_make_node_local (node);
}


/* Return false if the function is apparently unsuitable for IPA-SRA based on
   its attributes, true otherwise.  NODE is the cgraph node of the current
   function.  */

static bool
ipa_sra_preliminary_function_checks (struct cgraph_node *node)
{
  if (!cgraph_node_can_be_local_p (node))
    {
      if (dump_file)
        fprintf (dump_file, "Function not local to this compilation unit.\n");
      return false;
    }

  if (DECL_VIRTUAL_P (current_function_decl))
    {
      if (dump_file)
        fprintf (dump_file, "Function is a virtual method.\n");
      return false;
    }

  if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
      && node->global.size >= MAX_INLINE_INSNS_AUTO)
    {
      if (dump_file)
        fprintf (dump_file, "Function too big to be made truly local.\n");
      return false;
    }

  if (!node->callers)
    {
      if (dump_file)
        fprintf (dump_file,
                 "Function has no callers in this compilation unit.\n");
      return false;
    }

  if (cfun->stdarg)
    {
      if (dump_file)
        fprintf (dump_file, "Function uses stdarg.\n");
      return false;
    }

  return true;
}


/* Perform early interprocedural SRA.  */

static unsigned int
ipa_early_sra (void)
{
  struct cgraph_node *node = cgraph_node (current_function_decl);
  ipa_parm_adjustment_vec adjustments;
  int ret = 0;

  if (!ipa_sra_preliminary_function_checks (node))
    return 0;

  sra_initialize ();
  sra_mode = SRA_MODE_EARLY_IPA;

  if (!find_param_candidates ())
    {
      if (dump_file)
        fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
      goto simple_out;
    }

  bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
                              func_param_count
                              * last_basic_block_for_function (cfun));
  final_bbs = BITMAP_ALLOC (NULL);

  scan_function (build_access_from_expr, build_accesses_from_assign,
                 NULL, true, NULL);
  if (encountered_apply_args)
    {
      if (dump_file)
        fprintf (dump_file, "Function calls __builtin_apply_args().\n");
      goto out;
    }

  adjustments = analyze_all_param_acesses ();
  if (!adjustments)
    goto out;
  if (dump_file)
    ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);

  modify_function (node, adjustments);
  VEC_free (ipa_parm_adjustment_t, heap, adjustments);
  ret = TODO_update_ssa;

  statistics_counter_event (cfun, "Unused parameters deleted",
                            sra_stats.deleted_unused_parameters);
  statistics_counter_event (cfun, "Scalar parameters converted to by-value",
                            sra_stats.scalar_by_ref_to_by_val);
  statistics_counter_event (cfun, "Aggregate parameters broken up",
                            sra_stats.aggregate_params_reduced);
  statistics_counter_event (cfun, "Aggregate parameter components created",
                            sra_stats.param_reductions_created);

 out:
  BITMAP_FREE (final_bbs);
  free (bb_dereferences);
 simple_out:
  sra_deinitialize ();
  return ret;
}


/* Return true if early IPA-SRA shall be performed.  */

static bool
ipa_early_sra_gate (void)
{
  return flag_ipa_sra;
}

struct gimple_opt_pass pass_early_ipa_sra =
{
 {
  GIMPLE_PASS,
  "eipa_sra",                           /* name */
  ipa_early_sra_gate,                   /* gate */
  ipa_early_sra,                        /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IPA_SRA,                           /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_dump_cgraph     /* todo_flags_finish */
 }
};
