1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* This file implements Scalar Replacement of Aggregates (SRA). SRA is run
24 twice, once in the early stages of compilation (early SRA) and once in the
25 late stages (late SRA). The aim of both is to turn references to scalar
26 parts of aggregates into uses of independent scalar variables.
27
28 The two passes are nearly identical; the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement because together with inlining this can lead to weird type
31 conversions.
32
33 Both passes operate in four stages:
34
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
38
39 2. The function body is scanned. In the process, declarations which are
40 used in a manner that prevents their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
46
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
49 accesses.
50
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
55
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
60
61 Then accesses are propagated whenever possible (i.e. in cases when it
62 does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
64
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
67
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from the respective parameter aggregates. */
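
/* A minimal illustration (hypothetical example, not from any testcase): for

     struct point { int x; int y; };

     int
     foo (void)
     {
       struct point p;
       p.x = 1;
       p.y = 2;
       return p.x + p.y;
     }

   the intraprocedural passes create independent scalar replacements for p.x
   and p.y (named along the lines of p$x and p$y, see make_fancy_name below)
   and rewrite the statements to use them, after which the aggregate P itself
   is no longer needed.  */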
73
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "alloc-pool.h"
78 #include "tm.h"
79 #include "tree.h"
80 #include "gimple.h"
81 #include "cgraph.h"
82 #include "tree-flow.h"
83 #include "ipa-prop.h"
84 #include "tree-pretty-print.h"
85 #include "statistics.h"
86 #include "tree-dump.h"
87 #include "timevar.h"
88 #include "params.h"
89 #include "target.h"
90 #include "flags.h"
91 #include "dbgcnt.h"
92 #include "tree-inline.h"
93 #include "gimple-pretty-print.h"
94 #include "ipa-inline.h"
95
96 /* Enumeration of all aggregate reductions we can do. */
97 enum sra_mode { SRA_MODE_EARLY_IPA, /* early call regularization */
98 SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
99 SRA_MODE_INTRA }; /* late intraprocedural SRA */
100
101 /* Global variable describing which aggregate reduction we are performing at
102 the moment. */
103 static enum sra_mode sra_mode;
104
105 struct assign_link;
106
107 /* ACCESS represents each access to an aggregate variable (as a whole or a
108 part). It can also represent a group of accesses that refer to exactly the
109 same fragment of an aggregate (i.e. those that have exactly the same offset
110 and size). Such representatives for a single aggregate, once determined,
111 are linked in a linked list and have the group fields set.
112
113 Moreover, when doing intraprocedural SRA, a tree is built from those
114 representatives (by the means of first_child and next_sibling pointers), in
115 which all items in a subtree are "within" the root, i.e. their offset is
116 greater or equal to offset of the root and offset+size is smaller or equal
117 to offset+size of the root. Children of an access are sorted by offset.
118
119 Note that accesses to parts of vector and complex number types are always
120 represented by an access to the whole complex number or vector. It is the
121 duty of the modifying functions to replace them appropriately. */
122
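/* For illustration only (a made-up layout): for a declaration

     struct inner { int a; int b; };
     struct outer { struct inner in; int c; } o;

   the group representatives could form the following access tree, each node
   shown as <offset, size> in bits assuming a 32-bit int:

     <0, 96>  o
       <0, 64>  o.in
         <0, 32>   o.in.a
         <32, 32>  o.in.b
       <64, 32>  o.c

   first_child points one level down (e.g. from o to o.in), next_sibling links
   nodes on the same level (e.g. o.in to o.c), every child lies within its
   parent and children are sorted by offset.  */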
123 struct access
124 {
125 /* Values returned by `get_ref_base_and_extent' for each component reference.
126 If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
127 `SIZE = TREE_SIZE (TREE_TYPE (expr))'. */
128 HOST_WIDE_INT offset;
129 HOST_WIDE_INT size;
130 tree base;
131
132 /* Expression. It is context dependent so do not use it to create new
133 expressions to access the original aggregate. See PR 42154 for a
134 testcase. */
135 tree expr;
136 /* Type. */
137 tree type;
138
139 /* The statement this access belongs to. */
140 gimple stmt;
141
142 /* Next group representative for this aggregate. */
143 struct access *next_grp;
144
145 /* Pointer to the group representative. Pointer to itself if the struct is
146 the representative. */
147 struct access *group_representative;
148
149 /* If this access has any children (in terms of the definition above), this
150 points to the first one. */
151 struct access *first_child;
152
153 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
154 described above. In IPA-SRA this is a pointer to the next access
155 belonging to the same group (having the same representative). */
156 struct access *next_sibling;
157
158 /* Pointers to the first and last element in the linked list of assign
159 links. */
160 struct assign_link *first_link, *last_link;
161
162 /* Pointer to the next access in the work queue. */
163 struct access *next_queued;
164
165 /* Replacement variable for this access "region." Never to be accessed
166 directly, always only by the means of get_access_replacement() and only
167 when grp_to_be_replaced flag is set. */
168 tree replacement_decl;
169
170 /* Is this particular access write access? */
171 unsigned write : 1;
172
173 /* Is this access an access to a non-addressable field? */
174 unsigned non_addressable : 1;
175
176 /* Is this access currently in the work queue? */
177 unsigned grp_queued : 1;
178
179 /* Does this group contain a write access? This flag is propagated down the
180 access tree. */
181 unsigned grp_write : 1;
182
183 /* Does this group contain a read access? This flag is propagated down the
184 access tree. */
185 unsigned grp_read : 1;
186
187 /* Does this group contain a read access that comes from an assignment
188 statement? This flag is propagated down the access tree. */
189 unsigned grp_assignment_read : 1;
190
191 /* Does this group contain a write access that comes from an assignment
192 statement? This flag is propagated down the access tree. */
193 unsigned grp_assignment_write : 1;
194
195 /* Does this group contain a read access through a scalar type? This flag is
196 not propagated in the access tree in any direction. */
197 unsigned grp_scalar_read : 1;
198
199 /* Does this group contain a write access through a scalar type? This flag
200 is not propagated in the access tree in any direction. */
201 unsigned grp_scalar_write : 1;
202
203 /* Is this access an artificial one created to scalarize some record
204 entirely? */
205 unsigned grp_total_scalarization : 1;
206
207 /* Other passes of the analysis use this bit to make function
208 analyze_access_subtree create scalar replacements for this group if
209 possible. */
210 unsigned grp_hint : 1;
211
212 /* Is the subtree rooted in this access fully covered by scalar
213 replacements? */
214 unsigned grp_covered : 1;
215
216 /* If set to true, this access and all below it in an access tree must not be
217 scalarized. */
218 unsigned grp_unscalarizable_region : 1;
219
220 /* Whether data have been written to parts of the aggregate covered by this
221 access which are not to be scalarized. This flag is propagated up in the
222 access tree. */
223 unsigned grp_unscalarized_data : 1;
224
225 /* Does this access and/or group contain a write access through a
226 BIT_FIELD_REF? */
227 unsigned grp_partial_lhs : 1;
228
229 /* Set when a scalar replacement should be created for this variable. We do
230 the decision and creation at different places because create_tmp_var
231 cannot be called from within FOR_EACH_REFERENCED_VAR. */
232 unsigned grp_to_be_replaced : 1;
233
234 /* Should TREE_NO_WARNING of a replacement be set? */
235 unsigned grp_no_warning : 1;
236
237 /* Is it possible that the group refers to data which might be (directly or
238 otherwise) modified? */
239 unsigned grp_maybe_modified : 1;
240
241 /* Set when this is a representative of a pointer to scalar (i.e. by
242 reference) parameter which we consider for turning into a plain scalar
243 (i.e. a by value parameter). */
244 unsigned grp_scalar_ptr : 1;
245
246 /* Set when we discover that this pointer is not safe to dereference in the
247 caller. */
248 unsigned grp_not_necessarilly_dereferenced : 1;
249 };
250
251 typedef struct access *access_p;
252
253 DEF_VEC_P (access_p);
254 DEF_VEC_ALLOC_P (access_p, heap);
255
256 /* Alloc pool for allocating access structures. */
257 static alloc_pool access_pool;
258
259 /* A structure linking lhs and rhs accesses from an aggregate assignment. They
260 are used to propagate subaccesses from rhs to lhs as long as they don't
261 conflict with what is already there. */
262 struct assign_link
263 {
264 struct access *lacc, *racc;
265 struct assign_link *next;
266 };
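
/* Illustrative sketch (hypothetical variables): for an assignment between two
   candidate aggregates such as

     struct S a, b;
     ...
     a = b;

   build_accesses_from_assign creates an access for the LHS and one for the
   RHS and connects them with an assign_link attached to the list of the RHS
   access.  During analysis, subaccesses known for B (say b.f1 and b.f2) are
   propagated across the link so that matching subaccesses of A exist too,
   which later allows the assignment to be rewritten as a series of scalar
   copies.  */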
267
268 /* Alloc pool for allocating assign link structures. */
269 static alloc_pool link_pool;
270
271 /* Base (tree) -> Vector (VEC(access_p,heap) *) map. */
272 static struct pointer_map_t *base_access_vec;
273
274 /* Bitmap of candidates. */
275 static bitmap candidate_bitmap;
276
277 /* Bitmap of candidates which we should try to entirely scalarize away and
278 those which cannot be (because they are and need to be used as a whole). */
279 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
280
281 /* Obstack for creation of fancy names. */
282 static struct obstack name_obstack;
283
284 /* Head of a linked list of accesses that need to have their subaccesses
285 propagated to their assignment counterparts. */
286 static struct access *work_queue_head;
287
288 /* Number of parameters of the analyzed function when doing early ipa SRA. */
289 static int func_param_count;
290
291 /* scan_function sets the following to true if it encounters a call to
292 __builtin_apply_args. */
293 static bool encountered_apply_args;
294
295 /* Set by scan_function when it finds a recursive call. */
296 static bool encountered_recursive_call;
297
298 /* Set by scan_function when it finds a recursive call with fewer actual
299 arguments than formal parameters. */
300 static bool encountered_unchangable_recursive_call;
301
302 /* This is a table in which for each basic block and parameter there is a
303 distance (offset + size) in that parameter which is dereferenced and
304 accessed in that BB. */
305 static HOST_WIDE_INT *bb_dereferences;
306 /* Bitmap of BBs that can cause the function to "stop" progressing by
307 returning, throwing externally, looping infinitely or calling a function
308 which might abort, etc. */
309 static bitmap final_bbs;
310
311 /* Representative of no accesses at all. */
312 static struct access no_accesses_representant;
313
314 /* Predicate to test the special value. */
315
316 static inline bool
317 no_accesses_p (struct access *access)
318 {
319 return access == &no_accesses_representant;
320 }
321
322 /* Counters describing the work done by the pass, used for statistics
323 reporting at the end of the pass. The individual fields are documented
324 below. */
325
326 static struct
327 {
328 /* Number of processed aggregates is readily available in
329 analyze_all_variable_accesses and so is not stored here. */
330
331 /* Number of created scalar replacements. */
332 int replacements;
333
334 /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
335 expression. */
336 int exprs;
337
338 /* Number of statements created by generate_subtree_copies. */
339 int subtree_copies;
340
341 /* Number of statements created by load_assign_lhs_subreplacements. */
342 int subreplacements;
343
344 /* Number of times sra_modify_assign has deleted a statement. */
345 int deleted;
346
347 /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
348 RHS separately due to type conversions or nonexistent matching
349 references. */
350 int separate_lhs_rhs_handling;
351
352 /* Number of parameters that were removed because they were unused. */
353 int deleted_unused_parameters;
354
355 /* Number of scalars passed as parameters by reference that have been
356 converted to be passed by value. */
357 int scalar_by_ref_to_by_val;
358
359 /* Number of aggregate parameters that were replaced by one or more of their
360 components. */
361 int aggregate_params_reduced;
362
363 /* Number of components created when splitting aggregate parameters. */
364 int param_reductions_created;
365 } sra_stats;
366
/* Dump contents of ACCESS to file F in a human friendly way. If GRP is true,
representative fields are dumped, otherwise those which only describe the
individual access are. */

367 static void
368 dump_access (FILE *f, struct access *access, bool grp)
369 {
370 fprintf (f, "access { ");
371 fprintf (f, "base = (%d)'", DECL_UID (access->base));
372 print_generic_expr (f, access->base, 0);
373 fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
374 fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
375 fprintf (f, ", expr = ");
376 print_generic_expr (f, access->expr, 0);
377 fprintf (f, ", type = ");
378 print_generic_expr (f, access->type, 0);
379 if (grp)
380 fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
381 "grp_assignment_write = %d, grp_scalar_read = %d, "
382 "grp_scalar_write = %d, grp_total_scalarization = %d, "
383 "grp_hint = %d, grp_covered = %d, "
384 "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
385 "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
386 "grp_maybe_modified = %d, "
387 "grp_not_necessarilly_dereferenced = %d\n",
388 access->grp_read, access->grp_write, access->grp_assignment_read,
389 access->grp_assignment_write, access->grp_scalar_read,
390 access->grp_scalar_write, access->grp_total_scalarization,
391 access->grp_hint, access->grp_covered,
392 access->grp_unscalarizable_region, access->grp_unscalarized_data,
393 access->grp_partial_lhs, access->grp_to_be_replaced,
394 access->grp_maybe_modified,
395 access->grp_not_necessarilly_dereferenced);
396 else
397 fprintf (f, ", write = %d, grp_total_scalarization = %d, "
398 "grp_partial_lhs = %d\n",
399 access->write, access->grp_total_scalarization,
400 access->grp_partial_lhs);
401 }
402
403 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
404
405 static void
406 dump_access_tree_1 (FILE *f, struct access *access, int level)
407 {
408 do
409 {
410 int i;
411
412 for (i = 0; i < level; i++)
413 fputs ("* ", f);
414
415 dump_access (f, access, true);
416
417 if (access->first_child)
418 dump_access_tree_1 (f, access->first_child, level + 1);
419
420 access = access->next_sibling;
421 }
422 while (access);
423 }
424
425 /* Dump all access trees for a variable, given the pointer to the first root in
426 ACCESS. */
427
428 static void
429 dump_access_tree (FILE *f, struct access *access)
430 {
431 for (; access; access = access->next_grp)
432 dump_access_tree_1 (f, access, 0);
433 }
434
435 /* Return true iff ACC is non-NULL and has subaccesses. */
436
437 static inline bool
438 access_has_children_p (struct access *acc)
439 {
440 return acc && acc->first_child;
441 }
442
443 /* Return true iff ACC is (partly) covered by at least one replacement. */
444
445 static bool
446 access_has_replacements_p (struct access *acc)
447 {
448 struct access *child;
449 if (acc->grp_to_be_replaced)
450 return true;
451 for (child = acc->first_child; child; child = child->next_sibling)
452 if (access_has_replacements_p (child))
453 return true;
454 return false;
455 }
456
457 /* Return a vector of pointers to accesses for the variable given in BASE or
458 NULL if there is none. */
459
460 static VEC (access_p, heap) *
461 get_base_access_vector (tree base)
462 {
463 void **slot;
464
465 slot = pointer_map_contains (base_access_vec, base);
466 if (!slot)
467 return NULL;
468 else
469 return *(VEC (access_p, heap) **) slot;
470 }
471
472 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
473 in ACCESS. Return NULL if it cannot be found. */
474
475 static struct access *
476 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
477 HOST_WIDE_INT size)
478 {
479 while (access && (access->offset != offset || access->size != size))
480 {
481 struct access *child = access->first_child;
482
483 while (child && (child->offset + child->size <= offset))
484 child = child->next_sibling;
485 access = child;
486 }
487
488 return access;
489 }
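
/* A worked example of the descent above (using the illustrative <offset, size>
   tree sketched near struct access): looking for <32, 32>, i.e. o.in.b, while
   starting at the root <0, 96>, the function descends into the child <0, 64>
   (o.in), skips its first child <0, 32> because 0 + 32 <= 32, lands on
   <32, 32> and returns it.  */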
490
491 /* Return the first group representative for BASE or NULL if none exists. */
492
493 static struct access *
494 get_first_repr_for_decl (tree base)
495 {
496 VEC (access_p, heap) *access_vec;
497
498 access_vec = get_base_access_vector (base);
499 if (!access_vec)
500 return NULL;
501
502 return VEC_index (access_p, access_vec, 0);
503 }
504
505 /* Find an access representative for the variable BASE and given OFFSET and
506 SIZE. Requires that access trees have already been built. Return NULL if
507 it cannot be found. */
508
509 static struct access *
510 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
511 HOST_WIDE_INT size)
512 {
513 struct access *access;
514
515 access = get_first_repr_for_decl (base);
516 while (access && (access->offset + access->size <= offset))
517 access = access->next_grp;
518 if (!access)
519 return NULL;
520
521 return find_access_in_subtree (access, offset, size);
522 }
523
524 /* Add LINK to the linked list of assign links of RACC. */
525 static void
526 add_link_to_rhs (struct access *racc, struct assign_link *link)
527 {
528 gcc_assert (link->racc == racc);
529
530 if (!racc->first_link)
531 {
532 gcc_assert (!racc->last_link);
533 racc->first_link = link;
534 }
535 else
536 racc->last_link->next = link;
537
538 racc->last_link = link;
539 link->next = NULL;
540 }
541
542 /* Move all link structures from the linked list in OLD_RACC to the linked list
543 in NEW_RACC. */
544 static void
545 relink_to_new_repr (struct access *new_racc, struct access *old_racc)
546 {
547 if (!old_racc->first_link)
548 {
549 gcc_assert (!old_racc->last_link);
550 return;
551 }
552
553 if (new_racc->first_link)
554 {
555 gcc_assert (!new_racc->last_link->next);
556 gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
557
558 new_racc->last_link->next = old_racc->first_link;
559 new_racc->last_link = old_racc->last_link;
560 }
561 else
562 {
563 gcc_assert (!new_racc->last_link);
564
565 new_racc->first_link = old_racc->first_link;
566 new_racc->last_link = old_racc->last_link;
567 }
568 old_racc->first_link = old_racc->last_link = NULL;
569 }
570
571 /* Add ACCESS to the work queue (which is actually a stack). */
572
573 static void
574 add_access_to_work_queue (struct access *access)
575 {
576 if (!access->grp_queued)
577 {
578 gcc_assert (!access->next_queued);
579 access->next_queued = work_queue_head;
580 access->grp_queued = 1;
581 work_queue_head = access;
582 }
583 }
584
585 /* Pop an access from the work queue, and return it, assuming there is one. */
586
587 static struct access *
588 pop_access_from_work_queue (void)
589 {
590 struct access *access = work_queue_head;
591
592 work_queue_head = access->next_queued;
593 access->next_queued = NULL;
594 access->grp_queued = 0;
595 return access;
596 }
597
598
599 /* Allocate necessary structures. */
600
601 static void
602 sra_initialize (void)
603 {
604 candidate_bitmap = BITMAP_ALLOC (NULL);
605 should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
606 cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
607 gcc_obstack_init (&name_obstack);
608 access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
609 link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
610 base_access_vec = pointer_map_create ();
611 memset (&sra_stats, 0, sizeof (sra_stats));
612 encountered_apply_args = false;
613 encountered_recursive_call = false;
614 encountered_unchangable_recursive_call = false;
615 }
616
617 /* Hook fed to pointer_map_traverse; deallocates the stored vectors. */
618
619 static bool
620 delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
621 void *data ATTRIBUTE_UNUSED)
622 {
623 VEC (access_p, heap) *access_vec;
624 access_vec = (VEC (access_p, heap) *) *value;
625 VEC_free (access_p, heap, access_vec);
626
627 return true;
628 }
629
630 /* Deallocate all general structures. */
631
632 static void
633 sra_deinitialize (void)
634 {
635 BITMAP_FREE (candidate_bitmap);
636 BITMAP_FREE (should_scalarize_away_bitmap);
637 BITMAP_FREE (cannot_scalarize_away_bitmap);
638 free_alloc_pool (access_pool);
639 free_alloc_pool (link_pool);
640 obstack_free (&name_obstack, NULL);
641
642 pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
643 pointer_map_destroy (base_access_vec);
644 }
645
646 /* Remove DECL from candidates for SRA and write REASON to the dump file if
647 there is one. */
648 static void
649 disqualify_candidate (tree decl, const char *reason)
650 {
651 bitmap_clear_bit (candidate_bitmap, DECL_UID (decl));
652
653 if (dump_file && (dump_flags & TDF_DETAILS))
654 {
655 fprintf (dump_file, "! Disqualifying ");
656 print_generic_expr (dump_file, decl, 0);
657 fprintf (dump_file, " - %s\n", reason);
658 }
659 }
660
661 /* Return true iff the type contains a field or an element which does not allow
662 scalarization. */
663
664 static bool
665 type_internals_preclude_sra_p (tree type, const char **msg)
666 {
667 tree fld;
668 tree et;
669
670 switch (TREE_CODE (type))
671 {
672 case RECORD_TYPE:
673 case UNION_TYPE:
674 case QUAL_UNION_TYPE:
675 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
676 if (TREE_CODE (fld) == FIELD_DECL)
677 {
678 tree ft = TREE_TYPE (fld);
679
680 if (TREE_THIS_VOLATILE (fld))
681 {
682 *msg = "volatile structure field";
683 return true;
684 }
685 if (!DECL_FIELD_OFFSET (fld))
686 {
687 *msg = "no structure field offset";
688 return true;
689 }
690 if (!DECL_SIZE (fld))
691 {
692 *msg = "zero structure field size";
693 return true;
694 }
695 if (!host_integerp (DECL_FIELD_OFFSET (fld), 1))
696 {
697 *msg = "structure field offset not fixed";
698 return true;
699 }
700 if (!host_integerp (DECL_SIZE (fld), 1))
701 {
702 *msg = "structure field size not fixed";
703 return true;
704 }
705 if (AGGREGATE_TYPE_P (ft)
706 && int_bit_position (fld) % BITS_PER_UNIT != 0)
707 {
708 *msg = "structure field is bit field";
709 return true;
710 }
711
712 if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
713 return true;
714 }
715
716 return false;
717
718 case ARRAY_TYPE:
719 et = TREE_TYPE (type);
720
721 if (TYPE_VOLATILE (et))
722 {
723 *msg = "element type is volatile";
724 return true;
725 }
726
727 if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
728 return true;
729
730 return false;
731
732 default:
733 return false;
734 }
735 }
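
/* Illustrative examples (hypothetical types) of what the predicate above
   rejects: a volatile field, as in

     struct A { volatile int v; int x; };

   trips the "volatile structure field" check, and a flexible array member,
   as in

     struct B { int n; int data[]; };

   has no known field size and is rejected as well.  Ordinary structs with
   complete, constant-sized, non-volatile fields pass the check.  */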
736
737 /* If T is an SSA_NAME, return NULL if it is not a default def or return its
738 base variable if it is. Return T if it is not an SSA_NAME. */
739
740 static tree
741 get_ssa_base_param (tree t)
742 {
743 if (TREE_CODE (t) == SSA_NAME)
744 {
745 if (SSA_NAME_IS_DEFAULT_DEF (t))
746 return SSA_NAME_VAR (t);
747 else
748 return NULL_TREE;
749 }
750 return t;
751 }
752
753 /* Mark a dereference of BASE of distance DIST in the basic block that STMT
754 belongs to, unless the BB has already been marked as potentially
755 final. */
756
757 static void
758 mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
759 {
760 basic_block bb = gimple_bb (stmt);
761 int idx, parm_index = 0;
762 tree parm;
763
764 if (bitmap_bit_p (final_bbs, bb->index))
765 return;
766
767 for (parm = DECL_ARGUMENTS (current_function_decl);
768 parm && parm != base;
769 parm = DECL_CHAIN (parm))
770 parm_index++;
771
772 gcc_assert (parm_index < func_param_count);
773
774 idx = bb->index * func_param_count + parm_index;
775 if (bb_dereferences[idx] < dist)
776 bb_dereferences[idx] = dist;
777 }
778
779 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
780 the three fields. Also add it to the vector of accesses corresponding to
781 the base. Finally, return the new access. */
782
783 static struct access *
784 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
785 {
786 VEC (access_p, heap) *vec;
787 struct access *access;
788 void **slot;
789
790 access = (struct access *) pool_alloc (access_pool);
791 memset (access, 0, sizeof (struct access));
792 access->base = base;
793 access->offset = offset;
794 access->size = size;
795
796 slot = pointer_map_contains (base_access_vec, base);
797 if (slot)
798 vec = (VEC (access_p, heap) *) *slot;
799 else
800 vec = VEC_alloc (access_p, heap, 32);
801
802 VEC_safe_push (access_p, heap, vec, access);
803
804 *((struct VEC (access_p,heap) **)
805 pointer_map_insert (base_access_vec, base)) = vec;
806
807 return access;
808 }
809
810 /* Create and insert access for EXPR. Return created access, or NULL if it is
811 not possible. */
812
813 static struct access *
814 create_access (tree expr, gimple stmt, bool write)
815 {
816 struct access *access;
817 HOST_WIDE_INT offset, size, max_size;
818 tree base = expr;
819 bool ptr, unscalarizable_region = false;
820
821 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
822
823 if (sra_mode == SRA_MODE_EARLY_IPA
824 && TREE_CODE (base) == MEM_REF)
825 {
826 base = get_ssa_base_param (TREE_OPERAND (base, 0));
827 if (!base)
828 return NULL;
829 ptr = true;
830 }
831 else
832 ptr = false;
833
834 if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
835 return NULL;
836
837 if (sra_mode == SRA_MODE_EARLY_IPA)
838 {
839 if (size < 0 || size != max_size)
840 {
841 disqualify_candidate (base, "Encountered a variable sized access.");
842 return NULL;
843 }
844 if (TREE_CODE (expr) == COMPONENT_REF
845 && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
846 {
847 disqualify_candidate (base, "Encountered a bit-field access.");
848 return NULL;
849 }
850 gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);
851
852 if (ptr)
853 mark_parm_dereference (base, offset + size, stmt);
854 }
855 else
856 {
857 if (size != max_size)
858 {
859 size = max_size;
860 unscalarizable_region = true;
861 }
862 if (size < 0)
863 {
864 disqualify_candidate (base, "Encountered an unconstrained access.");
865 return NULL;
866 }
867 }
868
869 access = create_access_1 (base, offset, size);
870 access->expr = expr;
871 access->type = TREE_TYPE (expr);
872 access->write = write;
873 access->grp_unscalarizable_region = unscalarizable_region;
874 access->stmt = stmt;
875
876 if (TREE_CODE (expr) == COMPONENT_REF
877 && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
878 access->non_addressable = 1;
879
880 return access;
881 }
882
883
884 /* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
885 register types or (recursively) records with only these two kinds of fields.
886 It also returns false if any of these records contains a bit-field. */
887
888 static bool
889 type_consists_of_records_p (tree type)
890 {
891 tree fld;
892
893 if (TREE_CODE (type) != RECORD_TYPE)
894 return false;
895
896 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
897 if (TREE_CODE (fld) == FIELD_DECL)
898 {
899 tree ft = TREE_TYPE (fld);
900
901 if (DECL_BIT_FIELD (fld))
902 return false;
903
904 if (!is_gimple_reg_type (ft)
905 && !type_consists_of_records_p (ft))
906 return false;
907 }
908
909 return true;
910 }
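
/* For illustration (made-up types):

     struct ok   { int i; struct { double d; long l; } nested; };
     struct bad1 { int i : 5; int j; };
     struct bad2 { int arr[4]; };

   type_consists_of_records_p accepts struct ok, rejects struct bad1 because
   of the bit-field and rejects struct bad2 because an array field is neither
   a gimple register type nor a record.  */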
911
912 /* Create total_scalarization accesses for all scalar type fields in DECL, which
913 must be of a RECORD_TYPE conforming to type_consists_of_records_p. BASE
914 must be the top-most VAR_DECL representing the variable, OFFSET must be the
915 offset of DECL within BASE. REF must be the memory reference expression for
916 the given decl. */
917
918 static void
919 completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
920 tree ref)
921 {
922 tree fld, decl_type = TREE_TYPE (decl);
923
924 for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
925 if (TREE_CODE (fld) == FIELD_DECL)
926 {
927 HOST_WIDE_INT pos = offset + int_bit_position (fld);
928 tree ft = TREE_TYPE (fld);
929 tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
930 NULL_TREE);
931
932 if (is_gimple_reg_type (ft))
933 {
934 struct access *access;
935 HOST_WIDE_INT size;
936
937 size = tree_low_cst (DECL_SIZE (fld), 1);
938 access = create_access_1 (base, pos, size);
939 access->expr = nref;
940 access->type = ft;
941 access->grp_total_scalarization = 1;
942 /* Accesses for intraprocedural SRA can have their stmt NULL. */
943 }
944 else
945 completely_scalarize_record (base, fld, pos, nref);
946 }
947 }
948
949 /* Create total_scalarization accesses for all scalar type fields in VAR and
950 for VAR as a whole. VAR must be of a RECORD_TYPE conforming to
951 type_consists_of_records_p. */
952
953 static void
954 completely_scalarize_var (tree var)
955 {
956 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (var), 1);
957 struct access *access;
958
959 access = create_access_1 (var, 0, size);
960 access->expr = var;
961 access->type = TREE_TYPE (var);
962 access->grp_total_scalarization = 1;
963
964 completely_scalarize_record (var, var, 0, var);
965 }
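
/* Continuing the illustrative struct outer example from above, calling
   completely_scalarize_var on a variable O of that type creates a
   total-scalarization access for the whole of O plus one access per scalar
   leaf, roughly:

     <0, 96>   o        the whole variable
     <0, 32>   o.in.a
     <32, 32>  o.in.b
     <64, 32>  o.c

   with offsets and sizes in bits, again assuming a 32-bit int.  */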
966
967 /* Search the given tree for a declaration by skipping handled components and
968 exclude it from the candidates. */
969
970 static void
971 disqualify_base_of_expr (tree t, const char *reason)
972 {
973 t = get_base_address (t);
974 if (sra_mode == SRA_MODE_EARLY_IPA
975 && TREE_CODE (t) == MEM_REF)
976 t = get_ssa_base_param (TREE_OPERAND (t, 0));
977
978 if (t && DECL_P (t))
979 disqualify_candidate (t, reason);
980 }
981
982 /* Scan expression EXPR and create access structures for all accesses to
983 candidates for scalarization. Return the created access or NULL if none is
984 created. */
985
986 static struct access *
987 build_access_from_expr_1 (tree expr, gimple stmt, bool write)
988 {
989 struct access *ret = NULL;
990 bool partial_ref;
991
992 if (TREE_CODE (expr) == BIT_FIELD_REF
993 || TREE_CODE (expr) == IMAGPART_EXPR
994 || TREE_CODE (expr) == REALPART_EXPR)
995 {
996 expr = TREE_OPERAND (expr, 0);
997 partial_ref = true;
998 }
999 else
1000 partial_ref = false;
1001
1002 /* We need to dive through V_C_Es in order to get the size of the converted
1003 operand and not the result type. Ada produces such statements. We are also
1004 capable of handling the topmost V_C_E but not any of those buried in other
1005 handled components. */
1006 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
1007 expr = TREE_OPERAND (expr, 0);
1008
1009 if (contains_view_convert_expr_p (expr))
1010 {
1011 disqualify_base_of_expr (expr, "V_C_E under a different handled "
1012 "component.");
1013 return NULL;
1014 }
1015
1016 switch (TREE_CODE (expr))
1017 {
1018 case MEM_REF:
1019 if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
1020 && sra_mode != SRA_MODE_EARLY_IPA)
1021 return NULL;
1022 /* fall through */
1023 case VAR_DECL:
1024 case PARM_DECL:
1025 case RESULT_DECL:
1026 case COMPONENT_REF:
1027 case ARRAY_REF:
1028 case ARRAY_RANGE_REF:
1029 ret = create_access (expr, stmt, write);
1030 break;
1031
1032 default:
1033 break;
1034 }
1035
1036 if (write && partial_ref && ret)
1037 ret->grp_partial_lhs = 1;
1038
1039 return ret;
1040 }
1041
1042 /* Scan expression EXPR and create access structures for all accesses to
1043 candidates for scalarization. Return true if any access has been inserted.
1044 STMT must be the statement from which the expression is taken, WRITE must be
1045 true if the expression is a store and false otherwise. */
1046
1047 static bool
1048 build_access_from_expr (tree expr, gimple stmt, bool write)
1049 {
1050 struct access *access;
1051
1052 access = build_access_from_expr_1 (expr, stmt, write);
1053 if (access)
1054 {
1055 /* This means the aggregate is accessed as a whole in a way other than an
1056 assign statement and thus cannot be removed even if we had a scalar
1057 replacement for everything. */
1058 if (cannot_scalarize_away_bitmap)
1059 bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1060 return true;
1061 }
1062 return false;
1063 }
1064
1065 /* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
1066 modes in which it matters; return true iff they have been disqualified. RHS
1067 may be NULL, in that case ignore it. If we scalarize an aggregate in
1068 intra-SRA we may need to add statements after each statement. This is not
1069 possible if a statement unconditionally has to end the basic block. */
1070 static bool
1071 disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
1072 {
1073 if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1074 && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
1075 {
1076 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1077 if (rhs)
1078 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1079 return true;
1080 }
1081 return false;
1082 }
1083
1084 /* Return true if EXP is a memory reference less aligned than ALIGN. This is
1085 invoked only on strict-alignment targets. */
1086
1087 static bool
1088 tree_non_aligned_mem_p (tree exp, unsigned int align)
1089 {
1090 unsigned int exp_align;
1091
1092 if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
1093 exp = TREE_OPERAND (exp, 0);
1094
1095 if (TREE_CODE (exp) == SSA_NAME || is_gimple_min_invariant (exp))
1096 return false;
1097
1098 /* get_object_alignment will fall back to BITS_PER_UNIT if it cannot
1099 compute an explicit alignment. Pretend that dereferenced pointers
1100 are always aligned on strict-alignment targets. */
1101 if (TREE_CODE (exp) == MEM_REF || TREE_CODE (exp) == TARGET_MEM_REF)
1102 exp_align = get_object_or_type_alignment (exp);
1103 else
1104 exp_align = get_object_alignment (exp);
1105
1106 if (exp_align < align)
1107 return true;
1108
1109 return false;
1110 }
1111
1112 /* Return true if EXP is a memory reference less aligned than what the access
1113 ACC would require. This is invoked only on strict-alignment targets. */
1114
1115 static bool
1116 tree_non_aligned_mem_for_access_p (tree exp, struct access *acc)
1117 {
1118 unsigned int acc_align;
1119
1120 /* The alignment of the access is that of its expression. However, it may
1121 have been artificially increased, e.g. by a local alignment promotion,
1122 so we cap it to the alignment of the type of the base, on the grounds
1123 that valid sub-accesses cannot be more aligned than that. */
1124 acc_align = get_object_alignment (acc->expr);
1125 if (acc->base && acc_align > TYPE_ALIGN (TREE_TYPE (acc->base)))
1126 acc_align = TYPE_ALIGN (TREE_TYPE (acc->base));
1127
1128 return tree_non_aligned_mem_p (exp, acc_align);
1129 }
1130
1131 /* Scan expressions occurring in STMT, create access structures for all accesses
1132 to candidates for scalarization and remove those candidates which occur in
1133 statements or expressions that prevent them from being split apart. Return
1134 true if any access has been inserted. */
1135
1136 static bool
1137 build_accesses_from_assign (gimple stmt)
1138 {
1139 tree lhs, rhs;
1140 struct access *lacc, *racc;
1141
1142 if (!gimple_assign_single_p (stmt)
1143 /* Scope clobbers don't influence scalarization. */
1144 || gimple_clobber_p (stmt))
1145 return false;
1146
1147 lhs = gimple_assign_lhs (stmt);
1148 rhs = gimple_assign_rhs1 (stmt);
1149
1150 if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
1151 return false;
1152
1153 racc = build_access_from_expr_1 (rhs, stmt, false);
1154 lacc = build_access_from_expr_1 (lhs, stmt, true);
1155
1156 if (lacc)
1157 {
1158 lacc->grp_assignment_write = 1;
1159 if (STRICT_ALIGNMENT && tree_non_aligned_mem_for_access_p (rhs, lacc))
1160 lacc->grp_unscalarizable_region = 1;
1161 }
1162
1163 if (racc)
1164 {
1165 racc->grp_assignment_read = 1;
1166 if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
1167 && !is_gimple_reg_type (racc->type))
1168 bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
1169 if (STRICT_ALIGNMENT && tree_non_aligned_mem_for_access_p (lhs, racc))
1170 racc->grp_unscalarizable_region = 1;
1171 }
1172
1173 if (lacc && racc
1174 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1175 && !lacc->grp_unscalarizable_region
1176 && !racc->grp_unscalarizable_region
1177 && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1178 /* FIXME: Turn the following line into an assert after PR 40058 is
1179 fixed. */
1180 && lacc->size == racc->size
1181 && useless_type_conversion_p (lacc->type, racc->type))
1182 {
1183 struct assign_link *link;
1184
1185 link = (struct assign_link *) pool_alloc (link_pool);
1186 memset (link, 0, sizeof (struct assign_link));
1187
1188 link->lacc = lacc;
1189 link->racc = racc;
1190
1191 add_link_to_rhs (racc, link);
1192 }
1193
1194 return lacc || racc;
1195 }
1196
1197 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1198 GIMPLE_ASM operands with memory constraints which cannot be scalarized. */
1199
1200 static bool
1201 asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
1202 void *data ATTRIBUTE_UNUSED)
1203 {
1204 op = get_base_address (op);
1205 if (op
1206 && DECL_P (op))
1207 disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1208
1209 return false;
1210 }
1211
1212 /* Return true iff callsite CALL has at least as many actual arguments as there
1213 are formal parameters of the function currently processed by IPA-SRA. */
1214
1215 static inline bool
1216 callsite_has_enough_arguments_p (gimple call)
1217 {
1218 return gimple_call_num_args (call) >= (unsigned) func_param_count;
1219 }
1220
1221 /* Scan function and look for interesting expressions and create access
1222 structures for them. Return true iff any access is created. */
1223
1224 static bool
1225 scan_function (void)
1226 {
1227 basic_block bb;
1228 bool ret = false;
1229
1230 FOR_EACH_BB (bb)
1231 {
1232 gimple_stmt_iterator gsi;
1233 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1234 {
1235 gimple stmt = gsi_stmt (gsi);
1236 tree t;
1237 unsigned i;
1238
1239 if (final_bbs && stmt_can_throw_external (stmt))
1240 bitmap_set_bit (final_bbs, bb->index);
1241 switch (gimple_code (stmt))
1242 {
1243 case GIMPLE_RETURN:
1244 t = gimple_return_retval (stmt);
1245 if (t != NULL_TREE)
1246 ret |= build_access_from_expr (t, stmt, false);
1247 if (final_bbs)
1248 bitmap_set_bit (final_bbs, bb->index);
1249 break;
1250
1251 case GIMPLE_ASSIGN:
1252 ret |= build_accesses_from_assign (stmt);
1253 break;
1254
1255 case GIMPLE_CALL:
1256 for (i = 0; i < gimple_call_num_args (stmt); i++)
1257 ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1258 stmt, false);
1259
1260 if (sra_mode == SRA_MODE_EARLY_IPA)
1261 {
1262 tree dest = gimple_call_fndecl (stmt);
1263 int flags = gimple_call_flags (stmt);
1264
1265 if (dest)
1266 {
1267 if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1268 && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1269 encountered_apply_args = true;
1270 if (cgraph_get_node (dest)
1271 == cgraph_get_node (current_function_decl))
1272 {
1273 encountered_recursive_call = true;
1274 if (!callsite_has_enough_arguments_p (stmt))
1275 encountered_unchangable_recursive_call = true;
1276 }
1277 }
1278
1279 if (final_bbs
1280 && (flags & (ECF_CONST | ECF_PURE)) == 0)
1281 bitmap_set_bit (final_bbs, bb->index);
1282 }
1283
1284 t = gimple_call_lhs (stmt);
1285 if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
1286 ret |= build_access_from_expr (t, stmt, true);
1287 break;
1288
1289 case GIMPLE_ASM:
1290 walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
1291 asm_visit_addr);
1292 if (final_bbs)
1293 bitmap_set_bit (final_bbs, bb->index);
1294
1295 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
1296 {
1297 t = TREE_VALUE (gimple_asm_input_op (stmt, i));
1298 ret |= build_access_from_expr (t, stmt, false);
1299 }
1300 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
1301 {
1302 t = TREE_VALUE (gimple_asm_output_op (stmt, i));
1303 ret |= build_access_from_expr (t, stmt, true);
1304 }
1305 break;
1306
1307 default:
1308 break;
1309 }
1310 }
1311 }
1312
1313 return ret;
1314 }
1315
1316 /* Comparison function for qsort; the array contains pointers to accesses. An
1317 access is considered smaller than another if it has a smaller offset or if the
1318 offsets are the same but its size is bigger. */
1319
1320 static int
1321 compare_access_positions (const void *a, const void *b)
1322 {
1323 const access_p *fp1 = (const access_p *) a;
1324 const access_p *fp2 = (const access_p *) b;
1325 const access_p f1 = *fp1;
1326 const access_p f2 = *fp2;
1327
1328 if (f1->offset != f2->offset)
1329 return f1->offset < f2->offset ? -1 : 1;
1330
1331 if (f1->size == f2->size)
1332 {
1333 if (f1->type == f2->type)
1334 return 0;
1335 /* Put any non-aggregate type before any aggregate type. */
1336 else if (!is_gimple_reg_type (f1->type)
1337 && is_gimple_reg_type (f2->type))
1338 return 1;
1339 else if (is_gimple_reg_type (f1->type)
1340 && !is_gimple_reg_type (f2->type))
1341 return -1;
1342 /* Put any complex or vector type before any other scalar type. */
1343 else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1344 && TREE_CODE (f1->type) != VECTOR_TYPE
1345 && (TREE_CODE (f2->type) == COMPLEX_TYPE
1346 || TREE_CODE (f2->type) == VECTOR_TYPE))
1347 return 1;
1348 else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1349 || TREE_CODE (f1->type) == VECTOR_TYPE)
1350 && TREE_CODE (f2->type) != COMPLEX_TYPE
1351 && TREE_CODE (f2->type) != VECTOR_TYPE)
1352 return -1;
1353 /* Put the integral type with the bigger precision first. */
1354 else if (INTEGRAL_TYPE_P (f1->type)
1355 && INTEGRAL_TYPE_P (f2->type))
1356 return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1357 /* Put any integral type with non-full precision last. */
1358 else if (INTEGRAL_TYPE_P (f1->type)
1359 && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
1360 != TYPE_PRECISION (f1->type)))
1361 return 1;
1362 else if (INTEGRAL_TYPE_P (f2->type)
1363 && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
1364 != TYPE_PRECISION (f2->type)))
1365 return -1;
1366 /* Stabilize the sort. */
1367 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1368 }
1369
1370 /* We want the bigger accesses first, thus the opposite operator in the next
1371 line: */
1372 return f1->size > f2->size ? -1 : 1;
1373 }
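
/* A small worked example (made-up accesses into the same base, shown as
   <offset, size, type>) of the order the comparison function produces:

     <0, 64, struct S>    smaller offsets first; for equal offsets the
     <0, 32, float>       bigger access comes first
     <0, 32, struct T>    for equal offset and size, scalar types precede
     <32, 32, int>        aggregates
     <64, 8, char>

   so that sort_and_splice_var_accesses can pick a scalar-typed access as the
   group representative whenever one exists.  */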
1374
1375
1376 /* Append the name of the declaration to the name obstack. A helper function for
1377 make_fancy_name. */
1378
1379 static void
1380 make_fancy_decl_name (tree decl)
1381 {
1382 char buffer[32];
1383
1384 tree name = DECL_NAME (decl);
1385 if (name)
1386 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1387 IDENTIFIER_LENGTH (name));
1388 else
1389 {
1390 sprintf (buffer, "D%u", DECL_UID (decl));
1391 obstack_grow (&name_obstack, buffer, strlen (buffer));
1392 }
1393 }
1394
1395 /* Helper for make_fancy_name. */
1396
1397 static void
1398 make_fancy_name_1 (tree expr)
1399 {
1400 char buffer[32];
1401 tree index;
1402
1403 if (DECL_P (expr))
1404 {
1405 make_fancy_decl_name (expr);
1406 return;
1407 }
1408
1409 switch (TREE_CODE (expr))
1410 {
1411 case COMPONENT_REF:
1412 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1413 obstack_1grow (&name_obstack, '$');
1414 make_fancy_decl_name (TREE_OPERAND (expr, 1));
1415 break;
1416
1417 case ARRAY_REF:
1418 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1419 obstack_1grow (&name_obstack, '$');
1420 /* Arrays with only one element may not have a constant as their
1421 index. */
1422 index = TREE_OPERAND (expr, 1);
1423 if (TREE_CODE (index) != INTEGER_CST)
1424 break;
1425 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1426 obstack_grow (&name_obstack, buffer, strlen (buffer));
1427 break;
1428
1429 case ADDR_EXPR:
1430 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1431 break;
1432
1433 case MEM_REF:
1434 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1435 if (!integer_zerop (TREE_OPERAND (expr, 1)))
1436 {
1437 obstack_1grow (&name_obstack, '$');
1438 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1439 TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1440 obstack_grow (&name_obstack, buffer, strlen (buffer));
1441 }
1442 break;
1443
1444 case BIT_FIELD_REF:
1445 case REALPART_EXPR:
1446 case IMAGPART_EXPR:
1447 gcc_unreachable (); /* we treat these as scalars. */
1448 break;
1449 default:
1450 break;
1451 }
1452 }
1453
1454 /* Create a human readable name for a replacement variable based on EXPR. */
1455
1456 static char *
1457 make_fancy_name (tree expr)
1458 {
1459 make_fancy_name_1 (expr);
1460 obstack_1grow (&name_obstack, '\0');
1461 return XOBFINISH (&name_obstack, char *);
1462 }
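
/* Illustrative examples of the names this produces (assuming declarations
   named o and arr; unnamed declarations contribute a "D<uid>" component
   instead):

     o.in.b        ->  o$in$b
     arr[3].field  ->  arr$3$field

   The fancy names are later used when naming the scalar replacement
   variables so that dumps remain readable.  */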
1463
1464 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1465 EXP_TYPE at the given OFFSET. If BASE is something for which
1466 get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
1467 to insert new statements either before or below the current one as specified
1468 by INSERT_AFTER. This function is not capable of handling bitfields. */
1469
1470 tree
1471 build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
1472 tree exp_type, gimple_stmt_iterator *gsi,
1473 bool insert_after)
1474 {
1475 tree prev_base = base;
1476 tree off;
1477 HOST_WIDE_INT base_offset;
1478 unsigned HOST_WIDE_INT misalign;
1479 unsigned int align;
1480
1481 gcc_checking_assert (offset % BITS_PER_UNIT == 0);
1482
1483 base = get_addr_base_and_unit_offset (base, &base_offset);
1484
1485 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1486 offset such as array[var_index]. */
1487 if (!base)
1488 {
1489 gimple stmt;
1490 tree tmp, addr;
1491
1492 gcc_checking_assert (gsi);
1493 tmp = create_tmp_reg (build_pointer_type (TREE_TYPE (prev_base)), NULL);
1494 add_referenced_var (tmp);
1495 tmp = make_ssa_name (tmp, NULL);
1496 addr = build_fold_addr_expr (unshare_expr (prev_base));
1497 STRIP_USELESS_TYPE_CONVERSION (addr);
1498 stmt = gimple_build_assign (tmp, addr);
1499 gimple_set_location (stmt, loc);
1500 SSA_NAME_DEF_STMT (tmp) = stmt;
1501 if (insert_after)
1502 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1503 else
1504 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1505 update_stmt (stmt);
1506
1507 off = build_int_cst (reference_alias_ptr_type (prev_base),
1508 offset / BITS_PER_UNIT);
1509 base = tmp;
1510 }
1511 else if (TREE_CODE (base) == MEM_REF)
1512 {
1513 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1514 base_offset + offset / BITS_PER_UNIT);
1515 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1516 base = unshare_expr (TREE_OPERAND (base, 0));
1517 }
1518 else
1519 {
1520 off = build_int_cst (reference_alias_ptr_type (base),
1521 base_offset + offset / BITS_PER_UNIT);
1522 base = build_fold_addr_expr (unshare_expr (base));
1523 }
1524
1525 /* If prev_base were always an originally performed access
1526 we could extract more optimistic alignment information
1527 by looking at the access mode. That would constrain the
1528 alignment of base + base_offset which we would need to
1529 adjust according to offset. */
1530 align = get_pointer_alignment_1 (base, &misalign);
1531 if (misalign == 0
1532 && (TREE_CODE (prev_base) == MEM_REF
1533 || TREE_CODE (prev_base) == TARGET_MEM_REF))
1534 align = MAX (align, TYPE_ALIGN (TREE_TYPE (prev_base)));
1535 misalign += (double_int_sext (tree_to_double_int (off),
1536 TYPE_PRECISION (TREE_TYPE (off))).low
1537 * BITS_PER_UNIT);
1538 misalign = misalign & (align - 1);
1539 if (misalign != 0)
1540 align = (misalign & -misalign);
1541 if (align < TYPE_ALIGN (exp_type))
1542 exp_type = build_aligned_type (exp_type, align);
1543
1544 return fold_build2_loc (loc, MEM_REF, exp_type, base, off);
1545 }
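
/* Sketch of the result (hypothetical input): for BASE being a local variable
   S of some record type, OFFSET 64 (bits) and EXP_TYPE int, the function
   builds roughly the equivalent of

     MEM[(int *) &s + 8B]

   i.e. a MEM_REF whose first operand is the address of S and whose second,
   constant operand carries the byte offset and the alias pointer type.  If
   the alignment that can be derived for the address is lower than that of
   EXP_TYPE, a correspondingly aligned variant of EXP_TYPE is used.  */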
1546
1547 DEF_VEC_ALLOC_P_STACK (tree);
1548 #define VEC_tree_stack_alloc(alloc) VEC_stack_alloc (tree, alloc)
1549
1550 /* Construct a memory reference to a part of an aggregate BASE at the given
1551 OFFSET and of the type of MODEL. In case this is a chain of references
1552 to a component, the function will replicate the chain of COMPONENT_REFs of
1553 the expression of MODEL to access it. GSI and INSERT_AFTER have the same
1554 meaning as in build_ref_for_offset. */
1555
1556 static tree
1557 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1558 struct access *model, gimple_stmt_iterator *gsi,
1559 bool insert_after)
1560 {
1561 tree type = model->type, t;
1562 VEC(tree,stack) *cr_stack = NULL;
1563
1564 if (TREE_CODE (model->expr) == COMPONENT_REF)
1565 {
1566 tree expr = model->expr;
1567
1568 /* Create a stack of the COMPONENT_REFs so later we can walk them in
1569 order from inner to outer. */
1570 cr_stack = VEC_alloc (tree, stack, 6);
1571
1572 do {
1573 tree field = TREE_OPERAND (expr, 1);
1574 tree cr_offset = component_ref_field_offset (expr);
1575 HOST_WIDE_INT bit_pos
1576 = tree_low_cst (cr_offset, 1) * BITS_PER_UNIT
1577 + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1578
1579 /* We can be called with a model different from the one associated
1580 with BASE so we need to avoid going up the chain too far. */
1581 if (offset - bit_pos < 0)
1582 break;
1583
1584 offset -= bit_pos;
1585 VEC_safe_push (tree, stack, cr_stack, expr);
1586
1587 expr = TREE_OPERAND (expr, 0);
1588 type = TREE_TYPE (expr);
1589 } while (TREE_CODE (expr) == COMPONENT_REF);
1590 }
1591
1592 t = build_ref_for_offset (loc, base, offset, type, gsi, insert_after);
1593
1594 if (TREE_CODE (model->expr) == COMPONENT_REF)
1595 {
1596 unsigned i;
1597 tree expr;
1598
1599 /* Now replicate the chain of COMPONENT_REFs from inner to outer. */
1600 FOR_EACH_VEC_ELT_REVERSE (tree, cr_stack, i, expr)
1601 {
1602 tree field = TREE_OPERAND (expr, 1);
1603 t = fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (field), t, field,
1604 TREE_OPERAND (expr, 2));
1605 }
1606
1607 VEC_free (tree, stack, cr_stack);
1608 }
1609
1610 return t;
1611 }
1612
1613 /* Construct a memory reference consisting of component_refs and array_refs to
1614 a part of an aggregate *RES (which is of type TYPE). The requested part
1615 should have type EXP_TYPE and be at the given OFFSET. This function might not
1616 succeed, it returns true when it does and only then *RES points to something
1617 meaningful. This function should be used only to build expressions that we
1618 might need to present to the user (e.g. in warnings). In all other situations,
1619 build_ref_for_model or build_ref_for_offset should be used instead. */
1620
1621 static bool
1622 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1623 tree exp_type)
1624 {
1625 while (1)
1626 {
1627 tree fld;
1628 tree tr_size, index, minidx;
1629 HOST_WIDE_INT el_size;
1630
1631 if (offset == 0 && exp_type
1632 && types_compatible_p (exp_type, type))
1633 return true;
1634
1635 switch (TREE_CODE (type))
1636 {
1637 case UNION_TYPE:
1638 case QUAL_UNION_TYPE:
1639 case RECORD_TYPE:
1640 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1641 {
1642 HOST_WIDE_INT pos, size;
1643 tree expr, *expr_ptr;
1644
1645 if (TREE_CODE (fld) != FIELD_DECL)
1646 continue;
1647
1648 pos = int_bit_position (fld);
1649 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1650 tr_size = DECL_SIZE (fld);
1651 if (!tr_size || !host_integerp (tr_size, 1))
1652 continue;
1653 size = tree_low_cst (tr_size, 1);
1654 if (size == 0)
1655 {
1656 if (pos != offset)
1657 continue;
1658 }
1659 else if (pos > offset || (pos + size) <= offset)
1660 continue;
1661
1662 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1663 NULL_TREE);
1664 expr_ptr = &expr;
1665 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1666 offset - pos, exp_type))
1667 {
1668 *res = expr;
1669 return true;
1670 }
1671 }
1672 return false;
1673
1674 case ARRAY_TYPE:
1675 tr_size = TYPE_SIZE (TREE_TYPE (type));
1676 if (!tr_size || !host_integerp (tr_size, 1))
1677 return false;
1678 el_size = tree_low_cst (tr_size, 1);
1679
1680 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1681 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1682 return false;
1683 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1684 if (!integer_zerop (minidx))
1685 index = int_const_binop (PLUS_EXPR, index, minidx);
1686 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1687 NULL_TREE, NULL_TREE);
1688 offset = offset % el_size;
1689 type = TREE_TYPE (type);
1690 break;
1691
1692 default:
1693 if (offset != 0)
1694 return false;
1695
1696 if (exp_type)
1697 return false;
1698 else
1699 return true;
1700 }
1701 }
1702 }
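
/* Worked example (using the illustrative struct outer from above): with *RES
   being a variable O of that type, TYPE being struct outer, OFFSET 32 and
   EXP_TYPE int, the function walks into the field IN (position 0, size 64),
   recurses with offset 32, selects field B there and returns true with *RES
   set to the equivalent of o.in.b.  */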
1703
1704 /* Return true iff TYPE is a stdarg va_list type. */
1705
1706 static inline bool
1707 is_va_list_type (tree type)
1708 {
1709 return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1710 }
1711
1712 /* Print a message to the dump file explaining why a variable was rejected. */
1713
1714 static void
1715 reject (tree var, const char *msg)
1716 {
1717 if (dump_file && (dump_flags & TDF_DETAILS))
1718 {
1719 fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1720 print_generic_expr (dump_file, var, 0);
1721 fprintf (dump_file, "\n");
1722 }
1723 }
1724
1725 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
1726 those variables whose type is suitable for scalarization. */
1727
1728 static bool
1729 find_var_candidates (void)
1730 {
1731 tree var, type;
1732 referenced_var_iterator rvi;
1733 bool ret = false;
1734 const char *msg;
1735
1736 FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
1737 {
1738 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
1739 continue;
1740 type = TREE_TYPE (var);
1741
1742 if (!AGGREGATE_TYPE_P (type))
1743 {
1744 reject (var, "not aggregate");
1745 continue;
1746 }
1747 if (needs_to_live_in_memory (var))
1748 {
1749 reject (var, "needs to live in memory");
1750 continue;
1751 }
1752 if (TREE_THIS_VOLATILE (var))
1753 {
1754 reject (var, "is volatile");
1755 continue;
1756 }
1757 if (!COMPLETE_TYPE_P (type))
1758 {
1759 reject (var, "has incomplete type");
1760 continue;
1761 }
1762 if (!host_integerp (TYPE_SIZE (type), 1))
1763 {
1764 reject (var, "type size not fixed");
1765 continue;
1766 }
1767 if (tree_low_cst (TYPE_SIZE (type), 1) == 0)
1768 {
1769 reject (var, "type size is zero");
1770 continue;
1771 }
1772 if (type_internals_preclude_sra_p (type, &msg))
1773 {
1774 reject (var, msg);
1775 continue;
1776 }
1777 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1778 we also want to schedule it rather late. Thus we ignore it in
1779 the early pass. */
1780 (sra_mode == SRA_MODE_EARLY_INTRA
1781 && is_va_list_type (type)))
1782 {
1783 reject (var, "is va_list");
1784 continue;
1785 }
1786
1787 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1788
1789 if (dump_file && (dump_flags & TDF_DETAILS))
1790 {
1791 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1792 print_generic_expr (dump_file, var, 0);
1793 fprintf (dump_file, "\n");
1794 }
1795 ret = true;
1796 }
1797
1798 return ret;
1799 }
1800
1801 /* Sort all accesses for the given variable, check for partial overlaps and
1802 return NULL if there are any. If there are none, pick a representative for
1803 each combination of offset and size and create a linked list out of them.
1804 Return the pointer to the first representative and make sure it is the first
1805 one in the vector of accesses. */
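/* Illustration only (offsets and sizes in bits): given accesses <0,32>,
   <0,32>, <32,32> and <0,64>, the two <0,32> accesses share a single
   representative and all of them nest within <0,64>.  An additional access
   such as <16,32> would partially overlap <0,32> and make this function
   return NULL.  */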
1806
1807 static struct access *
1808 sort_and_splice_var_accesses (tree var)
1809 {
1810 int i, j, access_count;
1811 struct access *res, **prev_acc_ptr = &res;
1812 VEC (access_p, heap) *access_vec;
1813 bool first = true;
1814 HOST_WIDE_INT low = -1, high = 0;
1815
1816 access_vec = get_base_access_vector (var);
1817 if (!access_vec)
1818 return NULL;
1819 access_count = VEC_length (access_p, access_vec);
1820
1821 /* Sort by <OFFSET, SIZE>. */
1822 VEC_qsort (access_p, access_vec, compare_access_positions);
1823
1824 i = 0;
1825 while (i < access_count)
1826 {
1827 struct access *access = VEC_index (access_p, access_vec, i);
1828 bool grp_write = access->write;
1829 bool grp_read = !access->write;
1830 bool grp_scalar_write = access->write
1831 && is_gimple_reg_type (access->type);
1832 bool grp_scalar_read = !access->write
1833 && is_gimple_reg_type (access->type);
1834 bool grp_assignment_read = access->grp_assignment_read;
1835 bool grp_assignment_write = access->grp_assignment_write;
1836 bool multiple_scalar_reads = false;
1837 bool total_scalarization = access->grp_total_scalarization;
1838 bool grp_partial_lhs = access->grp_partial_lhs;
1839 bool first_scalar = is_gimple_reg_type (access->type);
1840 bool unscalarizable_region = access->grp_unscalarizable_region;
1841
1842 if (first || access->offset >= high)
1843 {
1844 first = false;
1845 low = access->offset;
1846 high = access->offset + access->size;
1847 }
1848 else if (access->offset > low && access->offset + access->size > high)
1849 return NULL;
1850 else
1851 gcc_assert (access->offset >= low
1852 && access->offset + access->size <= high);
1853
1854 j = i + 1;
1855 while (j < access_count)
1856 {
1857 struct access *ac2 = VEC_index (access_p, access_vec, j);
1858 if (ac2->offset != access->offset || ac2->size != access->size)
1859 break;
1860 if (ac2->write)
1861 {
1862 grp_write = true;
1863 grp_scalar_write = (grp_scalar_write
1864 || is_gimple_reg_type (ac2->type));
1865 }
1866 else
1867 {
1868 grp_read = true;
1869 if (is_gimple_reg_type (ac2->type))
1870 {
1871 if (grp_scalar_read)
1872 multiple_scalar_reads = true;
1873 else
1874 grp_scalar_read = true;
1875 }
1876 }
1877 grp_assignment_read |= ac2->grp_assignment_read;
1878 grp_assignment_write |= ac2->grp_assignment_write;
1879 grp_partial_lhs |= ac2->grp_partial_lhs;
1880 unscalarizable_region |= ac2->grp_unscalarizable_region;
1881 total_scalarization |= ac2->grp_total_scalarization;
1882 relink_to_new_repr (access, ac2);
1883
1884 /* If there are both aggregate-type and scalar-type accesses with
1885 this combination of size and offset, the comparison function
1886 should have put the scalars first. */
1887 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
1888 ac2->group_representative = access;
1889 j++;
1890 }
1891
1892 i = j;
1893
1894 access->group_representative = access;
1895 access->grp_write = grp_write;
1896 access->grp_read = grp_read;
1897 access->grp_scalar_read = grp_scalar_read;
1898 access->grp_scalar_write = grp_scalar_write;
1899 access->grp_assignment_read = grp_assignment_read;
1900 access->grp_assignment_write = grp_assignment_write;
1901 access->grp_hint = multiple_scalar_reads || total_scalarization;
1902 access->grp_total_scalarization = total_scalarization;
1903 access->grp_partial_lhs = grp_partial_lhs;
1904 access->grp_unscalarizable_region = unscalarizable_region;
1905 if (access->first_link)
1906 add_access_to_work_queue (access);
1907
1908 *prev_acc_ptr = access;
1909 prev_acc_ptr = &access->next_grp;
1910 }
1911
1912 gcc_assert (res == VEC_index (access_p, access_vec, 0));
1913 return res;
1914 }
1915
1916 /* Create a variable for the given ACCESS, which determines its type, name and a
1917    few other properties.  Return the new variable declaration; the callers store
1918    it in ACCESS->replacement_decl.  */
1919
1920 static tree
1921 create_access_replacement (struct access *access, bool rename)
1922 {
1923 tree repl;
1924
1925 repl = create_tmp_var (access->type, "SR");
1926 add_referenced_var (repl);
1927 if (!access->grp_partial_lhs
1928 && rename)
1929 mark_sym_for_renaming (repl);
1930
1931 if (TREE_CODE (access->type) == COMPLEX_TYPE
1932 || TREE_CODE (access->type) == VECTOR_TYPE)
1933 {
1934 if (!access->grp_partial_lhs)
1935 DECL_GIMPLE_REG_P (repl) = 1;
1936 }
1937 else if (access->grp_partial_lhs
1938 && is_gimple_reg_type (access->type))
1939 TREE_ADDRESSABLE (repl) = 1;
1940
1941 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
1942 DECL_ARTIFICIAL (repl) = 1;
1943 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
1944
1945 if (DECL_NAME (access->base)
1946 && !DECL_IGNORED_P (access->base)
1947 && !DECL_ARTIFICIAL (access->base))
1948 {
1949 char *pretty_name = make_fancy_name (access->expr);
1950 tree debug_expr = unshare_expr (access->expr), d;
1951
1952 DECL_NAME (repl) = get_identifier (pretty_name);
1953 obstack_free (&name_obstack, pretty_name);
1954
1955 /* Get rid of any SSA_NAMEs embedded in debug_expr,
1956 as DECL_DEBUG_EXPR isn't considered when looking for still
1957 used SSA_NAMEs and thus they could be freed. All debug info
1958 generation cares is whether something is constant or variable
1959 and that get_ref_base_and_extent works properly on the
1960 expression. */
1961 for (d = debug_expr; handled_component_p (d); d = TREE_OPERAND (d, 0))
1962 switch (TREE_CODE (d))
1963 {
1964 case ARRAY_REF:
1965 case ARRAY_RANGE_REF:
1966 if (TREE_OPERAND (d, 1)
1967 && TREE_CODE (TREE_OPERAND (d, 1)) == SSA_NAME)
1968 TREE_OPERAND (d, 1) = SSA_NAME_VAR (TREE_OPERAND (d, 1));
1969 if (TREE_OPERAND (d, 3)
1970 && TREE_CODE (TREE_OPERAND (d, 3)) == SSA_NAME)
1971 TREE_OPERAND (d, 3) = SSA_NAME_VAR (TREE_OPERAND (d, 3));
1972 /* FALLTHRU */
1973 case COMPONENT_REF:
1974 if (TREE_OPERAND (d, 2)
1975 && TREE_CODE (TREE_OPERAND (d, 2)) == SSA_NAME)
1976 TREE_OPERAND (d, 2) = SSA_NAME_VAR (TREE_OPERAND (d, 2));
1977 break;
1978 default:
1979 break;
1980 }
1981 SET_DECL_DEBUG_EXPR (repl, debug_expr);
1982 DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
1983 if (access->grp_no_warning)
1984 TREE_NO_WARNING (repl) = 1;
1985 else
1986 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
1987 }
1988 else
1989 TREE_NO_WARNING (repl) = 1;
1990
1991 if (dump_file)
1992 {
1993 fprintf (dump_file, "Created a replacement for ");
1994 print_generic_expr (dump_file, access->base, 0);
1995 fprintf (dump_file, " offset: %u, size: %u: ",
1996 (unsigned) access->offset, (unsigned) access->size);
1997 print_generic_expr (dump_file, repl, 0);
1998 fprintf (dump_file, "\n");
1999 }
2000 sra_stats.replacements++;
2001
2002 return repl;
2003 }
2004
2005 /* Return ACCESS scalar replacement, create it if it does not exist yet. */
2006
2007 static inline tree
2008 get_access_replacement (struct access *access)
2009 {
2010 gcc_assert (access->grp_to_be_replaced);
2011
2012 if (!access->replacement_decl)
2013 access->replacement_decl = create_access_replacement (access, true);
2014 return access->replacement_decl;
2015 }
2016
2017 /* Return ACCESS scalar replacement, create it if it does not exist yet but do
2018 not mark it for renaming. */
2019
2020 static inline tree
2021 get_unrenamed_access_replacement (struct access *access)
2022 {
2023 gcc_assert (!access->grp_to_be_replaced);
2024
2025 if (!access->replacement_decl)
2026 access->replacement_decl = create_access_replacement (access, false);
2027 return access->replacement_decl;
2028 }
2029
2030
2031 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2032    linked list along the way.  Stop when *ACCESS is NULL or the access it points
2033    to is not "within" the root.  Return false iff some accesses partially
2034 overlap. */
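/* A purely illustrative sketch: with representatives <0,64>, <0,32>, <32,32>
   and <64,32> (offset, size in bits), this builds a subtree with <0,64> as
   the root and <0,32> and <32,32> as its children, while <64,32> is left for
   the caller (build_access_trees) to start a new root.  */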
2035
2036 static bool
2037 build_access_subtree (struct access **access)
2038 {
2039 struct access *root = *access, *last_child = NULL;
2040 HOST_WIDE_INT limit = root->offset + root->size;
2041
2042 *access = (*access)->next_grp;
2043 while (*access && (*access)->offset + (*access)->size <= limit)
2044 {
2045 if (!last_child)
2046 root->first_child = *access;
2047 else
2048 last_child->next_sibling = *access;
2049 last_child = *access;
2050
2051 if (!build_access_subtree (access))
2052 return false;
2053 }
2054
2055 if (*access && (*access)->offset < limit)
2056 return false;
2057
2058 return true;
2059 }
2060
2061 /* Build a tree of access representatives, ACCESS is the pointer to the first
2062 one, others are linked in a list by the next_grp field. Return false iff
2063 some accesses partially overlap. */
2064
2065 static bool
2066 build_access_trees (struct access *access)
2067 {
2068 while (access)
2069 {
2070 struct access *root = access;
2071
2072 if (!build_access_subtree (&access))
2073 return false;
2074 root->next_grp = access;
2075 }
2076 return true;
2077 }
2078
2079 /* Return true if EXPR contains some ARRAY_REFs into a variable-bounded
2080    array.  */
2081
2082 static bool
2083 expr_with_var_bounded_array_refs_p (tree expr)
2084 {
2085 while (handled_component_p (expr))
2086 {
2087 if (TREE_CODE (expr) == ARRAY_REF
2088 && !host_integerp (array_ref_low_bound (expr), 0))
2089 return true;
2090 expr = TREE_OPERAND (expr, 0);
2091 }
2092 return false;
2093 }
2094
2095 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2096    doing so seems beneficial and when ALLOW_REPLACEMENTS allows it.  Also set all
2097    sorts of access flags appropriately along the way, notably propagating
2098    grp_read, grp_assignment_read, grp_write, grp_assignment_write and
2099    grp_total_scalarization from PARENT to ROOT.
2100
2101 Creating a replacement for a scalar access is considered beneficial if its
2102 grp_hint is set (this means we are either attempting total scalarization or
2103 there is more than one direct read access) or according to the following
2104 table:
2105
2106 Access written to through a scalar type (once or more times)
2107 |
2108 | Written to in an assignment statement
2109 | |
2110 | | Access read as scalar _once_
2111 | | |
2112 | | | Read in an assignment statement
2113 | | | |
2114 | | | | Scalarize Comment
2115 -----------------------------------------------------------------------------
2116 0 0 0 0 No access for the scalar
2117 0 0 0 1 No access for the scalar
2118 0 0 1 0 No Single read - won't help
2119 0 0 1 1 No The same case
2120 0 1 0 0 No access for the scalar
2121 0 1 0 1 No access for the scalar
2122 0 1 1 0 Yes s = *g; return s.i;
2123 0 1 1 1 Yes The same case as above
2124 1 0 0 0 No Won't help
2125 1 0 0 1 Yes s.i = 1; *g = s;
2126 1 0 1 0 Yes s.i = 5; g = s.i;
2127 1 0 1 1 Yes The same case as above
2128 1 1 0 0 No Won't help.
2129 1 1 0 1 Yes s.i = 1; *g = s;
2130 1 1 1 0 Yes s = *g; return s.i;
2131 1 1 1 1 Yes Any of the above yeses */
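/* As a hedged example of the grp_hint case: in a hypothetical
     struct S { int i; } s;  ...  use (s.i);  use (s.i);
   (where use is a made-up function) the two scalar reads of s.i set grp_hint
   via multiple_scalar_reads in sort_and_splice_var_accesses, so a replacement
   is created even though the table above alone would not schedule one.  */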
2132
2133 static bool
2134 analyze_access_subtree (struct access *root, struct access *parent,
2135 bool allow_replacements)
2136 {
2137 struct access *child;
2138 HOST_WIDE_INT limit = root->offset + root->size;
2139 HOST_WIDE_INT covered_to = root->offset;
2140 bool scalar = is_gimple_reg_type (root->type);
2141 bool hole = false, sth_created = false;
2142
2143 if (parent)
2144 {
2145 if (parent->grp_read)
2146 root->grp_read = 1;
2147 if (parent->grp_assignment_read)
2148 root->grp_assignment_read = 1;
2149 if (parent->grp_write)
2150 root->grp_write = 1;
2151 if (parent->grp_assignment_write)
2152 root->grp_assignment_write = 1;
2153 if (parent->grp_total_scalarization)
2154 root->grp_total_scalarization = 1;
2155 }
2156
2157 if (root->grp_unscalarizable_region)
2158 allow_replacements = false;
2159
2160 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2161 allow_replacements = false;
2162
2163 for (child = root->first_child; child; child = child->next_sibling)
2164 {
2165 hole |= covered_to < child->offset;
2166 sth_created |= analyze_access_subtree (child, root,
2167 allow_replacements && !scalar);
2168
2169 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2170 root->grp_total_scalarization &= child->grp_total_scalarization;
2171 if (child->grp_covered)
2172 covered_to += child->size;
2173 else
2174 hole = true;
2175 }
2176
2177 if (allow_replacements && scalar && !root->first_child
2178 && (root->grp_hint
2179 || ((root->grp_scalar_read || root->grp_assignment_read)
2180 && (root->grp_scalar_write || root->grp_assignment_write))))
2181 {
2182 bool new_integer_type;
2183 /* Always create access replacements that cover the whole access.
2184 For integral types this means the precision has to match.
2185 Avoid assumptions based on the integral type kind, too. */
2186 if (INTEGRAL_TYPE_P (root->type)
2187 && (TREE_CODE (root->type) != INTEGER_TYPE
2188 || TYPE_PRECISION (root->type) != root->size)
2189 /* But leave bitfield accesses alone. */
2190 && (root->offset % BITS_PER_UNIT) == 0)
2191 {
2192 tree rt = root->type;
2193 root->type = build_nonstandard_integer_type (root->size,
2194 TYPE_UNSIGNED (rt));
2195 root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
2196 root->base, root->offset,
2197 root->type, NULL, false);
2198 new_integer_type = true;
2199 }
2200 else
2201 new_integer_type = false;
2202
2203 if (dump_file && (dump_flags & TDF_DETAILS))
2204 {
2205 fprintf (dump_file, "Marking ");
2206 print_generic_expr (dump_file, root->base, 0);
2207 fprintf (dump_file, " offset: %u, size: %u ",
2208 (unsigned) root->offset, (unsigned) root->size);
2209 fprintf (dump_file, " to be replaced%s.\n",
2210 new_integer_type ? " with an integer": "");
2211 }
2212
2213 root->grp_to_be_replaced = 1;
2214 sth_created = true;
2215 hole = false;
2216 }
2217 else
2218 {
2219 if (covered_to < limit)
2220 hole = true;
2221 if (scalar)
2222 root->grp_total_scalarization = 0;
2223 }
2224
2225 if (sth_created
2226 && (!hole || root->grp_total_scalarization))
2227 {
2228 root->grp_covered = 1;
2229 return true;
2230 }
2231 if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
2232 root->grp_unscalarized_data = 1; /* not covered and written to */
2233 if (sth_created)
2234 return true;
2235 return false;
2236 }
2237
2238 /* Analyze all access trees linked by next_grp by means of
2239    analyze_access_subtree.  */
2240 static bool
2241 analyze_access_trees (struct access *access)
2242 {
2243 bool ret = false;
2244
2245 while (access)
2246 {
2247 if (analyze_access_subtree (access, NULL, true))
2248 ret = true;
2249 access = access->next_grp;
2250 }
2251
2252 return ret;
2253 }
2254
2255 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and with
2256    size SIZE would conflict with an already existing one.  If exactly such a child
2257 already exists in LACC, store a pointer to it in EXACT_MATCH. */
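/* For illustration (offsets and sizes in bits): with existing children <0,32>
   and <32,32>, a candidate <0,32> is an exact match, a candidate <16,32>
   conflicts, and a candidate <64,32> does not conflict at all.  */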
2258
2259 static bool
2260 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2261 HOST_WIDE_INT size, struct access **exact_match)
2262 {
2263 struct access *child;
2264
2265 for (child = lacc->first_child; child; child = child->next_sibling)
2266 {
2267 if (child->offset == norm_offset && child->size == size)
2268 {
2269 *exact_match = child;
2270 return true;
2271 }
2272
2273 if (child->offset < norm_offset + size
2274 && child->offset + child->size > norm_offset)
2275 return true;
2276 }
2277
2278 return false;
2279 }
2280
2281 /* Create a new child access of PARENT, with all properties just like MODEL
2282    except for its offset and with its grp_write true and grp_read false.
2283    Return the new access or NULL if it cannot be created.  Note that this access
2284    is created long after all splicing and sorting; it is not located in any
2285    access vector and is automatically a representative of its group.  */
2286
2287 static struct access *
2288 create_artificial_child_access (struct access *parent, struct access *model,
2289 HOST_WIDE_INT new_offset)
2290 {
2291 struct access *access;
2292 struct access **child;
2293 tree expr = parent->base;
2294
2295 gcc_assert (!model->grp_unscalarizable_region);
2296
2297 access = (struct access *) pool_alloc (access_pool);
2298 memset (access, 0, sizeof (struct access));
2299 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2300 model->type))
2301 {
2302 access->grp_no_warning = true;
2303 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2304 new_offset, model, NULL, false);
2305 }
2306
2307 access->base = parent->base;
2308 access->expr = expr;
2309 access->offset = new_offset;
2310 access->size = model->size;
2311 access->type = model->type;
2312 access->grp_write = true;
2313 access->grp_read = false;
2314
2315 child = &parent->first_child;
2316 while (*child && (*child)->offset < new_offset)
2317 child = &(*child)->next_sibling;
2318
2319 access->next_sibling = *child;
2320 *child = access;
2321
2322 return access;
2323 }
2324
2325
2326 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2327 true if any new subaccess was created. Additionally, if RACC is a scalar
2328 access but LACC is not, change the type of the latter, if possible. */
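/* A hedged illustration: for an assignment "l = r;" between two candidate
   aggregates where r.f already has its own subaccess but l has none at the
   corresponding offset, an artificial child of the LHS access is created so
   that the copy can later be performed through scalar replacements instead
   of an aggregate copy (the field name f is hypothetical).  */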
2329
2330 static bool
2331 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2332 {
2333 struct access *rchild;
2334 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2335 bool ret = false;
2336
2337 if (is_gimple_reg_type (lacc->type)
2338 || lacc->grp_unscalarizable_region
2339 || racc->grp_unscalarizable_region)
2340 return false;
2341
2342 if (is_gimple_reg_type (racc->type))
2343 {
2344 if (!lacc->first_child && !racc->first_child)
2345 {
2346 tree t = lacc->base;
2347
2348 lacc->type = racc->type;
2349 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2350 lacc->offset, racc->type))
2351 lacc->expr = t;
2352 else
2353 {
2354 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2355 lacc->base, lacc->offset,
2356 racc, NULL, false);
2357 lacc->grp_no_warning = true;
2358 }
2359 }
2360 return false;
2361 }
2362
2363 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2364 {
2365 struct access *new_acc = NULL;
2366 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2367
2368 if (rchild->grp_unscalarizable_region)
2369 continue;
2370
2371 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2372 &new_acc))
2373 {
2374 if (new_acc)
2375 {
2376 rchild->grp_hint = 1;
2377 new_acc->grp_hint |= new_acc->grp_read;
2378 if (rchild->first_child)
2379 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2380 }
2381 continue;
2382 }
2383
2384 rchild->grp_hint = 1;
2385 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2386 if (new_acc)
2387 {
2388 ret = true;
2389 if (racc->first_child)
2390 propagate_subaccesses_across_link (new_acc, rchild);
2391 }
2392 }
2393
2394 return ret;
2395 }
2396
2397 /* Propagate all subaccesses across assignment links. */
2398
2399 static void
2400 propagate_all_subaccesses (void)
2401 {
2402 while (work_queue_head)
2403 {
2404 struct access *racc = pop_access_from_work_queue ();
2405 struct assign_link *link;
2406
2407 gcc_assert (racc->first_link);
2408
2409 for (link = racc->first_link; link; link = link->next)
2410 {
2411 struct access *lacc = link->lacc;
2412
2413 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2414 continue;
2415 lacc = lacc->group_representative;
2416 if (propagate_subaccesses_across_link (lacc, racc)
2417 && lacc->first_link)
2418 add_access_to_work_queue (lacc);
2419 }
2420 }
2421 }
2422
2423 /* Go through all accesses collected throughout the (intraprocedural) analysis
2424 stage, exclude overlapping ones, identify representatives and build trees
2425 out of them, making decisions about scalarization on the way. Return true
2426 iff there are any to-be-scalarized variables after this stage. */
2427
2428 static bool
2429 analyze_all_variable_accesses (void)
2430 {
2431 int res = 0;
2432 bitmap tmp = BITMAP_ALLOC (NULL);
2433 bitmap_iterator bi;
2434 unsigned i, max_total_scalarization_size;
2435
2436 max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2437 * MOVE_RATIO (optimize_function_for_speed_p (cfun));
2438
2439 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2440 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2441 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2442 {
2443 tree var = referenced_var (i);
2444
2445 if (TREE_CODE (var) == VAR_DECL
2446 && type_consists_of_records_p (TREE_TYPE (var)))
2447 {
2448 if ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
2449 <= max_total_scalarization_size)
2450 {
2451 completely_scalarize_var (var);
2452 if (dump_file && (dump_flags & TDF_DETAILS))
2453 {
2454 fprintf (dump_file, "Will attempt to totally scalarize ");
2455 print_generic_expr (dump_file, var, 0);
2456 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2457 }
2458 }
2459 else if (dump_file && (dump_flags & TDF_DETAILS))
2460 {
2461 fprintf (dump_file, "Too big to totally scalarize: ");
2462 print_generic_expr (dump_file, var, 0);
2463 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2464 }
2465 }
2466 }
2467
2468 bitmap_copy (tmp, candidate_bitmap);
2469 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2470 {
2471 tree var = referenced_var (i);
2472 struct access *access;
2473
2474 access = sort_and_splice_var_accesses (var);
2475 if (!access || !build_access_trees (access))
2476 disqualify_candidate (var,
2477 "No or inhibitingly overlapping accesses.");
2478 }
2479
2480 propagate_all_subaccesses ();
2481
2482 bitmap_copy (tmp, candidate_bitmap);
2483 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2484 {
2485 tree var = referenced_var (i);
2486 struct access *access = get_first_repr_for_decl (var);
2487
2488 if (analyze_access_trees (access))
2489 {
2490 res++;
2491 if (dump_file && (dump_flags & TDF_DETAILS))
2492 {
2493 fprintf (dump_file, "\nAccess trees for ");
2494 print_generic_expr (dump_file, var, 0);
2495 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2496 dump_access_tree (dump_file, access);
2497 fprintf (dump_file, "\n");
2498 }
2499 }
2500 else
2501 disqualify_candidate (var, "No scalar replacements to be created.");
2502 }
2503
2504 BITMAP_FREE (tmp);
2505
2506 if (res)
2507 {
2508 statistics_counter_event (cfun, "Scalarized aggregates", res);
2509 return true;
2510 }
2511 else
2512 return false;
2513 }
2514
2515 /* Generate statements copying scalar replacements of accesses within a subtree
2516 into or out of AGG. ACCESS, all its children, siblings and their children
2517 are to be processed. AGG is an aggregate type expression (can be a
2518 declaration but does not have to be, it can for example also be a mem_ref or
2519 a series of handled components). TOP_OFFSET is the offset of the processed
2520 subtree which has to be subtracted from offsets of individual accesses to
2521 get corresponding offsets for AGG. If CHUNK_SIZE is non-null, copy only
2522 replacements in the interval <start_offset, start_offset + chunk_size>,
2523 otherwise copy all. GSI is a statement iterator used to place the new
2524 statements. WRITE should be true when the statements should write from AGG
2525    to the replacement and false if vice versa.  If INSERT_AFTER is true, new
2526 statements will be added after the current statement in GSI, they will be
2527 added before the statement otherwise. */
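/* Rough illustration, assuming a scalarized "struct { int i; float f; } s"
   with replacements named SR.1 and SR.2 (names are made up): with WRITE true
   this emits "SR.1 = s.i; SR.2 = s.f;", with WRITE false it emits
   "s.i = SR.1; s.f = SR.2;" at the position described by GSI and
   INSERT_AFTER.  */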
2528
2529 static void
2530 generate_subtree_copies (struct access *access, tree agg,
2531 HOST_WIDE_INT top_offset,
2532 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2533 gimple_stmt_iterator *gsi, bool write,
2534 bool insert_after, location_t loc)
2535 {
2536 do
2537 {
2538 if (chunk_size && access->offset >= start_offset + chunk_size)
2539 return;
2540
2541 if (access->grp_to_be_replaced
2542 && (chunk_size == 0
2543 || access->offset + access->size > start_offset))
2544 {
2545 tree expr, repl = get_access_replacement (access);
2546 gimple stmt;
2547
2548 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2549 access, gsi, insert_after);
2550
2551 if (write)
2552 {
2553 if (access->grp_partial_lhs)
2554 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2555 !insert_after,
2556 insert_after ? GSI_NEW_STMT
2557 : GSI_SAME_STMT);
2558 stmt = gimple_build_assign (repl, expr);
2559 }
2560 else
2561 {
2562 TREE_NO_WARNING (repl) = 1;
2563 if (access->grp_partial_lhs)
2564 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2565 !insert_after,
2566 insert_after ? GSI_NEW_STMT
2567 : GSI_SAME_STMT);
2568 stmt = gimple_build_assign (expr, repl);
2569 }
2570 gimple_set_location (stmt, loc);
2571
2572 if (insert_after)
2573 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2574 else
2575 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2576 update_stmt (stmt);
2577 sra_stats.subtree_copies++;
2578 }
2579
2580 if (access->first_child)
2581 generate_subtree_copies (access->first_child, agg, top_offset,
2582 start_offset, chunk_size, gsi,
2583 write, insert_after, loc);
2584
2585 access = access->next_sibling;
2586 }
2587 while (access);
2588 }
2589
2590 /* Assign zero to all scalar replacements in an access subtree.  ACCESS is
2591    the root of the subtree to be processed.  GSI is the statement iterator used
2592 for inserting statements which are added after the current statement if
2593 INSERT_AFTER is true or before it otherwise. */
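/* Illustrative effect for a scalarized "struct { int i; double d; } s"
   (replacement names made up): this emits "SR.1 = 0;" and "SR.2 = 0.0;"
   next to the statement at GSI, one assignment per replaced access.  */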
2594
2595 static void
2596 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2597 bool insert_after, location_t loc)
2598
2599 {
2600 struct access *child;
2601
2602 if (access->grp_to_be_replaced)
2603 {
2604 gimple stmt;
2605
2606 stmt = gimple_build_assign (get_access_replacement (access),
2607 build_zero_cst (access->type));
2608 if (insert_after)
2609 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2610 else
2611 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2612 update_stmt (stmt);
2613 gimple_set_location (stmt, loc);
2614 }
2615
2616 for (child = access->first_child; child; child = child->next_sibling)
2617 init_subtree_with_zero (child, gsi, insert_after, loc);
2618 }
2619
2620 /* Search for an access representative for the given expression EXPR and
2621 return it or NULL if it cannot be found. */
2622
2623 static struct access *
2624 get_access_for_expr (tree expr)
2625 {
2626 HOST_WIDE_INT offset, size, max_size;
2627 tree base;
2628
2629 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2630 a different size than the size of its argument and we need the latter
2631 one. */
2632 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2633 expr = TREE_OPERAND (expr, 0);
2634
2635 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2636 if (max_size == -1 || !DECL_P (base))
2637 return NULL;
2638
2639 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2640 return NULL;
2641
2642 return get_var_base_offset_size_access (base, offset, max_size);
2643 }
2644
2645 /* Replace the expression EXPR with a scalar replacement if there is one and
2646 generate other statements to do type conversion or subtree copying if
2647 necessary. GSI is used to place newly created statements, WRITE is true if
2648 the expression is being written to (it is on a LHS of a statement or output
2649 in an assembly statement). */
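/* Sketch of the common case: a use of s.i whose access has a replacement is
   rewritten to use the replacement directly; when the expression type does
   not usefully match the replacement type, a separate load or store through
   the original reference is emitted instead (s.i is only an example).  */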
2650
2651 static bool
2652 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2653 {
2654 location_t loc;
2655 struct access *access;
2656 tree type, bfr;
2657
2658 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2659 {
2660 bfr = *expr;
2661 expr = &TREE_OPERAND (*expr, 0);
2662 }
2663 else
2664 bfr = NULL_TREE;
2665
2666 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2667 expr = &TREE_OPERAND (*expr, 0);
2668 access = get_access_for_expr (*expr);
2669 if (!access)
2670 return false;
2671 type = TREE_TYPE (*expr);
2672
2673 loc = gimple_location (gsi_stmt (*gsi));
2674 if (access->grp_to_be_replaced)
2675 {
2676 tree repl = get_access_replacement (access);
2677 /* If we replace a non-register typed access simply use the original
2678 access expression to extract the scalar component afterwards.
2679 This happens if scalarizing a function return value or parameter
2680 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2681 gcc.c-torture/compile/20011217-1.c.
2682
2683 We also want to use this when accessing a complex or vector which can
2684 be accessed as a different type too, potentially creating a need for
2685 type conversion (see PR42196) and when scalarized unions are involved
2686 in assembler statements (see PR42398). */
2687 if (!useless_type_conversion_p (type, access->type))
2688 {
2689 tree ref;
2690
2691 ref = build_ref_for_model (loc, access->base, access->offset, access,
2692 NULL, false);
2693
2694 if (write)
2695 {
2696 gimple stmt;
2697
2698 if (access->grp_partial_lhs)
2699 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2700 false, GSI_NEW_STMT);
2701 stmt = gimple_build_assign (repl, ref);
2702 gimple_set_location (stmt, loc);
2703 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2704 }
2705 else
2706 {
2707 gimple stmt;
2708
2709 if (access->grp_partial_lhs)
2710 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2711 true, GSI_SAME_STMT);
2712 stmt = gimple_build_assign (ref, repl);
2713 gimple_set_location (stmt, loc);
2714 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2715 }
2716 }
2717 else
2718 *expr = repl;
2719 sra_stats.exprs++;
2720 }
2721
2722 if (access->first_child)
2723 {
2724 HOST_WIDE_INT start_offset, chunk_size;
2725 if (bfr
2726 && host_integerp (TREE_OPERAND (bfr, 1), 1)
2727 && host_integerp (TREE_OPERAND (bfr, 2), 1))
2728 {
2729 chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2730 start_offset = access->offset
2731 + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2732 }
2733 else
2734 start_offset = chunk_size = 0;
2735
2736 generate_subtree_copies (access->first_child, access->base, 0,
2737 start_offset, chunk_size, gsi, write, write,
2738 loc);
2739 }
2740 return true;
2741 }
2742
2743 /* Where scalar replacements of the RHS have been written to when a replacement
2744    of the LHS of an assignment cannot be directly loaded from a replacement of
2745    the RHS.  */
2746 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2747 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2748 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2749
2750 /* Store all replacements in the access tree rooted in TOP_RACC either to their
2751    base aggregate if there are unscalarized data or directly to the LHS of the
2752 statement that is pointed to by GSI otherwise. */
2753
2754 static enum unscalarized_data_handling
2755 handle_unscalarized_data_in_subtree (struct access *top_racc,
2756 gimple_stmt_iterator *gsi)
2757 {
2758 if (top_racc->grp_unscalarized_data)
2759 {
2760 generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2761 gsi, false, false,
2762 gimple_location (gsi_stmt (*gsi)));
2763 return SRA_UDH_RIGHT;
2764 }
2765 else
2766 {
2767 tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
2768 generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2769 0, 0, gsi, false, false,
2770 gimple_location (gsi_stmt (*gsi)));
2771 return SRA_UDH_LEFT;
2772 }
2773 }
2774
2775
2776 /* Try to generate statements to load all sub-replacements in an access subtree
2777 formed by children of LACC from scalar replacements in the TOP_RACC subtree.
2778 If that is not possible, refresh the TOP_RACC base aggregate and load the
2779    accesses from it.  LEFT_OFFSET is the offset of the whole LHS subtree being
2780    copied.  NEW_GSI is a statement iterator used for statement insertions after the
2781 original assignment, OLD_GSI is used to insert statements before the
2782 assignment. *REFRESHED keeps the information whether we have needed to
2783 refresh replacements of the LHS and from which side of the assignments this
2784 takes place. */
2785
2786 static void
2787 load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2788 HOST_WIDE_INT left_offset,
2789 gimple_stmt_iterator *old_gsi,
2790 gimple_stmt_iterator *new_gsi,
2791 enum unscalarized_data_handling *refreshed)
2792 {
2793 location_t loc = gimple_location (gsi_stmt (*old_gsi));
2794 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2795 {
2796 if (lacc->grp_to_be_replaced)
2797 {
2798 struct access *racc;
2799 HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;
2800 gimple stmt;
2801 tree rhs;
2802
2803 racc = find_access_in_subtree (top_racc, offset, lacc->size);
2804 if (racc && racc->grp_to_be_replaced)
2805 {
2806 rhs = get_access_replacement (racc);
2807 if (!useless_type_conversion_p (lacc->type, racc->type))
2808 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2809
2810 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
2811 rhs = force_gimple_operand_gsi (old_gsi, rhs, true, NULL_TREE,
2812 true, GSI_SAME_STMT);
2813 }
2814 else
2815 {
2816 /* No suitable access on the right hand side, need to load from
2817 the aggregate. See if we have to update it first... */
2818 if (*refreshed == SRA_UDH_NONE)
2819 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2820 old_gsi);
2821
2822 if (*refreshed == SRA_UDH_LEFT)
2823 rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
2824 new_gsi, true);
2825 else
2826 rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
2827 new_gsi, true);
2828 if (lacc->grp_partial_lhs)
2829 rhs = force_gimple_operand_gsi (new_gsi, rhs, true, NULL_TREE,
2830 false, GSI_NEW_STMT);
2831 }
2832
2833 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2834 gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2835 gimple_set_location (stmt, loc);
2836 update_stmt (stmt);
2837 sra_stats.subreplacements++;
2838 }
2839 else if (*refreshed == SRA_UDH_NONE
2840 && lacc->grp_read && !lacc->grp_covered)
2841 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2842 old_gsi);
2843
2844 if (lacc->first_child)
2845 load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
2846 old_gsi, new_gsi, refreshed);
2847 }
2848 }
2849
2850 /* Result code for SRA assignment modification. */
2851 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
2852 SRA_AM_MODIFIED, /* stmt changed but not
2853 removed */
2854 SRA_AM_REMOVED }; /* stmt eliminated */
2855
2856 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
2857 to the assignment and GSI is the statement iterator pointing at it. Returns
2858 the same values as sra_modify_assign. */
2859
2860 static enum assignment_mod_result
2861 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2862 {
2863 tree lhs = gimple_assign_lhs (*stmt);
2864 struct access *acc;
2865 location_t loc;
2866
2867 acc = get_access_for_expr (lhs);
2868 if (!acc)
2869 return SRA_AM_NONE;
2870
2871 if (gimple_clobber_p (*stmt))
2872 {
2873 /* Remove clobbers of fully scalarized variables, otherwise
2874 do nothing. */
2875 if (acc->grp_covered)
2876 {
2877 unlink_stmt_vdef (*stmt);
2878 gsi_remove (gsi, true);
2879 return SRA_AM_REMOVED;
2880 }
2881 else
2882 return SRA_AM_NONE;
2883 }
2884
2885 loc = gimple_location (*stmt);
2886 if (VEC_length (constructor_elt,
2887 CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
2888 {
2889 /* I have never seen this code path trigger but if it can happen the
2890 following should handle it gracefully. */
2891 if (access_has_children_p (acc))
2892 generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2893 true, true, loc);
2894 return SRA_AM_MODIFIED;
2895 }
2896
2897 if (acc->grp_covered)
2898 {
2899 init_subtree_with_zero (acc, gsi, false, loc);
2900 unlink_stmt_vdef (*stmt);
2901 gsi_remove (gsi, true);
2902 return SRA_AM_REMOVED;
2903 }
2904 else
2905 {
2906 init_subtree_with_zero (acc, gsi, true, loc);
2907 return SRA_AM_MODIFIED;
2908 }
2909 }
2910
2911 /* Create and return a new suitable default definition SSA_NAME for RACC which
2912 is an access describing an uninitialized part of an aggregate that is being
2913 loaded. */
2914
2915 static tree
2916 get_repl_default_def_ssa_name (struct access *racc)
2917 {
2918 tree repl, decl;
2919
2920 decl = get_unrenamed_access_replacement (racc);
2921
2922 repl = gimple_default_def (cfun, decl);
2923 if (!repl)
2924 {
2925 repl = make_ssa_name (decl, gimple_build_nop ());
2926 set_default_def (decl, repl);
2927 }
2928
2929 return repl;
2930 }
2931
2932 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
2933 somewhere in it. */
2934
2935 static inline bool
2936 contains_bitfld_comp_ref_p (const_tree ref)
2937 {
2938 while (handled_component_p (ref))
2939 {
2940 if (TREE_CODE (ref) == COMPONENT_REF
2941 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
2942 return true;
2943 ref = TREE_OPERAND (ref, 0);
2944 }
2945
2946 return false;
2947 }
2948
2949 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
2950 bit-field field declaration somewhere in it. */
2951
2952 static inline bool
2953 contains_vce_or_bfcref_p (const_tree ref)
2954 {
2955 while (handled_component_p (ref))
2956 {
2957 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
2958 || (TREE_CODE (ref) == COMPONENT_REF
2959 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
2960 return true;
2961 ref = TREE_OPERAND (ref, 0);
2962 }
2963
2964 return false;
2965 }
2966
2967 /* Examine both sides of the assignment statement pointed to by STMT, replace
2968    them with a scalar replacement if there is one and generate copying of
2969 replacements if scalarized aggregates have been used in the assignment. GSI
2970 is used to hold generated statements for type conversions and subtree
2971 copying. */
2972
2973 static enum assignment_mod_result
2974 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2975 {
2976 struct access *lacc, *racc;
2977 tree lhs, rhs;
2978 bool modify_this_stmt = false;
2979 bool force_gimple_rhs = false;
2980 location_t loc;
2981 gimple_stmt_iterator orig_gsi = *gsi;
2982
2983 if (!gimple_assign_single_p (*stmt))
2984 return SRA_AM_NONE;
2985 lhs = gimple_assign_lhs (*stmt);
2986 rhs = gimple_assign_rhs1 (*stmt);
2987
2988 if (TREE_CODE (rhs) == CONSTRUCTOR)
2989 return sra_modify_constructor_assign (stmt, gsi);
2990
2991 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
2992 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
2993 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
2994 {
2995 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
2996 gsi, false);
2997 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
2998 gsi, true);
2999 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3000 }
3001
3002 lacc = get_access_for_expr (lhs);
3003 racc = get_access_for_expr (rhs);
3004 if (!lacc && !racc)
3005 return SRA_AM_NONE;
3006
3007 loc = gimple_location (*stmt);
3008 if (lacc && lacc->grp_to_be_replaced)
3009 {
3010 lhs = get_access_replacement (lacc);
3011 gimple_assign_set_lhs (*stmt, lhs);
3012 modify_this_stmt = true;
3013 if (lacc->grp_partial_lhs)
3014 force_gimple_rhs = true;
3015 sra_stats.exprs++;
3016 }
3017
3018 if (racc && racc->grp_to_be_replaced)
3019 {
3020 rhs = get_access_replacement (racc);
3021 modify_this_stmt = true;
3022 if (racc->grp_partial_lhs)
3023 force_gimple_rhs = true;
3024 sra_stats.exprs++;
3025 }
3026 else if (racc
3027 && !racc->grp_unscalarized_data
3028 && TREE_CODE (lhs) == SSA_NAME
3029 && !access_has_replacements_p (racc))
3030 {
3031 rhs = get_repl_default_def_ssa_name (racc);
3032 modify_this_stmt = true;
3033 sra_stats.exprs++;
3034 }
3035
3036 if (modify_this_stmt)
3037 {
3038 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3039 {
3040 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3041 ??? This should move to fold_stmt which we simply should
3042 call after building a VIEW_CONVERT_EXPR here. */
3043 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3044 && !contains_bitfld_comp_ref_p (lhs)
3045 && !access_has_children_p (lacc))
3046 {
3047 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3048 gimple_assign_set_lhs (*stmt, lhs);
3049 }
3050 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3051 && !contains_vce_or_bfcref_p (rhs)
3052 && !access_has_children_p (racc))
3053 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3054
3055 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3056 {
3057 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3058 rhs);
3059 if (is_gimple_reg_type (TREE_TYPE (lhs))
3060 && TREE_CODE (lhs) != SSA_NAME)
3061 force_gimple_rhs = true;
3062 }
3063 }
3064 }
3065
3066 /* From this point on, the function deals with assignments in between
3067 aggregates when at least one has scalar reductions of some of its
3068    components.  There are three possible scenarios: 1) both the LHS and the RHS
3069    have to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
3070
3071 In the first case, we would like to load the LHS components from RHS
3072 components whenever possible. If that is not possible, we would like to
3073 read it directly from the RHS (after updating it by storing in it its own
3074 components). If there are some necessary unscalarized data in the LHS,
3075 those will be loaded by the original assignment too. If neither of these
3076 cases happen, the original statement can be removed. Most of this is done
3077 by load_assign_lhs_subreplacements.
3078
3079 In the second case, we would like to store all RHS scalarized components
3080 directly into LHS and if they cover the aggregate completely, remove the
3081 statement too. In the third case, we want the LHS components to be loaded
3082 directly from the RHS (DSE will remove the original statement if it
3083 becomes redundant).
3084
3085 This is a bit complex but manageable when types match and when unions do
3086 not cause confusion in a way that we cannot really load a component of LHS
3087 from the RHS or vice versa (the access representing this level can have
3088 subaccesses that are accessible only through a different union field at a
3089 higher level - different from the one used in the examined expression).
3090 Unions are fun.
3091
3092 Therefore, I specially handle a fourth case, happening when there is a
3093 specific type cast or it is impossible to locate a scalarized subaccess on
3094 the other side of the expression. If that happens, I simply "refresh" the
3095    RHS by storing its scalarized components into it, leave the original statement
3096 there to do the copying and then load the scalar replacements of the LHS.
3097 This is what the first branch does. */
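/* A simplified illustration of the second scenario: for
     struct S { int i; int j; } dst, src;  ...  dst = src;
   where only src has to-be-scalarized components, the copy is rewritten as
   "dst.i = SR.1; dst.j = SR.2;" and, because the replacements cover src
   completely, the original aggregate assignment is removed (types and names
   are illustrative).  */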
3098
3099 if (modify_this_stmt
3100 || gimple_has_volatile_ops (*stmt)
3101 || contains_vce_or_bfcref_p (rhs)
3102 || contains_vce_or_bfcref_p (lhs))
3103 {
3104 if (access_has_children_p (racc))
3105 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
3106 gsi, false, false, loc);
3107 if (access_has_children_p (lacc))
3108 generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
3109 gsi, true, true, loc);
3110 sra_stats.separate_lhs_rhs_handling++;
3111
3112 /* This gimplification must be done after generate_subtree_copies,
3113 lest we insert the subtree copies in the middle of the gimplified
3114 sequence. */
3115 if (force_gimple_rhs)
3116 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3117 true, GSI_SAME_STMT);
3118 if (gimple_assign_rhs1 (*stmt) != rhs)
3119 {
3120 modify_this_stmt = true;
3121 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3122 gcc_assert (*stmt == gsi_stmt (orig_gsi));
3123 }
3124
3125 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3126 }
3127 else
3128 {
3129 if (access_has_children_p (lacc) && access_has_children_p (racc))
3130 {
3131 gimple_stmt_iterator orig_gsi = *gsi;
3132 enum unscalarized_data_handling refreshed;
3133
3134 if (lacc->grp_read && !lacc->grp_covered)
3135 refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
3136 else
3137 refreshed = SRA_UDH_NONE;
3138
3139 load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
3140 &orig_gsi, gsi, &refreshed);
3141 if (refreshed != SRA_UDH_RIGHT)
3142 {
3143 gsi_next (gsi);
3144 unlink_stmt_vdef (*stmt);
3145 gsi_remove (&orig_gsi, true);
3146 sra_stats.deleted++;
3147 return SRA_AM_REMOVED;
3148 }
3149 }
3150 else
3151 {
3152 if (access_has_children_p (racc)
3153 && !racc->grp_unscalarized_data)
3154 {
3155 if (dump_file)
3156 {
3157 fprintf (dump_file, "Removing load: ");
3158 print_gimple_stmt (dump_file, *stmt, 0, 0);
3159 }
3160 generate_subtree_copies (racc->first_child, lhs,
3161 racc->offset, 0, 0, gsi,
3162 false, false, loc);
3163 gcc_assert (*stmt == gsi_stmt (*gsi));
3164 unlink_stmt_vdef (*stmt);
3165 gsi_remove (gsi, true);
3166 sra_stats.deleted++;
3167 return SRA_AM_REMOVED;
3168 }
3169 /* Restore the aggregate RHS from its components so the
3170 prevailing aggregate copy does the right thing. */
3171 if (access_has_children_p (racc))
3172 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
3173 gsi, false, false, loc);
3174 /* Re-load the components of the aggregate copy destination.
3175 But use the RHS aggregate to load from to expose more
3176 optimization opportunities. */
3177 if (access_has_children_p (lacc))
3178 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3179 0, 0, gsi, true, true, loc);
3180 }
3181
3182 return SRA_AM_NONE;
3183 }
3184 }
3185
3186 /* Traverse the function body and make all modifications as decided in
3187    analyze_all_variable_accesses.  Return true iff the CFG has been
3188 changed. */
3189
3190 static bool
3191 sra_modify_function_body (void)
3192 {
3193 bool cfg_changed = false;
3194 basic_block bb;
3195
3196 FOR_EACH_BB (bb)
3197 {
3198 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3199 while (!gsi_end_p (gsi))
3200 {
3201 gimple stmt = gsi_stmt (gsi);
3202 enum assignment_mod_result assign_result;
3203 bool modified = false, deleted = false;
3204 tree *t;
3205 unsigned i;
3206
3207 switch (gimple_code (stmt))
3208 {
3209 case GIMPLE_RETURN:
3210 t = gimple_return_retval_ptr (stmt);
3211 if (*t != NULL_TREE)
3212 modified |= sra_modify_expr (t, &gsi, false);
3213 break;
3214
3215 case GIMPLE_ASSIGN:
3216 assign_result = sra_modify_assign (&stmt, &gsi);
3217 modified |= assign_result == SRA_AM_MODIFIED;
3218 deleted = assign_result == SRA_AM_REMOVED;
3219 break;
3220
3221 case GIMPLE_CALL:
3222 /* Operands must be processed before the lhs. */
3223 for (i = 0; i < gimple_call_num_args (stmt); i++)
3224 {
3225 t = gimple_call_arg_ptr (stmt, i);
3226 modified |= sra_modify_expr (t, &gsi, false);
3227 }
3228
3229 if (gimple_call_lhs (stmt))
3230 {
3231 t = gimple_call_lhs_ptr (stmt);
3232 modified |= sra_modify_expr (t, &gsi, true);
3233 }
3234 break;
3235
3236 case GIMPLE_ASM:
3237 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
3238 {
3239 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
3240 modified |= sra_modify_expr (t, &gsi, false);
3241 }
3242 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
3243 {
3244 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
3245 modified |= sra_modify_expr (t, &gsi, true);
3246 }
3247 break;
3248
3249 default:
3250 break;
3251 }
3252
3253 if (modified)
3254 {
3255 update_stmt (stmt);
3256 if (maybe_clean_eh_stmt (stmt)
3257 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3258 cfg_changed = true;
3259 }
3260 if (!deleted)
3261 gsi_next (&gsi);
3262 }
3263 }
3264
3265 return cfg_changed;
3266 }
3267
3268 /* Generate statements initializing scalar replacements of parts of function
3269 parameters. */
3270
3271 static void
3272 initialize_parameter_reductions (void)
3273 {
3274 gimple_stmt_iterator gsi;
3275 gimple_seq seq = NULL;
3276 tree parm;
3277
3278 for (parm = DECL_ARGUMENTS (current_function_decl);
3279 parm;
3280 parm = DECL_CHAIN (parm))
3281 {
3282 VEC (access_p, heap) *access_vec;
3283 struct access *access;
3284
3285 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3286 continue;
3287 access_vec = get_base_access_vector (parm);
3288 if (!access_vec)
3289 continue;
3290
3291 if (!seq)
3292 {
3293 seq = gimple_seq_alloc ();
3294 gsi = gsi_start (seq);
3295 }
3296
3297 for (access = VEC_index (access_p, access_vec, 0);
3298 access;
3299 access = access->next_grp)
3300 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3301 EXPR_LOCATION (parm));
3302 }
3303
3304 if (seq)
3305 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
3306 }
3307
3308 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3309 it reveals there are components of some aggregates to be scalarized, it runs
3310 the required transformations. */
3311 static unsigned int
3312 perform_intra_sra (void)
3313 {
3314 int ret = 0;
3315 sra_initialize ();
3316
3317 if (!find_var_candidates ())
3318 goto out;
3319
3320 if (!scan_function ())
3321 goto out;
3322
3323 if (!analyze_all_variable_accesses ())
3324 goto out;
3325
3326 if (sra_modify_function_body ())
3327 ret = TODO_update_ssa | TODO_cleanup_cfg;
3328 else
3329 ret = TODO_update_ssa;
3330 initialize_parameter_reductions ();
3331
3332 statistics_counter_event (cfun, "Scalar replacements created",
3333 sra_stats.replacements);
3334 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3335 statistics_counter_event (cfun, "Subtree copy stmts",
3336 sra_stats.subtree_copies);
3337 statistics_counter_event (cfun, "Subreplacement stmts",
3338 sra_stats.subreplacements);
3339 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3340 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3341 sra_stats.separate_lhs_rhs_handling);
3342
3343 out:
3344 sra_deinitialize ();
3345 return ret;
3346 }
3347
3348 /* Perform early intraprocedural SRA. */
3349 static unsigned int
3350 early_intra_sra (void)
3351 {
3352 sra_mode = SRA_MODE_EARLY_INTRA;
3353 return perform_intra_sra ();
3354 }
3355
3356 /* Perform "late" intraprocedural SRA. */
3357 static unsigned int
3358 late_intra_sra (void)
3359 {
3360 sra_mode = SRA_MODE_INTRA;
3361 return perform_intra_sra ();
3362 }
3363
3364
3365 static bool
3366 gate_intra_sra (void)
3367 {
3368 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3369 }
3370
3371
3372 struct gimple_opt_pass pass_sra_early =
3373 {
3374 {
3375 GIMPLE_PASS,
3376 "esra", /* name */
3377 gate_intra_sra, /* gate */
3378 early_intra_sra, /* execute */
3379 NULL, /* sub */
3380 NULL, /* next */
3381 0, /* static_pass_number */
3382 TV_TREE_SRA, /* tv_id */
3383 PROP_cfg | PROP_ssa, /* properties_required */
3384 0, /* properties_provided */
3385 0, /* properties_destroyed */
3386 0, /* todo_flags_start */
3387 TODO_update_ssa
3388 | TODO_ggc_collect
3389 | TODO_verify_ssa /* todo_flags_finish */
3390 }
3391 };
3392
3393 struct gimple_opt_pass pass_sra =
3394 {
3395 {
3396 GIMPLE_PASS,
3397 "sra", /* name */
3398 gate_intra_sra, /* gate */
3399 late_intra_sra, /* execute */
3400 NULL, /* sub */
3401 NULL, /* next */
3402 0, /* static_pass_number */
3403 TV_TREE_SRA, /* tv_id */
3404 PROP_cfg | PROP_ssa, /* properties_required */
3405 0, /* properties_provided */
3406 0, /* properties_destroyed */
3407 TODO_update_address_taken, /* todo_flags_start */
3408 TODO_update_ssa
3409 | TODO_ggc_collect
3410 | TODO_verify_ssa /* todo_flags_finish */
3411 }
3412 };
3413
3414
3415 /* Return true iff PARM (which must be a PARM_DECL) is an unused scalar
3416 parameter. */
3417
3418 static bool
3419 is_unused_scalar_param (tree parm)
3420 {
3421 tree name;
3422 return (is_gimple_reg (parm)
3423 && (!(name = gimple_default_def (cfun, parm))
3424 || has_zero_uses (name)));
3425 }
3426
3427 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3428 examine whether there are any direct or otherwise infeasible ones. If so,
3429 return true, otherwise return false. PARM must be a gimple register with a
3430 non-NULL default definition. */
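/* For example (illustration only), in a function taking "int *p", a use such
   as "*p" on either side of an assignment is a handled dereference, whereas
   copying the pointer itself, e.g. "q = p;", or passing p on as a pointer to
   another call counts as a direct use and makes this function return
   true.  */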
3431
3432 static bool
3433 ptr_parm_has_direct_uses (tree parm)
3434 {
3435 imm_use_iterator ui;
3436 gimple stmt;
3437 tree name = gimple_default_def (cfun, parm);
3438 bool ret = false;
3439
3440 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3441 {
3442 int uses_ok = 0;
3443 use_operand_p use_p;
3444
3445 if (is_gimple_debug (stmt))
3446 continue;
3447
3448 /* Valid uses include dereferences on the lhs and the rhs. */
3449 if (gimple_has_lhs (stmt))
3450 {
3451 tree lhs = gimple_get_lhs (stmt);
3452 while (handled_component_p (lhs))
3453 lhs = TREE_OPERAND (lhs, 0);
3454 if (TREE_CODE (lhs) == MEM_REF
3455 && TREE_OPERAND (lhs, 0) == name
3456 && integer_zerop (TREE_OPERAND (lhs, 1))
3457 && types_compatible_p (TREE_TYPE (lhs),
3458 TREE_TYPE (TREE_TYPE (name)))
3459 && !TREE_THIS_VOLATILE (lhs))
3460 uses_ok++;
3461 }
3462 if (gimple_assign_single_p (stmt))
3463 {
3464 tree rhs = gimple_assign_rhs1 (stmt);
3465 while (handled_component_p (rhs))
3466 rhs = TREE_OPERAND (rhs, 0);
3467 if (TREE_CODE (rhs) == MEM_REF
3468 && TREE_OPERAND (rhs, 0) == name
3469 && integer_zerop (TREE_OPERAND (rhs, 1))
3470 && types_compatible_p (TREE_TYPE (rhs),
3471 TREE_TYPE (TREE_TYPE (name)))
3472 && !TREE_THIS_VOLATILE (rhs))
3473 uses_ok++;
3474 }
3475 else if (is_gimple_call (stmt))
3476 {
3477 unsigned i;
3478 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3479 {
3480 tree arg = gimple_call_arg (stmt, i);
3481 while (handled_component_p (arg))
3482 arg = TREE_OPERAND (arg, 0);
3483 if (TREE_CODE (arg) == MEM_REF
3484 && TREE_OPERAND (arg, 0) == name
3485 && integer_zerop (TREE_OPERAND (arg, 1))
3486 && types_compatible_p (TREE_TYPE (arg),
3487 TREE_TYPE (TREE_TYPE (name)))
3488 && !TREE_THIS_VOLATILE (arg))
3489 uses_ok++;
3490 }
3491 }
3492
3493 /* If the number of valid uses does not match the number of
3494 uses in this stmt there is an unhandled use. */
3495 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3496 --uses_ok;
3497
3498 if (uses_ok != 0)
3499 ret = true;
3500
3501 if (ret)
3502 BREAK_FROM_IMM_USE_STMT (ui);
3503 }
3504
3505 return ret;
3506 }
3507
3508 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3509 them in candidate_bitmap. Note that these do not necessarily include
3510    parameters which are unused and thus can be removed.  Return true iff any
3511 such candidate has been found. */
3512
3513 static bool
3514 find_param_candidates (void)
3515 {
3516 tree parm;
3517 int count = 0;
3518 bool ret = false;
3519 const char *msg;
3520
3521 for (parm = DECL_ARGUMENTS (current_function_decl);
3522 parm;
3523 parm = DECL_CHAIN (parm))
3524 {
3525 tree type = TREE_TYPE (parm);
3526
3527 count++;
3528
3529 if (TREE_THIS_VOLATILE (parm)
3530 || TREE_ADDRESSABLE (parm)
3531 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3532 continue;
3533
3534 if (is_unused_scalar_param (parm))
3535 {
3536 ret = true;
3537 continue;
3538 }
3539
3540 if (POINTER_TYPE_P (type))
3541 {
3542 type = TREE_TYPE (type);
3543
3544 if (TREE_CODE (type) == FUNCTION_TYPE
3545 || TYPE_VOLATILE (type)
3546 || (TREE_CODE (type) == ARRAY_TYPE
3547 && TYPE_NONALIASED_COMPONENT (type))
3548 || !is_gimple_reg (parm)
3549 || is_va_list_type (type)
3550 || ptr_parm_has_direct_uses (parm))
3551 continue;
3552 }
3553 else if (!AGGREGATE_TYPE_P (type))
3554 continue;
3555
3556 if (!COMPLETE_TYPE_P (type)
3557 || !host_integerp (TYPE_SIZE (type), 1)
3558 || tree_low_cst (TYPE_SIZE (type), 1) == 0
3559 || (AGGREGATE_TYPE_P (type)
3560 && type_internals_preclude_sra_p (type, &msg)))
3561 continue;
3562
3563 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3564 ret = true;
3565 if (dump_file && (dump_flags & TDF_DETAILS))
3566 {
3567 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3568 print_generic_expr (dump_file, parm, 0);
3569 fprintf (dump_file, "\n");
3570 }
3571 }
3572
3573 func_param_count = count;
3574 return ret;
3575 }
3576
3577 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3578 maybe_modified. */
3579
3580 static bool
3581 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3582 void *data)
3583 {
3584 struct access *repr = (struct access *) data;
3585
3586 repr->grp_maybe_modified = 1;
3587 return true;
3588 }
3589
3590 /* Analyze what representatives (in linked lists accessible from
3591 REPRESENTATIVES) can be modified by side effects of statements in the
3592 current function. */
3593
3594 static void
3595 analyze_modified_params (VEC (access_p, heap) *representatives)
3596 {
3597 int i;
3598
3599 for (i = 0; i < func_param_count; i++)
3600 {
3601 struct access *repr;
3602
3603 for (repr = VEC_index (access_p, representatives, i);
3604 repr;
3605 repr = repr->next_grp)
3606 {
3607 struct access *access;
3608 bitmap visited;
3609 ao_ref ar;
3610
3611 if (no_accesses_p (repr))
3612 continue;
3613 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3614 || repr->grp_maybe_modified)
3615 continue;
3616
3617 ao_ref_init (&ar, repr->expr);
3618 visited = BITMAP_ALLOC (NULL);
3619 for (access = repr; access; access = access->next_sibling)
3620 {
3621 /* All accesses are read ones, otherwise grp_maybe_modified would
3622 be trivially set. */
3623 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3624 mark_maybe_modified, repr, &visited);
3625 if (repr->grp_maybe_modified)
3626 break;
3627 }
3628 BITMAP_FREE (visited);
3629 }
3630 }
3631 }
3632
3633 /* Propagate distances in bb_dereferences in the direction opposite to the
3634 control flow edges, in each step storing the maximum of the current value
3635 and the minimum of all successors. These steps are repeated until the table
3636 stabilizes. Note that BBs which might terminate the function (according to
3637 the final_bbs bitmap) are never updated in this way. */
3638
3639 static void
3640 propagate_dereference_distances (void)
3641 {
3642 VEC (basic_block, heap) *queue;
3643 basic_block bb;
3644
3645 queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
3646 VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
3647 FOR_EACH_BB (bb)
3648 {
3649 VEC_quick_push (basic_block, queue, bb);
3650 bb->aux = bb;
3651 }
3652
3653 while (!VEC_empty (basic_block, queue))
3654 {
3655 edge_iterator ei;
3656 edge e;
3657 bool change = false;
3658 int i;
3659
3660 bb = VEC_pop (basic_block, queue);
3661 bb->aux = NULL;
3662
3663 if (bitmap_bit_p (final_bbs, bb->index))
3664 continue;
3665
3666 for (i = 0; i < func_param_count; i++)
3667 {
3668 int idx = bb->index * func_param_count + i;
3669 bool first = true;
3670 HOST_WIDE_INT inh = 0;
3671
3672 FOR_EACH_EDGE (e, ei, bb->succs)
3673 {
3674 int succ_idx = e->dest->index * func_param_count + i;
3675
3676 if (e->src == EXIT_BLOCK_PTR)
3677 continue;
3678
3679 if (first)
3680 {
3681 first = false;
3682 inh = bb_dereferences [succ_idx];
3683 }
3684 else if (bb_dereferences [succ_idx] < inh)
3685 inh = bb_dereferences [succ_idx];
3686 }
3687
3688 if (!first && bb_dereferences[idx] < inh)
3689 {
3690 bb_dereferences[idx] = inh;
3691 change = true;
3692 }
3693 }
3694
3695 if (change && !bitmap_bit_p (final_bbs, bb->index))
3696 FOR_EACH_EDGE (e, ei, bb->preds)
3697 {
3698 if (e->src->aux)
3699 continue;
3700
3701 e->src->aux = e->src;
3702 VEC_quick_push (basic_block, queue, e->src);
3703 }
3704 }
3705
3706 VEC_free (basic_block, heap, queue);
3707 }
3708
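/* A small worked example of the propagation above (hypothetical CFG with one
   pointer parameter): assume BB2 branches to BB3 and BB4, BB3 dereferences
   the first 32 bits of the parameter and BB4 the first 64. Initially
   bb_dereferences holds 0 for BB2, 32 for BB3 and 64 for BB4. The minimum
   over the successors of BB2 is 32, which exceeds its current value of 0, so
   BB2 is updated to 32 and its predecessors are queued again. Blocks in
   final_bbs are only read, never updated. */
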
3709 /* Dump a dereferences TABLE with heading STR to file F. */
3710
3711 static void
3712 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3713 {
3714 basic_block bb;
3715
3716 fprintf (f, "%s", str);
3717 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3718 {
3719 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3720 if (bb != EXIT_BLOCK_PTR)
3721 {
3722 int i;
3723 for (i = 0; i < func_param_count; i++)
3724 {
3725 int idx = bb->index * func_param_count + i;
3726 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3727 }
3728 }
3729 fprintf (f, "\n");
3730 }
3731 fprintf (f, "\n");
3732 }
3733
3734 /* Determine which (parts of) parameters passed by reference and not
3735 assigned to are not certainly dereferenced in this function, meaning the
3736 dereference cannot be safely moved to the caller without potentially
3737 introducing a segfault. Mark such REPRESENTATIVES as
3738 grp_not_necessarilly_dereferenced.
3739 
3740 The maximum dereferenced "distance," i.e. the offset + size of the accessed
3741 part, is calculated for each pointer parameter rather than a simple
3742 boolean, in order to handle cases when only a fraction of the whole
3743 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3744 an example).
3745 
3746 The maximum dereference distances for each pointer parameter and BB are
3747 already stored in bb_dereferences. This routine simply propagates these
3748 values upwards by propagate_dereference_distances and then compares the
3749 distances of individual parameters in the ENTRY BB to the equivalent
3750 distances of each representative of a (fraction of a) parameter. */
3751
3752 static void
3753 analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
3754 {
3755 int i;
3756
3757 if (dump_file && (dump_flags & TDF_DETAILS))
3758 dump_dereferences_table (dump_file,
3759 "Dereference table before propagation:\n",
3760 bb_dereferences);
3761
3762 propagate_dereference_distances ();
3763
3764 if (dump_file && (dump_flags & TDF_DETAILS))
3765 dump_dereferences_table (dump_file,
3766 "Dereference table after propagation:\n",
3767 bb_dereferences);
3768
3769 for (i = 0; i < func_param_count; i++)
3770 {
3771 struct access *repr = VEC_index (access_p, representatives, i);
3772 int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
3773
3774 if (!repr || no_accesses_p (repr))
3775 continue;
3776
3777 do
3778 {
3779 if ((repr->offset + repr->size) > bb_dereferences[idx])
3780 repr->grp_not_necessarilly_dereferenced = 1;
3781 repr = repr->next_grp;
3782 }
3783 while (repr);
3784 }
3785 }
3786
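/* An illustrative sketch (not the referenced testcase):

     int g (int *p, int flag)
     {
       if (flag)
         return *p;
       return 0;
     }

   Here *p is read on only one path, so the distance propagated to the ENTRY
   block stays 0, which is smaller than offset + size of the access, and the
   representative of P is marked grp_not_necessarilly_dereferenced; turning P
   into a by-value parameter would make every caller dereference it
   unconditionally. */
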
3787 /* Return the representative access for the parameter declaration PARM if it is
3788 a scalar passed by reference which is not written to and the pointer value
3789 is not used directly. Thus, if it is legal to dereference it in the caller
3790 and we can rule out modifications through aliases, such a parameter should
3791 be turned into one passed by value. Return NULL otherwise. */
3792
3793 static struct access *
3794 unmodified_by_ref_scalar_representative (tree parm)
3795 {
3796 int i, access_count;
3797 struct access *repr;
3798 VEC (access_p, heap) *access_vec;
3799
3800 access_vec = get_base_access_vector (parm);
3801 gcc_assert (access_vec);
3802 repr = VEC_index (access_p, access_vec, 0);
3803 if (repr->write)
3804 return NULL;
3805 repr->group_representative = repr;
3806
3807 access_count = VEC_length (access_p, access_vec);
3808 for (i = 1; i < access_count; i++)
3809 {
3810 struct access *access = VEC_index (access_p, access_vec, i);
3811 if (access->write)
3812 return NULL;
3813 access->group_representative = repr;
3814 access->next_sibling = repr->next_sibling;
3815 repr->next_sibling = access;
3816 }
3817
3818 repr->grp_read = 1;
3819 repr->grp_scalar_ptr = 1;
3820 return repr;
3821 }
3822
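/* The typical shape handled here (hypothetical example):

     static int h (const int *p) { return *p + 1; }

   *p is only read and the pointer itself never escapes, so all accesses are
   linked under a single representative with grp_scalar_ptr set. If the later
   analyses succeed, the parameter is rewritten so that callers pass the int
   directly, which is counted in sra_stats.scalar_by_ref_to_by_val. */
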
3823 /* Return true iff this access precludes IPA-SRA of the parameter it is
3824 associated with. */
3825
3826 static bool
3827 access_precludes_ipa_sra_p (struct access *access)
3828 {
3829 /* Avoid issues such as the second simple testcase in PR 42025. The problem
3830 is an incompatible assignment in a call statement (and possibly even in asm
3831 statements). This can be relaxed by using a new temporary, but only for
3832 non-TREE_ADDRESSABLE types, and is probably not worth the complexity. (In
3833 intraprocedural SRA we deal with this by keeping the old aggregate around,
3834 something we cannot do in IPA-SRA.) */
3835 if (access->write
3836 && (is_gimple_call (access->stmt)
3837 || gimple_code (access->stmt) == GIMPLE_ASM))
3838 return true;
3839
3840 if (STRICT_ALIGNMENT
3841 && tree_non_aligned_mem_p (access->expr, TYPE_ALIGN (access->type)))
3842 return true;
3843
3844 return false;
3845 }
3846
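/* An example of an access precluded above (illustrative only, get_a standing
   for any external function):

     void k (struct S *p) { p->a = get_a (); }

   The store to p->a is the left-hand side of a call statement, so splitting
   P would require a temporary; IPA-SRA simply gives up on such parameters. */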
3847
3848 /* Sort collected accesses for parameter PARM, identify representatives for
3849 each accessed region and link them together. Return NULL if there are
3850 different but overlapping accesses, return the special pointer value
3851 &no_accesses_representant if there are no accesses for this parameter at
3852 all, and return the first representative otherwise. Set *RO_GRP if there is
3853 a group of accesses with only read (i.e. no write) accesses. */
3854
3855 static struct access *
3856 splice_param_accesses (tree parm, bool *ro_grp)
3857 {
3858 int i, j, access_count, group_count;
3859 int agg_size, total_size = 0;
3860 struct access *access, *res, **prev_acc_ptr = &res;
3861 VEC (access_p, heap) *access_vec;
3862
3863 access_vec = get_base_access_vector (parm);
3864 if (!access_vec)
3865 return &no_accesses_representant;
3866 access_count = VEC_length (access_p, access_vec);
3867
3868 VEC_qsort (access_p, access_vec, compare_access_positions);
3869
3870 i = 0;
3871 total_size = 0;
3872 group_count = 0;
3873 while (i < access_count)
3874 {
3875 bool modification;
3876 tree a1_alias_type;
3877 access = VEC_index (access_p, access_vec, i);
3878 modification = access->write;
3879 if (access_precludes_ipa_sra_p (access))
3880 return NULL;
3881 a1_alias_type = reference_alias_ptr_type (access->expr);
3882
3883 /* Access is about to become group representative unless we find some
3884 nasty overlap which would preclude us from breaking this parameter
3885 apart. */
3886
3887 j = i + 1;
3888 while (j < access_count)
3889 {
3890 struct access *ac2 = VEC_index (access_p, access_vec, j);
3891 if (ac2->offset != access->offset)
3892 {
3893 /* All or nothing law for parameters. */
3894 if (access->offset + access->size > ac2->offset)
3895 return NULL;
3896 else
3897 break;
3898 }
3899 else if (ac2->size != access->size)
3900 return NULL;
3901
3902 if (access_precludes_ipa_sra_p (ac2)
3903 || (ac2->type != access->type
3904 && (TREE_ADDRESSABLE (ac2->type)
3905 || TREE_ADDRESSABLE (access->type)))
3906 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
3907 return NULL;
3908
3909 modification |= ac2->write;
3910 ac2->group_representative = access;
3911 ac2->next_sibling = access->next_sibling;
3912 access->next_sibling = ac2;
3913 j++;
3914 }
3915
3916 group_count++;
3917 access->grp_maybe_modified = modification;
3918 if (!modification)
3919 *ro_grp = true;
3920 *prev_acc_ptr = access;
3921 prev_acc_ptr = &access->next_grp;
3922 total_size += access->size;
3923 i = j;
3924 }
3925
3926 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3927 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3928 else
3929 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3930 if (total_size >= agg_size)
3931 return NULL;
3932
3933 gcc_assert (group_count > 0);
3934 return res;
3935 }
3936
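/* Hypothetical illustration of the "all or nothing law" above: if one
   statement accesses a 32-bit field of *PARM at offset 0 and another
   accesses a 64-bit region also starting at offset 0, the two accesses
   overlap without being identical and splice_param_accesses returns NULL,
   leaving the parameter untouched. If there are no accesses at all,
   &no_accesses_representant is returned instead. */
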
3937 /* Decide whether parameters with representative accesses given by REPR should
3938 be reduced into components. */
3939
3940 static int
3941 decide_one_param_reduction (struct access *repr)
3942 {
3943 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
3944 bool by_ref;
3945 tree parm;
3946
3947 parm = repr->base;
3948 cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3949 gcc_assert (cur_parm_size > 0);
3950
3951 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3952 {
3953 by_ref = true;
3954 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3955 }
3956 else
3957 {
3958 by_ref = false;
3959 agg_size = cur_parm_size;
3960 }
3961
3962 if (dump_file)
3963 {
3964 struct access *acc;
3965 fprintf (dump_file, "Evaluating PARAM group sizes for ");
3966 print_generic_expr (dump_file, parm, 0);
3967 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
3968 for (acc = repr; acc; acc = acc->next_grp)
3969 dump_access (dump_file, acc, true);
3970 }
3971
3972 total_size = 0;
3973 new_param_count = 0;
3974
3975 for (; repr; repr = repr->next_grp)
3976 {
3977 gcc_assert (parm == repr->base);
3978
3979 /* Taking the address of a non-addressable field is verboten. */
3980 if (by_ref && repr->non_addressable)
3981 return 0;
3982
3983 /* Do not decompose a non-BLKmode param in a way that would
3984 create BLKmode params. Especially for by-reference passing
3985 (thus, pointer-type param) this is hardly worthwhile. */
3986 if (DECL_MODE (parm) != BLKmode
3987 && TYPE_MODE (repr->type) == BLKmode)
3988 return 0;
3989
3990 if (!by_ref || (!repr->grp_maybe_modified
3991 && !repr->grp_not_necessarilly_dereferenced))
3992 total_size += repr->size;
3993 else
3994 total_size += cur_parm_size;
3995
3996 new_param_count++;
3997 }
3998
3999 gcc_assert (new_param_count > 0);
4000
4001 if (optimize_function_for_size_p (cfun))
4002 parm_size_limit = cur_parm_size;
4003 else
4004 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
4005 * cur_parm_size);
4006
4007 if (total_size < agg_size
4008 && total_size <= parm_size_limit)
4009 {
4010 if (dump_file)
4011 fprintf (dump_file, " ....will be split into %i components\n",
4012 new_param_count);
4013 return new_param_count;
4014 }
4015 else
4016 return 0;
4017 }
4018
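/* A sketch of the size heuristics above (numbers purely illustrative):
   suppose a by-reference parameter points to a 128-bit struct, the pointer
   itself is 64 bits wide and the accesses cover two 32-bit fields that are
   certainly dereferenced and never modified. Then total_size is 64, which is
   smaller than agg_size (128) and, assuming PARAM_IPA_SRA_PTR_GROWTH_FACTOR
   is at least one, also within parm_size_limit, so the parameter is split
   into two components. Had the accesses covered the whole struct, total_size
   would equal agg_size and the function would return 0. */
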
4019 /* The order of the following enumerators is important; we need to do extra
4020 work for UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
4021 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4022 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
4023
4024 /* Identify representatives of all accesses to all candidate parameters for
4025 IPA-SRA. Return result based on what representatives have been found. */
4026
4027 static enum ipa_splicing_result
4028 splice_all_param_accesses (VEC (access_p, heap) **representatives)
4029 {
4030 enum ipa_splicing_result result = NO_GOOD_ACCESS;
4031 tree parm;
4032 struct access *repr;
4033
4034 *representatives = VEC_alloc (access_p, heap, func_param_count);
4035
4036 for (parm = DECL_ARGUMENTS (current_function_decl);
4037 parm;
4038 parm = DECL_CHAIN (parm))
4039 {
4040 if (is_unused_scalar_param (parm))
4041 {
4042 VEC_quick_push (access_p, *representatives,
4043 &no_accesses_representant);
4044 if (result == NO_GOOD_ACCESS)
4045 result = UNUSED_PARAMS;
4046 }
4047 else if (POINTER_TYPE_P (TREE_TYPE (parm))
4048 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4049 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4050 {
4051 repr = unmodified_by_ref_scalar_representative (parm);
4052 VEC_quick_push (access_p, *representatives, repr);
4053 if (repr)
4054 result = UNMODIF_BY_REF_ACCESSES;
4055 }
4056 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4057 {
4058 bool ro_grp = false;
4059 repr = splice_param_accesses (parm, &ro_grp);
4060 VEC_quick_push (access_p, *representatives, repr);
4061
4062 if (repr && !no_accesses_p (repr))
4063 {
4064 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4065 {
4066 if (ro_grp)
4067 result = UNMODIF_BY_REF_ACCESSES;
4068 else if (result < MODIF_BY_REF_ACCESSES)
4069 result = MODIF_BY_REF_ACCESSES;
4070 }
4071 else if (result < BY_VAL_ACCESSES)
4072 result = BY_VAL_ACCESSES;
4073 }
4074 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4075 result = UNUSED_PARAMS;
4076 }
4077 else
4078 VEC_quick_push (access_p, *representatives, NULL);
4079 }
4080
4081 if (result == NO_GOOD_ACCESS)
4082 {
4083 VEC_free (access_p, heap, *representatives);
4084 *representatives = NULL;
4085 return NO_GOOD_ACCESS;
4086 }
4087
4088 return result;
4089 }
4090
4091 /* Return the index of BASE in PARMS. Abort if it is not found. */
4092
4093 static inline int
4094 get_param_index (tree base, VEC(tree, heap) *parms)
4095 {
4096 int i, len;
4097
4098 len = VEC_length (tree, parms);
4099 for (i = 0; i < len; i++)
4100 if (VEC_index (tree, parms, i) == base)
4101 return i;
4102 gcc_unreachable ();
4103 }
4104
4105 /* Convert the decisions made at the representative level into compact
4106 parameter adjustments. REPRESENTATIVES are pointers to the first
4107 representatives of each parameter's accesses, ADJUSTMENTS_COUNT is the
4108 expected final number of adjustments. */
4109
4110 static ipa_parm_adjustment_vec
4111 turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
4112 int adjustments_count)
4113 {
4114 VEC (tree, heap) *parms;
4115 ipa_parm_adjustment_vec adjustments;
4116 tree parm;
4117 int i;
4118
4119 gcc_assert (adjustments_count > 0);
4120 parms = ipa_get_vector_of_formal_parms (current_function_decl);
4121 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
4122 parm = DECL_ARGUMENTS (current_function_decl);
4123 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4124 {
4125 struct access *repr = VEC_index (access_p, representatives, i);
4126
4127 if (!repr || no_accesses_p (repr))
4128 {
4129 struct ipa_parm_adjustment *adj;
4130
4131 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
4132 memset (adj, 0, sizeof (*adj));
4133 adj->base_index = get_param_index (parm, parms);
4134 adj->base = parm;
4135 if (!repr)
4136 adj->copy_param = 1;
4137 else
4138 adj->remove_param = 1;
4139 }
4140 else
4141 {
4142 struct ipa_parm_adjustment *adj;
4143 int index = get_param_index (parm, parms);
4144
4145 for (; repr; repr = repr->next_grp)
4146 {
4147 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
4148 memset (adj, 0, sizeof (*adj));
4149 gcc_assert (repr->base == parm);
4150 adj->base_index = index;
4151 adj->base = repr->base;
4152 adj->type = repr->type;
4153 adj->alias_ptr_type = reference_alias_ptr_type (repr->expr);
4154 adj->offset = repr->offset;
4155 adj->by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4156 && (repr->grp_maybe_modified
4157 || repr->grp_not_necessarilly_dereferenced));
4158
4159 }
4160 }
4161 }
4162 VEC_free (tree, heap, parms);
4163 return adjustments;
4164 }
4165
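/* Continuing the hypothetical examples: for

     static int f (int unused, struct S *p);

   where UNUSED is never read and only p->a is accessed, the vector would
   contain one adjustment for UNUSED with remove_param set and one adjustment
   for P with base_index 1, the offset and type taken from the representative
   access, and by_ref set only if the access may be modified or is not
   necessarily dereferenced. */
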
4166 /* Analyze the collected accesses and produce a plan of what to do with the
4167 parameters in the form of adjustments, NULL meaning nothing is to be done. */
4168
4169 static ipa_parm_adjustment_vec
4170 analyze_all_param_acesses (void)
4171 {
4172 enum ipa_splicing_result repr_state;
4173 bool proceed = false;
4174 int i, adjustments_count = 0;
4175 VEC (access_p, heap) *representatives;
4176 ipa_parm_adjustment_vec adjustments;
4177
4178 repr_state = splice_all_param_accesses (&representatives);
4179 if (repr_state == NO_GOOD_ACCESS)
4180 return NULL;
4181
4182 /* If there are any parameters passed by reference which are not modified
4183 directly, we need to check whether they can be modified indirectly. */
4184 if (repr_state == UNMODIF_BY_REF_ACCESSES)
4185 {
4186 analyze_caller_dereference_legality (representatives);
4187 analyze_modified_params (representatives);
4188 }
4189
4190 for (i = 0; i < func_param_count; i++)
4191 {
4192 struct access *repr = VEC_index (access_p, representatives, i);
4193
4194 if (repr && !no_accesses_p (repr))
4195 {
4196 if (repr->grp_scalar_ptr)
4197 {
4198 adjustments_count++;
4199 if (repr->grp_not_necessarilly_dereferenced
4200 || repr->grp_maybe_modified)
4201 VEC_replace (access_p, representatives, i, NULL);
4202 else
4203 {
4204 proceed = true;
4205 sra_stats.scalar_by_ref_to_by_val++;
4206 }
4207 }
4208 else
4209 {
4210 int new_components = decide_one_param_reduction (repr);
4211
4212 if (new_components == 0)
4213 {
4214 VEC_replace (access_p, representatives, i, NULL);
4215 adjustments_count++;
4216 }
4217 else
4218 {
4219 adjustments_count += new_components;
4220 sra_stats.aggregate_params_reduced++;
4221 sra_stats.param_reductions_created += new_components;
4222 proceed = true;
4223 }
4224 }
4225 }
4226 else
4227 {
4228 if (no_accesses_p (repr))
4229 {
4230 proceed = true;
4231 sra_stats.deleted_unused_parameters++;
4232 }
4233 adjustments_count++;
4234 }
4235 }
4236
4237 if (!proceed && dump_file)
4238 fprintf (dump_file, "NOT proceeding to change params.\n");
4239
4240 if (proceed)
4241 adjustments = turn_representatives_into_adjustments (representatives,
4242 adjustments_count);
4243 else
4244 adjustments = NULL;
4245
4246 VEC_free (access_p, heap, representatives);
4247 return adjustments;
4248 }
4249
4250 /* If a parameter replacement identified by ADJ does not yet exist in the form
4251 of a declaration, create and record it; otherwise return the previously
4252 created one. */
4253
4254 static tree
4255 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4256 {
4257 tree repl;
4258 if (!adj->new_ssa_base)
4259 {
4260 char *pretty_name = make_fancy_name (adj->base);
4261
4262 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4263 DECL_NAME (repl) = get_identifier (pretty_name);
4264 obstack_free (&name_obstack, pretty_name);
4265
4266 add_referenced_var (repl);
4267 adj->new_ssa_base = repl;
4268 }
4269 else
4270 repl = adj->new_ssa_base;
4271 return repl;
4272 }
4273
4274 /* Find the first adjustment for a particular parameter BASE in a vector of
4275 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
4276 adjustment. */
4277
4278 static struct ipa_parm_adjustment *
4279 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4280 {
4281 int i, len;
4282
4283 len = VEC_length (ipa_parm_adjustment_t, adjustments);
4284 for (i = 0; i < len; i++)
4285 {
4286 struct ipa_parm_adjustment *adj;
4287
4288 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4289 if (!adj->copy_param && adj->base == base)
4290 return adj;
4291 }
4292
4293 return NULL;
4294 }
4295
4296 /* If the statement STMT defines an SSA_NAME of a parameter which is to be
4297 removed because its value is not used, replace the SSA_NAME, together with
4298 all of its uses, with one relating to a created VAR_DECL and return true.
4299 ADJUSTMENTS is a pointer to an adjustments vector. */
4300
4301 static bool
4302 replace_removed_params_ssa_names (gimple stmt,
4303 ipa_parm_adjustment_vec adjustments)
4304 {
4305 struct ipa_parm_adjustment *adj;
4306 tree lhs, decl, repl, name;
4307
4308 if (gimple_code (stmt) == GIMPLE_PHI)
4309 lhs = gimple_phi_result (stmt);
4310 else if (is_gimple_assign (stmt))
4311 lhs = gimple_assign_lhs (stmt);
4312 else if (is_gimple_call (stmt))
4313 lhs = gimple_call_lhs (stmt);
4314 else
4315 gcc_unreachable ();
4316
4317 if (TREE_CODE (lhs) != SSA_NAME)
4318 return false;
4319 decl = SSA_NAME_VAR (lhs);
4320 if (TREE_CODE (decl) != PARM_DECL)
4321 return false;
4322
4323 adj = get_adjustment_for_base (adjustments, decl);
4324 if (!adj)
4325 return false;
4326
4327 repl = get_replaced_param_substitute (adj);
4328 name = make_ssa_name (repl, stmt);
4329
4330 if (dump_file)
4331 {
4332 fprintf (dump_file, "replacing an SSA name of a removed param ");
4333 print_generic_expr (dump_file, lhs, 0);
4334 fprintf (dump_file, " with ");
4335 print_generic_expr (dump_file, name, 0);
4336 fprintf (dump_file, "\n");
4337 }
4338
4339 if (is_gimple_assign (stmt))
4340 gimple_assign_set_lhs (stmt, name);
4341 else if (is_gimple_call (stmt))
4342 gimple_call_set_lhs (stmt, name);
4343 else
4344 gimple_phi_set_result (stmt, name);
4345
4346 replace_uses_by (lhs, name);
4347 release_ssa_name (lhs);
4348 return true;
4349 }
4350
4351 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4352 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4353 specifies whether the function should care about type incompatibility
4354 between the current and new expressions. If it is false, the function will
4355 leave incompatibility issues to the caller. Return true iff the expression
4356 was modified. */
4357
4358 static bool
4359 sra_ipa_modify_expr (tree *expr, bool convert,
4360 ipa_parm_adjustment_vec adjustments)
4361 {
4362 int i, len;
4363 struct ipa_parm_adjustment *adj, *cand = NULL;
4364 HOST_WIDE_INT offset, size, max_size;
4365 tree base, src;
4366
4367 len = VEC_length (ipa_parm_adjustment_t, adjustments);
4368
4369 if (TREE_CODE (*expr) == BIT_FIELD_REF
4370 || TREE_CODE (*expr) == IMAGPART_EXPR
4371 || TREE_CODE (*expr) == REALPART_EXPR)
4372 {
4373 expr = &TREE_OPERAND (*expr, 0);
4374 convert = true;
4375 }
4376
4377 base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
4378 if (!base || size == -1 || max_size == -1)
4379 return false;
4380
4381 if (TREE_CODE (base) == MEM_REF)
4382 {
4383 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
4384 base = TREE_OPERAND (base, 0);
4385 }
4386
4387 base = get_ssa_base_param (base);
4388 if (!base || TREE_CODE (base) != PARM_DECL)
4389 return false;
4390
4391 for (i = 0; i < len; i++)
4392 {
4393 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4394
4395 if (adj->base == base &&
4396 (adj->offset == offset || adj->remove_param))
4397 {
4398 cand = adj;
4399 break;
4400 }
4401 }
4402 if (!cand || cand->copy_param || cand->remove_param)
4403 return false;
4404
4405 if (cand->by_ref)
4406 src = build_simple_mem_ref (cand->reduction);
4407 else
4408 src = cand->reduction;
4409
4410 if (dump_file && (dump_flags & TDF_DETAILS))
4411 {
4412 fprintf (dump_file, "About to replace expr ");
4413 print_generic_expr (dump_file, *expr, 0);
4414 fprintf (dump_file, " with ");
4415 print_generic_expr (dump_file, src, 0);
4416 fprintf (dump_file, "\n");
4417 }
4418
4419 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4420 {
4421 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4422 *expr = vce;
4423 }
4424 else
4425 *expr = src;
4426 return true;
4427 }
4428
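/* For example (hypothetical), once a parameter P has been reduced to a
   scalar replacement for p->a, an occurrence of p->a in the body is matched
   against the adjustment with the same base and offset and rewritten to
   cand->reduction, wrapped in a MEM_REF when the component is still passed
   by reference and in a VIEW_CONVERT_EXPR when CONVERT is set and the types
   are not compatible. */
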
4429 /* If the statement pointed to by STMT_PTR contains any expressions that need
4430 to be replaced with different ones as noted by ADJUSTMENTS, do so. Handle
4431 any potential type incompatibilities (GSI is used to accommodate conversion
4432 statements and must point to the statement). Return true iff the statement
4433 was modified. */
4434
4435 static bool
4436 sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
4437 ipa_parm_adjustment_vec adjustments)
4438 {
4439 gimple stmt = *stmt_ptr;
4440 tree *lhs_p, *rhs_p;
4441 bool any;
4442
4443 if (!gimple_assign_single_p (stmt))
4444 return false;
4445
4446 rhs_p = gimple_assign_rhs1_ptr (stmt);
4447 lhs_p = gimple_assign_lhs_ptr (stmt);
4448
4449 any = sra_ipa_modify_expr (rhs_p, false, adjustments);
4450 any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
4451 if (any)
4452 {
4453 tree new_rhs = NULL_TREE;
4454
4455 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4456 {
4457 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4458 {
4459 /* V_C_Es of constructors can cause trouble (PR 42714). */
4460 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4461 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4462 else
4463 *rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
4464 }
4465 else
4466 new_rhs = fold_build1_loc (gimple_location (stmt),
4467 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4468 *rhs_p);
4469 }
4470 else if (REFERENCE_CLASS_P (*rhs_p)
4471 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4472 && !is_gimple_reg (*lhs_p))
4473 /* This can happen when an assignment between two single-field
4474 structures is turned into an assignment between two pointers to
4475 scalars (PR 42237). */
4476 new_rhs = *rhs_p;
4477
4478 if (new_rhs)
4479 {
4480 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4481 true, GSI_SAME_STMT);
4482
4483 gimple_assign_set_rhs_from_tree (gsi, tmp);
4484 }
4485
4486 return true;
4487 }
4488
4489 return false;
4490 }
4491
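/* Illustrative case for the CONSTRUCTOR handling above (hypothetical): if an
   assignment "s = {}" clears an aggregate whose only used field was turned
   into a scalar replacement, the left-hand side becomes the new scalar and
   the empty CONSTRUCTOR is replaced by a zero constant of the new type (or
   by an empty CONSTRUCTOR of that type for non-register types) rather than
   being wrapped in a VIEW_CONVERT_EXPR, which PR 42714 shows can cause
   trouble. */
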
4492 /* Traverse the function body and perform all modifications as described in
4493 ADJUSTMENTS. Return true iff the CFG has been changed. */
4494
4495 static bool
4496 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4497 {
4498 bool cfg_changed = false;
4499 basic_block bb;
4500
4501 FOR_EACH_BB (bb)
4502 {
4503 gimple_stmt_iterator gsi;
4504
4505 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4506 replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);
4507
4508 gsi = gsi_start_bb (bb);
4509 while (!gsi_end_p (gsi))
4510 {
4511 gimple stmt = gsi_stmt (gsi);
4512 bool modified = false;
4513 tree *t;
4514 unsigned i;
4515
4516 switch (gimple_code (stmt))
4517 {
4518 case GIMPLE_RETURN:
4519 t = gimple_return_retval_ptr (stmt);
4520 if (*t != NULL_TREE)
4521 modified |= sra_ipa_modify_expr (t, true, adjustments);
4522 break;
4523
4524 case GIMPLE_ASSIGN:
4525 modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
4526 modified |= replace_removed_params_ssa_names (stmt, adjustments);
4527 break;
4528
4529 case GIMPLE_CALL:
4530 /* Operands must be processed before the lhs. */
4531 for (i = 0; i < gimple_call_num_args (stmt); i++)
4532 {
4533 t = gimple_call_arg_ptr (stmt, i);
4534 modified |= sra_ipa_modify_expr (t, true, adjustments);
4535 }
4536
4537 if (gimple_call_lhs (stmt))
4538 {
4539 t = gimple_call_lhs_ptr (stmt);
4540 modified |= sra_ipa_modify_expr (t, false, adjustments);
4541 modified |= replace_removed_params_ssa_names (stmt,
4542 adjustments);
4543 }
4544 break;
4545
4546 case GIMPLE_ASM:
4547 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
4548 {
4549 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
4550 modified |= sra_ipa_modify_expr (t, true, adjustments);
4551 }
4552 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
4553 {
4554 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
4555 modified |= sra_ipa_modify_expr (t, false, adjustments);
4556 }
4557 break;
4558
4559 default:
4560 break;
4561 }
4562
4563 if (modified)
4564 {
4565 update_stmt (stmt);
4566 if (maybe_clean_eh_stmt (stmt)
4567 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4568 cfg_changed = true;
4569 }
4570 gsi_next (&gsi);
4571 }
4572 }
4573
4574 return cfg_changed;
4575 }
4576
4577 /* Call gimple_debug_bind_reset_value on all debug statements describing
4578 gimple register parameters that are being removed or replaced. */
4579
4580 static void
4581 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4582 {
4583 int i, len;
4584 gimple_stmt_iterator *gsip = NULL, gsi;
4585
4586 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR))
4587 {
4588 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
4589 gsip = &gsi;
4590 }
4591 len = VEC_length (ipa_parm_adjustment_t, adjustments);
4592 for (i = 0; i < len; i++)
4593 {
4594 struct ipa_parm_adjustment *adj;
4595 imm_use_iterator ui;
4596 gimple stmt, def_temp;
4597 tree name, vexpr, copy = NULL_TREE;
4598 use_operand_p use_p;
4599
4600 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4601 if (adj->copy_param || !is_gimple_reg (adj->base))
4602 continue;
4603 name = gimple_default_def (cfun, adj->base);
4604 vexpr = NULL;
4605 if (name)
4606 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4607 {
4608 /* All other users must have been removed by
4609 ipa_sra_modify_function_body. */
4610 gcc_assert (is_gimple_debug (stmt));
4611 if (vexpr == NULL && gsip != NULL)
4612 {
4613 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4614 vexpr = make_node (DEBUG_EXPR_DECL);
4615 def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
4616 NULL);
4617 DECL_ARTIFICIAL (vexpr) = 1;
4618 TREE_TYPE (vexpr) = TREE_TYPE (name);
4619 DECL_MODE (vexpr) = DECL_MODE (adj->base);
4620 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4621 }
4622 if (vexpr)
4623 {
4624 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4625 SET_USE (use_p, vexpr);
4626 }
4627 else
4628 gimple_debug_bind_reset_value (stmt);
4629 update_stmt (stmt);
4630 }
4631 /* Create a VAR_DECL for debug info purposes. */
4632 if (!DECL_IGNORED_P (adj->base))
4633 {
4634 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
4635 VAR_DECL, DECL_NAME (adj->base),
4636 TREE_TYPE (adj->base));
4637 if (DECL_PT_UID_SET_P (adj->base))
4638 SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
4639 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
4640 TREE_READONLY (copy) = TREE_READONLY (adj->base);
4641 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
4642 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
4643 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
4644 DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
4645 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
4646 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
4647 SET_DECL_RTL (copy, 0);
4648 TREE_USED (copy) = 1;
4649 DECL_CONTEXT (copy) = current_function_decl;
4650 add_referenced_var (copy);
4651 add_local_decl (cfun, copy);
4652 DECL_CHAIN (copy) =
4653 BLOCK_VARS (DECL_INITIAL (current_function_decl));
4654 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
4655 }
4656 if (gsip != NULL && copy && target_for_debug_bind (adj->base))
4657 {
4658 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4659 if (vexpr)
4660 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
4661 else
4662 def_temp = gimple_build_debug_source_bind (copy, adj->base,
4663 NULL);
4664 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4665 }
4666 }
4667 }
4668
4669 /* Return false iff all callers have at least as many actual arguments as there
4670 are formal parameters in the current function. */
4671
4672 static bool
4673 not_all_callers_have_enough_arguments_p (struct cgraph_node *node,
4674 void *data ATTRIBUTE_UNUSED)
4675 {
4676 struct cgraph_edge *cs;
4677 for (cs = node->callers; cs; cs = cs->next_caller)
4678 if (!callsite_has_enough_arguments_p (cs->call_stmt))
4679 return true;
4680
4681 return false;
4682 }
4683
4684 /* Convert all callers of NODE. */
4685
4686 static bool
4687 convert_callers_for_node (struct cgraph_node *node,
4688 void *data)
4689 {
4690 ipa_parm_adjustment_vec adjustments = (ipa_parm_adjustment_vec)data;
4691 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4692 struct cgraph_edge *cs;
4693
4694 for (cs = node->callers; cs; cs = cs->next_caller)
4695 {
4696 current_function_decl = cs->caller->decl;
4697 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
4698
4699 if (dump_file)
4700 fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
4701 cs->caller->uid, cs->callee->uid,
4702 cgraph_node_name (cs->caller),
4703 cgraph_node_name (cs->callee));
4704
4705 ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);
4706
4707 pop_cfun ();
4708 }
4709
4710 for (cs = node->callers; cs; cs = cs->next_caller)
4711 if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
4712 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
4713 compute_inline_parameters (cs->caller, true);
4714 BITMAP_FREE (recomputed_callers);
4715
4716 return true;
4717 }
4718
4719 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.
     Recursive calls to OLD_DECL are redirected to NODE and adjusted as well. */
4720
4721 static void
4722 convert_callers (struct cgraph_node *node, tree old_decl,
4723 ipa_parm_adjustment_vec adjustments)
4724 {
4725 tree old_cur_fndecl = current_function_decl;
4726 basic_block this_block;
4727
4728 cgraph_for_node_and_aliases (node, convert_callers_for_node,
4729 adjustments, false);
4730
4731 current_function_decl = old_cur_fndecl;
4732
4733 if (!encountered_recursive_call)
4734 return;
4735
4736 FOR_EACH_BB (this_block)
4737 {
4738 gimple_stmt_iterator gsi;
4739
4740 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4741 {
4742 gimple stmt = gsi_stmt (gsi);
4743 tree call_fndecl;
4744 if (gimple_code (stmt) != GIMPLE_CALL)
4745 continue;
4746 call_fndecl = gimple_call_fndecl (stmt);
4747 if (call_fndecl == old_decl)
4748 {
4749 if (dump_file)
4750 fprintf (dump_file, "Adjusting recursive call");
4751 gimple_call_set_fndecl (stmt, node->decl);
4752 ipa_modify_call_arguments (NULL, stmt, adjustments);
4753 }
4754 }
4755 }
4756
4757 return;
4758 }
4759
4760 /* Perform all the modifications required in IPA-SRA for NODE to have
4761 parameters as given in ADJUSTMENTS. Return true iff the CFG has been changed. */
4762
4763 static bool
4764 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4765 {
4766 struct cgraph_node *new_node;
4767 bool cfg_changed;
4768 VEC (cgraph_edge_p, heap) * redirect_callers = collect_callers_of_node (node);
4769
4770 rebuild_cgraph_edges ();
4771 free_dominance_info (CDI_DOMINATORS);
4772 pop_cfun ();
4773 current_function_decl = NULL_TREE;
4774
4775 new_node = cgraph_function_versioning (node, redirect_callers, NULL, NULL,
4776 false, NULL, NULL, "isra");
4777 current_function_decl = new_node->decl;
4778 push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
4779
4780 ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
4781 cfg_changed = ipa_sra_modify_function_body (adjustments);
4782 sra_ipa_reset_debug_stmts (adjustments);
4783 convert_callers (new_node, node->decl, adjustments);
4784 cgraph_make_node_local (new_node);
4785 return cfg_changed;
4786 }
4787
4788 /* Return false if the function is apparently unsuitable for IPA-SRA based on
4789 its attributes; return true otherwise. NODE is the cgraph node of the
4790 current function. */
4791
4792 static bool
4793 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
4794 {
4795 if (!cgraph_node_can_be_local_p (node))
4796 {
4797 if (dump_file)
4798 fprintf (dump_file, "Function not local to this compilation unit.\n");
4799 return false;
4800 }
4801
4802 if (!node->local.can_change_signature)
4803 {
4804 if (dump_file)
4805 fprintf (dump_file, "Function can not change signature.\n");
4806 return false;
4807 }
4808
4809 if (!tree_versionable_function_p (node->decl))
4810 {
4811 if (dump_file)
4812 fprintf (dump_file, "Function is not versionable.\n");
4813 return false;
4814 }
4815
4816 if (DECL_VIRTUAL_P (current_function_decl))
4817 {
4818 if (dump_file)
4819 fprintf (dump_file, "Function is a virtual method.\n");
4820 return false;
4821 }
4822
4823 if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
4824 && inline_summary(node)->size >= MAX_INLINE_INSNS_AUTO)
4825 {
4826 if (dump_file)
4827 fprintf (dump_file, "Function too big to be made truly local.\n");
4828 return false;
4829 }
4830
4831 if (!node->callers)
4832 {
4833 if (dump_file)
4834 fprintf (dump_file,
4835 "Function has no callers in this compilation unit.\n");
4836 return false;
4837 }
4838
4839 if (cfun->stdarg)
4840 {
4841 if (dump_file)
4842 fprintf (dump_file, "Function uses stdarg. \n");
4843 return false;
4844 }
4845
4846 if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
4847 return false;
4848
4849 return true;
4850 }
4851
4852 /* Perform early interprocedural SRA. */
4853
4854 static unsigned int
4855 ipa_early_sra (void)
4856 {
4857 struct cgraph_node *node = cgraph_get_node (current_function_decl);
4858 ipa_parm_adjustment_vec adjustments;
4859 int ret = 0;
4860
4861 if (!ipa_sra_preliminary_function_checks (node))
4862 return 0;
4863
4864 sra_initialize ();
4865 sra_mode = SRA_MODE_EARLY_IPA;
4866
4867 if (!find_param_candidates ())
4868 {
4869 if (dump_file)
4870 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
4871 goto simple_out;
4872 }
4873
4874 if (cgraph_for_node_and_aliases (node, not_all_callers_have_enough_arguments_p,
4875 NULL, true))
4876 {
4877 if (dump_file)
4878 fprintf (dump_file, "There are callers with insufficient number of "
4879 "arguments.\n");
4880 goto simple_out;
4881 }
4882
4883 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
4884 func_param_count
4885 * last_basic_block_for_function (cfun));
4886 final_bbs = BITMAP_ALLOC (NULL);
4887
4888 scan_function ();
4889 if (encountered_apply_args)
4890 {
4891 if (dump_file)
4892 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
4893 goto out;
4894 }
4895
4896 if (encountered_unchangable_recursive_call)
4897 {
4898 if (dump_file)
4899 fprintf (dump_file, "Function calls itself with insufficient "
4900 "number of arguments.\n");
4901 goto out;
4902 }
4903
4904 adjustments = analyze_all_param_acesses ();
4905 if (!adjustments)
4906 goto out;
4907 if (dump_file)
4908 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
4909
4910 if (modify_function (node, adjustments))
4911 ret = TODO_update_ssa | TODO_cleanup_cfg;
4912 else
4913 ret = TODO_update_ssa;
4914 VEC_free (ipa_parm_adjustment_t, heap, adjustments);
4915
4916 statistics_counter_event (cfun, "Unused parameters deleted",
4917 sra_stats.deleted_unused_parameters);
4918 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
4919 sra_stats.scalar_by_ref_to_by_val);
4920 statistics_counter_event (cfun, "Aggregate parameters broken up",
4921 sra_stats.aggregate_params_reduced);
4922 statistics_counter_event (cfun, "Aggregate parameter components created",
4923 sra_stats.param_reductions_created);
4924
4925 out:
4926 BITMAP_FREE (final_bbs);
4927 free (bb_dereferences);
4928 simple_out:
4929 sra_deinitialize ();
4930 return ret;
4931 }
4932
4933 /* Return true iff early IPA-SRA shall be performed. */
4934 static bool
4935 ipa_early_sra_gate (void)
4936 {
4937 return flag_ipa_sra && dbg_cnt (eipa_sra);
4938 }
4939
4940 struct gimple_opt_pass pass_early_ipa_sra =
4941 {
4942 {
4943 GIMPLE_PASS,
4944 "eipa_sra", /* name */
4945 ipa_early_sra_gate, /* gate */
4946 ipa_early_sra, /* execute */
4947 NULL, /* sub */
4948 NULL, /* next */
4949 0, /* static_pass_number */
4950 TV_IPA_SRA, /* tv_id */
4951 0, /* properties_required */
4952 0, /* properties_provided */
4953 0, /* properties_destroyed */
4954 0, /* todo_flags_start */
4955 TODO_dump_cgraph /* todo_flags_finish */
4956 }
4957 };