gcc/tree-sra.c

/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical, the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */

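/* As an illustration (a hypothetical example, not taken from this file),
   given

     struct S { int i; float f; };
     struct S s;
     s.i = 123;
     s.f = 2.5f;
     return s.i;

   SRA creates independent scalar replacements for the two fields, so the
   function body effectively becomes

     int s$i;
     float s$f;
     s$i = 123;
     s$f = 2.5f;
     return s$i;

   and the aggregate variable s can be removed entirely if nothing else
   uses it as a whole.  */
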
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "alloc-pool.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "cgraph.h"
#include "tree-flow.h"
#include "ipa-prop.h"
#include "tree-pretty-print.h"
#include "statistics.h"
#include "tree-dump.h"
#include "timevar.h"
#include "params.h"
#include "target.h"
#include "flags.h"
#include "dbgcnt.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"
#include "ipa-inline.h"

/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
		SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
		SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;

struct assign_link;

/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to offset of the root and offset+size is smaller or equal
   to offset+size of the root.  Children of an access are sorted by offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or a vector.  It is a
   duty of the modifying functions to replace them appropriately.  */

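/* For instance (an illustrative sketch, not part of the original source),
   for

     struct inner { int a; int b; };
     struct outer { struct inner in; int c; };

   accesses to x.in, x.in.a, x.in.b and x.c of a variable x would form the
   tree

     x.in (offset 0, size 64)
       x.in.a (offset 0, size 32)
       x.in.b (offset 32, size 32)
     x.c (offset 64, size 32)

   where x.in.a and x.in.b are children of x.in and x.c is its next sibling
   (assuming 32-bit int and no padding).  */
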
struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
     `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access write access?  */
  unsigned write : 1;

  /* Is this access an access to a non-addressable field?  */
  unsigned non_addressable : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag is
     not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned grp_total_scalarization : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  We do
     the decision and creation at different places because create_tmp_var
     cannot be called from within FOR_EACH_REFERENCED_VAR.  */
  unsigned grp_to_be_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;

DEF_VEC_P (access_p);
DEF_VEC_ALLOC_P (access_p, heap);

/* Alloc pool for allocating access structures.  */
static alloc_pool access_pool;

/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static alloc_pool link_pool;

/* Base (tree) -> Vector (VEC(access_p,heap) *) map.  */
static struct pointer_map_t *base_access_vec;

/* Bitmap of candidates.  */
static bitmap candidate_bitmap;

/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are and need to be used as a whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;

/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}

static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;

/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
	     "grp_assignment_write = %d, grp_scalar_read = %d, "
	     "grp_scalar_write = %d, grp_total_scalarization = %d, "
	     "grp_hint = %d, grp_covered = %d, "
	     "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
	     "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
	     "grp_maybe_modified = %d, "
	     "grp_not_necessarilly_dereferenced = %d\n",
	     access->grp_read, access->grp_write, access->grp_assignment_read,
	     access->grp_assignment_write, access->grp_scalar_read,
	     access->grp_scalar_write, access->grp_total_scalarization,
	     access->grp_hint, access->grp_covered,
	     access->grp_unscalarizable_region, access->grp_unscalarized_data,
	     access->grp_partial_lhs, access->grp_to_be_replaced,
	     access->grp_maybe_modified,
	     access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
	     "grp_partial_lhs = %d\n",
	     access->write, access->grp_total_scalarization,
	     access->grp_partial_lhs);
}

/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
	fputs ("* ", f);

      dump_access (f, access, true);

      if (access->first_child)
	dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}

/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}

/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}

/* Return true iff ACC is (partly) covered by at least one replacement.  */

static bool
access_has_replacements_p (struct access *acc)
{
  struct access *child;
  if (acc->grp_to_be_replaced)
    return true;
  for (child = acc->first_child; child; child = child->next_sibling)
    if (access_has_replacements_p (child))
      return true;
  return false;
}

/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static VEC (access_p, heap) *
get_base_access_vector (tree base)
{
  void **slot;

  slot = pointer_map_contains (base_access_vec, base);
  if (!slot)
    return NULL;
  else
    return *(VEC (access_p, heap) **) slot;
}

/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
			HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
	child = child->next_sibling;
      access = child;
    }

  return access;
}

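/* For example (an illustrative walk-through, not part of the original
   source), looking for offset 32, size 32 in the access tree from the
   earlier sketch

     x.in (0, 64)
       x.in.a (0, 32)
       x.in.b (32, 32)

   the outer loop first sees x.in, which does not match exactly, descends
   to x.in.a, skips it because 0 + 32 <= 32, and stops at x.in.b, which
   matches both offset and size and is returned.  */
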
/* Return the first group representative for BASE or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  VEC (access_p, heap) *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return VEC_index (access_p, access_vec, 0);
}

/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
				 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}

/* Add LINK to the linked list of assign links of RACC.  */
static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}

/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */
static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}

/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}

/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
  link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
  base_access_vec = pointer_map_create ();
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}

/* Hook fed to pointer_map_traverse, deallocate stored vectors.  */

static bool
delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
		      void *data ATTRIBUTE_UNUSED)
{
  VEC (access_p, heap) *access_vec;
  access_vec = (VEC (access_p, heap) *) *value;
  VEC_free (access_p, heap, access_vec);

  return true;
}

/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  free_alloc_pool (access_pool);
  free_alloc_pool (link_pool);
  obstack_free (&name_obstack, NULL);

  pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
  pointer_map_destroy (base_access_vec);
}

/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */
static void
disqualify_candidate (tree decl, const char *reason)
{
  bitmap_clear_bit (candidate_bitmap, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}

/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type, const char **msg)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    tree ft = TREE_TYPE (fld);

	    if (TREE_THIS_VOLATILE (fld))
	      {
		*msg = "volatile structure field";
		return true;
	      }
	    if (!DECL_FIELD_OFFSET (fld))
	      {
		*msg = "no structure field offset";
		return true;
	      }
	    if (!DECL_SIZE (fld))
	      {
		*msg = "no structure field size";
		return true;
	      }
	    if (!host_integerp (DECL_FIELD_OFFSET (fld), 1))
	      {
		*msg = "structure field offset not fixed";
		return true;
	      }
	    if (!host_integerp (DECL_SIZE (fld), 1))
	      {
		*msg = "structure field size not fixed";
		return true;
	      }
	    if (AGGREGATE_TYPE_P (ft)
		&& int_bit_position (fld) % BITS_PER_UNIT != 0)
	      {
		*msg = "structure field is bit field";
		return true;
	      }

	    if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
	      return true;
	  }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (TYPE_VOLATILE (et))
	{
	  *msg = "element type is volatile";
	  return true;
	}

      if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
	return true;

      return false;

    default:
      return false;
    }
}

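/* For instance (an illustrative example, not part of the original source),
   a type such as

     struct bad { volatile int v; };

   precludes scalarization because reads and writes of the field v must not
   be reordered or removed, whereas

     struct ok { int i; struct { float f; } nested; };

   passes all of the checks above and remains a candidate.  */
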
/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}

/* Mark a dereference of BASE of distance DIST in a basic block that STMT
   belongs to, unless the BB has already been marked as potentially
   final.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}

/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  VEC (access_p, heap) *vec;
  struct access *access;
  void **slot;

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  slot = pointer_map_contains (base_access_vec, base);
  if (slot)
    vec = (VEC (access_p, heap) *) *slot;
  else
    vec = VEC_alloc (access_p, heap, 32);

  VEC_safe_push (access_p, heap, vec, access);

  *((struct VEC (access_p,heap) **)
	pointer_map_insert (base_access_vec, base)) = vec;

  return access;
}

/* Create and insert access for EXPR.  Return created access, or NULL if it is
   not possible.  */

static struct access *
create_access (tree expr, gimple stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
	return NULL;
      ptr = true;
    }
  else
    ptr = false;

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
	{
	  disqualify_candidate (base, "Encountered a variable sized access.");
	  return NULL;
	}
      if (TREE_CODE (expr) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
	{
	  disqualify_candidate (base, "Encountered a bit-field access.");
	  return NULL;
	}
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
	mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
	{
	  size = max_size;
	  unscalarizable_region = true;
	}
      if (size < 0)
	{
	  disqualify_candidate (base, "Encountered an unconstrained access.");
	  return NULL;
	}
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;

  if (TREE_CODE (expr) == COMPONENT_REF
      && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
    access->non_addressable = 1;

  return access;
}

/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
   register types or (recursively) records with only these two kinds of fields.
   It also returns false if any of these records contains a bit-field.  */

static bool
type_consists_of_records_p (tree type)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	tree ft = TREE_TYPE (fld);

	if (DECL_BIT_FIELD (fld))
	  return false;

	if (!is_gimple_reg_type (ft)
	    && !type_consists_of_records_p (ft))
	  return false;
      }

  return true;
}

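/* For example (an illustrative sketch, not part of the original source),

     struct point { double x; double y; };
     struct segment { struct point from; struct point to; };

   satisfies this predicate, while

     struct flags { int a : 3; int b : 5; };

   does not, because its fields are bit-fields.  */
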
/* Create total_scalarization accesses for all scalar type fields in DECL that
   must be of a RECORD_TYPE conforming to type_consists_of_records_p.  BASE
   must be the top-most VAR_DECL representing the variable, OFFSET must be the
   offset of DECL within BASE.  REF must be the memory reference expression for
   the given decl.  */

static void
completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
			     tree ref)
{
  tree fld, decl_type = TREE_TYPE (decl);

  for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	HOST_WIDE_INT pos = offset + int_bit_position (fld);
	tree ft = TREE_TYPE (fld);
	tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
			    NULL_TREE);

	if (is_gimple_reg_type (ft))
	  {
	    struct access *access;
	    HOST_WIDE_INT size;

	    size = tree_low_cst (DECL_SIZE (fld), 1);
	    access = create_access_1 (base, pos, size);
	    access->expr = nref;
	    access->type = ft;
	    access->grp_total_scalarization = 1;
	    /* Accesses for intraprocedural SRA can have their stmt NULL.  */
	  }
	else
	  completely_scalarize_record (base, fld, pos, nref);
      }
}

/* Create total_scalarization accesses for all scalar type fields in VAR and
   for VAR as a whole.  VAR must be of a RECORD_TYPE conforming to
   type_consists_of_records_p.  */

static void
completely_scalarize_var (tree var)
{
  HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (var), 1);
  struct access *access;

  access = create_access_1 (var, 0, size);
  access->expr = var;
  access->type = TREE_TYPE (var);
  access->grp_total_scalarization = 1;

  completely_scalarize_record (var, var, 0, var);
}

/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (t) == MEM_REF)
    t = get_ssa_base_param (TREE_OPERAND (t, 0));

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}

/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
			       "component.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
	  && sra_mode != SRA_MODE_EARLY_IPA)
	return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}

/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must be
   true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
	 assign statement and thus cannot be removed even if we had a scalar
	 replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
	bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}

/* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
   modes in which it matters, return true iff they have been disqualified.  RHS
   may be NULL, in that case ignore it.  If we scalarize an aggregate in
   intra-SRA we may need to add statements after each statement.  This is not
   possible if a statement unconditionally has to end the basic block.  */
static bool
disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
    {
      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
	disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}

/* Return true if EXP is a memory reference less aligned than ALIGN.  This is
   invoked only on strict-alignment targets.  */

static bool
tree_non_aligned_mem_p (tree exp, unsigned int align)
{
  unsigned int exp_align;

  if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    exp = TREE_OPERAND (exp, 0);

  if (TREE_CODE (exp) == SSA_NAME || is_gimple_min_invariant (exp))
    return false;

  /* get_object_alignment will fall back to BITS_PER_UNIT if it cannot
     compute an explicit alignment.  Pretend that dereferenced pointers
     are always aligned on strict-alignment targets.  */
  if (TREE_CODE (exp) == MEM_REF || TREE_CODE (exp) == TARGET_MEM_REF)
    exp_align = get_object_or_type_alignment (exp);
  else
    exp_align = get_object_alignment (exp);

  if (exp_align < align)
    return true;

  return false;
}

/* Return true if EXP is a memory reference less aligned than what the access
   ACC would require.  This is invoked only on strict-alignment targets.  */

static bool
tree_non_aligned_mem_for_access_p (tree exp, struct access *acc)
{
  unsigned int acc_align;

  /* The alignment of the access is that of its expression.  However, it may
     have been artificially increased, e.g. by a local alignment promotion,
     so we cap it to the alignment of the type of the base, on the grounds
     that valid sub-accesses cannot be more aligned than that.  */
  acc_align = get_object_alignment (acc->expr);
  if (acc->base && acc_align > TYPE_ALIGN (TREE_TYPE (acc->base)))
    acc_align = TYPE_ALIGN (TREE_TYPE (acc->base));

  return tree_non_aligned_mem_p (exp, acc_align);
}

/* Scan expressions occurring in STMT, create access structures for all
   accesses to candidates for scalarization and remove those candidates which
   occur in statements or expressions that prevent them from being split apart.
   Return true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt)
      /* Scope clobbers don't influence scalarization.  */
      || gimple_clobber_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (lacc)
    {
      lacc->grp_assignment_write = 1;
      if (STRICT_ALIGNMENT && tree_non_aligned_mem_for_access_p (rhs, lacc))
	lacc->grp_unscalarizable_region = 1;
    }

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
	  && !is_gimple_reg_type (racc->type))
	bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
      if (STRICT_ALIGNMENT && tree_non_aligned_mem_for_access_p (lhs, racc))
	racc->grp_unscalarizable_region = 1;
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      /* FIXME: Turn the following line into an assert after PR 40058 is
	 fixed.  */
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = (struct assign_link *) pool_alloc (link_pool);
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return lacc || racc;
}

/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
		void *data ATTRIBUTE_UNUSED)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}

/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA.  */

static inline bool
callsite_has_enough_arguments_p (gimple call)
{
  return gimple_call_num_args (call) >= (unsigned) func_param_count;
}

/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree t;
	  unsigned i;

	  if (final_bbs && stmt_can_throw_external (stmt))
	    bitmap_set_bit (final_bbs, bb->index);
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval (stmt);
	      if (t != NULL_TREE)
		ret |= build_access_from_expr (t, stmt, false);
	      if (final_bbs)
		bitmap_set_bit (final_bbs, bb->index);
	      break;

	    case GIMPLE_ASSIGN:
	      ret |= build_accesses_from_assign (stmt);
	      break;

	    case GIMPLE_CALL:
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		ret |= build_access_from_expr (gimple_call_arg (stmt, i),
					       stmt, false);

	      if (sra_mode == SRA_MODE_EARLY_IPA)
		{
		  tree dest = gimple_call_fndecl (stmt);
		  int flags = gimple_call_flags (stmt);

		  if (dest)
		    {
		      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
			  && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
			encountered_apply_args = true;
		      if (cgraph_get_node (dest)
			  == cgraph_get_node (current_function_decl))
			{
			  encountered_recursive_call = true;
			  if (!callsite_has_enough_arguments_p (stmt))
			    encountered_unchangable_recursive_call = true;
			}
		    }

		  if (final_bbs
		      && (flags & (ECF_CONST | ECF_PURE)) == 0)
		    bitmap_set_bit (final_bbs, bb->index);
		}

	      t = gimple_call_lhs (stmt);
	      if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
		ret |= build_access_from_expr (t, stmt, true);
	      break;

	    case GIMPLE_ASM:
	      walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
					     asm_visit_addr);
	      if (final_bbs)
		bitmap_set_bit (final_bbs, bb->index);

	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
		{
		  t = TREE_VALUE (gimple_asm_input_op (stmt, i));
		  ret |= build_access_from_expr (t, stmt, false);
		}
	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
		{
		  t = TREE_VALUE (gimple_asm_output_op (stmt, i));
		  ret |= build_access_from_expr (t, stmt, true);
		}
	      break;

	    default:
	      break;
	    }
	}
    }

  return ret;
}

/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if the
   offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
	return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
	       && is_gimple_reg_type (f2->type))
	return 1;
      else if (is_gimple_reg_type (f1->type)
	       && !is_gimple_reg_type (f2->type))
	return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
	       && TREE_CODE (f1->type) != VECTOR_TYPE
	       && (TREE_CODE (f2->type) == COMPLEX_TYPE
		   || TREE_CODE (f2->type) == VECTOR_TYPE))
	return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
		|| TREE_CODE (f1->type) == VECTOR_TYPE)
	       && TREE_CODE (f2->type) != COMPLEX_TYPE
	       && TREE_CODE (f2->type) != VECTOR_TYPE)
	return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && INTEGRAL_TYPE_P (f2->type))
	return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
		   != TYPE_PRECISION (f1->type)))
	return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
		   != TYPE_PRECISION (f2->type)))
	return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line: */
  return f1->size > f2->size ? -1 : 1;
}

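/* As an illustration (hypothetical accesses, not from the original source),
   accesses with <offset, size, type> of

     <0, 64, struct>, <0, 32, int>, <0, 64, double>, <32, 32, float>

   would be ordered

     <0, 64, double>, <0, 64, struct>, <0, 32, int>, <32, 32, float>

   i.e. primarily by ascending offset, then by descending size, with scalar
   types preceding aggregates for equal offset and size.  */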

/* Append a name of the declaration to the name obstack.  A helper function for
   make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
		  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}

/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
	 index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
	break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  obstack_1grow (&name_obstack, '$');
	  sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
		   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
	  obstack_grow (&name_obstack, buffer, strlen (buffer));
	}
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();	/* we treat these as scalars.  */
      break;
    default:
      break;
    }
}

/* Create a human readable name for the replacement variable accessed by
   EXPR.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}

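/* For example (an illustration, not part of the original source), for the
   expression s.inner[3].f this produces the name "s$inner$3$f": each
   COMPONENT_REF and ARRAY_REF contributes a '$' followed by the field name
   or the constant index.  */
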
/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET.  If BASE is something for which
   get_addr_base_and_unit_offset returns NULL, GSI must be non-NULL and is used
   to insert new statements either before or below the current one as specified
   by INSERT_AFTER.  This function is not capable of handling bitfields.  */

tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
		      tree exp_type, gimple_stmt_iterator *gsi,
		      bool insert_after)
{
  tree prev_base = base;
  tree off;
  HOST_WIDE_INT base_offset;
  unsigned HOST_WIDE_INT misalign;
  unsigned int align;

  gcc_checking_assert (offset % BITS_PER_UNIT == 0);

  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gimple stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = create_tmp_reg (build_pointer_type (TREE_TYPE (prev_base)), NULL);
      add_referenced_var (tmp);
      tmp = make_ssa_name (tmp, NULL);
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      STRIP_USELESS_TYPE_CONVERSION (addr);
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      SSA_NAME_DEF_STMT (tmp) = stmt;
      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
      update_stmt (stmt);

      off = build_int_cst (reference_alias_ptr_type (prev_base),
			   offset / BITS_PER_UNIT);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
			   base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
			   base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  /* If prev_base were always an originally performed access
     we can extract more optimistic alignment information
     by looking at the access mode.  That would constrain the
     alignment of base + base_offset which we would need to
     adjust according to offset.
     ??? But it is not at all clear that prev_base is an access
     that was in the IL that way, so be conservative for now.  */
  align = get_pointer_alignment_1 (base, &misalign);
  misalign += (double_int_sext (tree_to_double_int (off),
				TYPE_PRECISION (TREE_TYPE (off))).low
	       * BITS_PER_UNIT);
  misalign = misalign & (align - 1);
  if (misalign != 0)
    align = (misalign & -misalign);
  if (align < TYPE_ALIGN (exp_type))
    exp_type = build_aligned_type (exp_type, align);

  return fold_build2_loc (loc, MEM_REF, exp_type, base, off);
}

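/* For instance (an illustrative sketch, not part of the original source),
   asked for offset 32 (bits) into a variable s of some struct type with
   float as EXP_TYPE, the function essentially produces

     MEM[(float *)&s + 4B]

   i.e. an address computation plus a byte offset, with the access type
   carrying any reduced alignment computed above.  */
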
DEF_VEC_ALLOC_P_STACK (tree);
#define VEC_tree_stack_alloc(alloc) VEC_stack_alloc (tree, alloc)

/* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the type of MODEL.  In case this is a chain of references
   to components, the function will replicate the chain of COMPONENT_REFs of
   the expression of MODEL to access it.  GSI and INSERT_AFTER have the same
   meaning as in build_ref_for_offset.  */

static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
		     struct access *model, gimple_stmt_iterator *gsi,
		     bool insert_after)
{
  tree type = model->type, t;
  VEC(tree,stack) *cr_stack = NULL;

  if (TREE_CODE (model->expr) == COMPONENT_REF)
    {
      tree expr = model->expr;

      /* Create a stack of the COMPONENT_REFs so later we can walk them in
	 order from inner to outer.  */
      cr_stack = VEC_alloc (tree, stack, 6);

      do {
	tree field = TREE_OPERAND (expr, 1);
	tree cr_offset = component_ref_field_offset (expr);
	HOST_WIDE_INT bit_pos
	  = tree_low_cst (cr_offset, 1) * BITS_PER_UNIT
	      + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));

	/* We can be called with a model different from the one associated
	   with BASE so we need to avoid going up the chain too far.  */
	if (offset - bit_pos < 0)
	  break;

	offset -= bit_pos;
	VEC_safe_push (tree, stack, cr_stack, expr);

	expr = TREE_OPERAND (expr, 0);
	type = TREE_TYPE (expr);
      } while (TREE_CODE (expr) == COMPONENT_REF);
    }

  t = build_ref_for_offset (loc, base, offset, type, gsi, insert_after);

  if (TREE_CODE (model->expr) == COMPONENT_REF)
    {
      unsigned i;
      tree expr;

      /* Now replicate the chain of COMPONENT_REFs from inner to outer.  */
      FOR_EACH_VEC_ELT_REVERSE (tree, cr_stack, i, expr)
	{
	  tree field = TREE_OPERAND (expr, 1);
	  t = fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (field), t, field,
			       TREE_OPERAND (expr, 2));
	}

      VEC_free (tree, stack, cr_stack);
    }

  return t;
}

/* Construct a memory reference consisting of component_refs and array_refs to
   a part of an aggregate *RES (which is of type TYPE).  The requested part
   should have type EXP_TYPE at the given OFFSET.  This function might not
   succeed, it returns true when it does and only then *RES points to something
   meaningful.  This function should be used only to build expressions that we
   might need to present to user (e.g. in warnings).  In all other situations,
   build_ref_for_model or build_ref_for_offset should be used instead.  */

static bool
build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
				    tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
	  && types_compatible_p (exp_type, type))
	return true;

      switch (TREE_CODE (type))
	{
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case RECORD_TYPE:
	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	    {
	      HOST_WIDE_INT pos, size;
	      tree expr, *expr_ptr;

	      if (TREE_CODE (fld) != FIELD_DECL)
		continue;

	      pos = int_bit_position (fld);
	      gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
	      tr_size = DECL_SIZE (fld);
	      if (!tr_size || !host_integerp (tr_size, 1))
		continue;
	      size = tree_low_cst (tr_size, 1);
	      if (size == 0)
		{
		  if (pos != offset)
		    continue;
		}
	      else if (pos > offset || (pos + size) <= offset)
		continue;

	      expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
			     NULL_TREE);
	      expr_ptr = &expr;
	      if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
						      offset - pos, exp_type))
		{
		  *res = expr;
		  return true;
		}
	    }
	  return false;

	case ARRAY_TYPE:
	  tr_size = TYPE_SIZE (TREE_TYPE (type));
	  if (!tr_size || !host_integerp (tr_size, 1))
	    return false;
	  el_size = tree_low_cst (tr_size, 1);

	  minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
	    return false;
	  index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
	  if (!integer_zerop (minidx))
	    index = int_const_binop (PLUS_EXPR, index, minidx);
	  *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
			 NULL_TREE, NULL_TREE);
	  offset = offset % el_size;
	  type = TREE_TYPE (type);
	  break;

	default:
	  if (offset != 0)
	    return false;

	  if (exp_type)
	    return false;
	  else
	    return true;
	}
    }
}

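/* For instance (an illustrative example, not part of the original source),
   for

     struct S { int a[4]; float f; };

   a request for offset 64 (bits) with int as EXP_TYPE would descend through
   the array and yield the expression s.a[2] (assuming 32-bit int), which is
   what a user would expect to see in a warning.  */
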
/* Return true iff TYPE is a stdarg va_list type.  */

static inline bool
is_va_list_type (tree type)
{
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}

/* Print a message to the dump file telling why a variable was rejected.  */

static void
reject (tree var, const char *msg)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }
}

/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those declarations whose type is suitable for scalarization.  */

static bool
find_var_candidates (void)
{
  tree var, type;
  referenced_var_iterator rvi;
  bool ret = false;
  const char *msg;

  FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
    {
      if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
	continue;
      type = TREE_TYPE (var);

      if (!AGGREGATE_TYPE_P (type))
	{
	  reject (var, "not aggregate");
	  continue;
	}
      if (needs_to_live_in_memory (var))
	{
	  reject (var, "needs to live in memory");
	  continue;
	}
      if (TREE_THIS_VOLATILE (var))
	{
	  reject (var, "is volatile");
	  continue;
	}
      if (!COMPLETE_TYPE_P (type))
	{
	  reject (var, "has incomplete type");
	  continue;
	}
      if (!host_integerp (TYPE_SIZE (type), 1))
	{
	  reject (var, "type size not fixed");
	  continue;
	}
      if (tree_low_cst (TYPE_SIZE (type), 1) == 0)
	{
	  reject (var, "type size is zero");
	  continue;
	}
      if (type_internals_preclude_sra_p (type, &msg))
	{
	  reject (var, msg);
	  continue;
	}
      if (/* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
	     we also want to schedule it rather late.  Thus we ignore it in
	     the early pass.  */
	  (sra_mode == SRA_MODE_EARLY_INTRA
	   && is_va_list_type (type)))
	{
	  reject (var, "is va_list");
	  continue;
	}

      bitmap_set_bit (candidate_bitmap, DECL_UID (var));

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
	  print_generic_expr (dump_file, var, 0);
	  fprintf (dump_file, "\n");
	}
      ret = true;
    }

  return ret;
}

1799 /* Sort all accesses for the given variable, check for partial overlaps and
1800 return NULL if there are any. If there are none, pick a representative for
1801 each combination of offset and size and create a linked list out of them.
1802 Return the pointer to the first representative and make sure it is the first
1803 one in the vector of accesses. */
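/* For example (illustrative layout, assuming 32-bit int and 64-bit long
   long):

     union u { struct { int a; int b; } s; long long l; } x;

   accesses x.s.b <32,32> and x.l <0,64> nest and are fine, whereas a
   hypothetical pair <0,48> and <32,32> would partially overlap and make the
   whole variable unscalarizable.  */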
1804
1805 static struct access *
1806 sort_and_splice_var_accesses (tree var)
1807 {
1808 int i, j, access_count;
1809 struct access *res, **prev_acc_ptr = &res;
1810 VEC (access_p, heap) *access_vec;
1811 bool first = true;
1812 HOST_WIDE_INT low = -1, high = 0;
1813
1814 access_vec = get_base_access_vector (var);
1815 if (!access_vec)
1816 return NULL;
1817 access_count = VEC_length (access_p, access_vec);
1818
1819 /* Sort by <OFFSET, SIZE>. */
1820 VEC_qsort (access_p, access_vec, compare_access_positions);
1821
1822 i = 0;
1823 while (i < access_count)
1824 {
1825 struct access *access = VEC_index (access_p, access_vec, i);
1826 bool grp_write = access->write;
1827 bool grp_read = !access->write;
1828 bool grp_scalar_write = access->write
1829 && is_gimple_reg_type (access->type);
1830 bool grp_scalar_read = !access->write
1831 && is_gimple_reg_type (access->type);
1832 bool grp_assignment_read = access->grp_assignment_read;
1833 bool grp_assignment_write = access->grp_assignment_write;
1834 bool multiple_scalar_reads = false;
1835 bool total_scalarization = access->grp_total_scalarization;
1836 bool grp_partial_lhs = access->grp_partial_lhs;
1837 bool first_scalar = is_gimple_reg_type (access->type);
1838 bool unscalarizable_region = access->grp_unscalarizable_region;
1839
1840 if (first || access->offset >= high)
1841 {
1842 first = false;
1843 low = access->offset;
1844 high = access->offset + access->size;
1845 }
1846 else if (access->offset > low && access->offset + access->size > high)
1847 return NULL;
1848 else
1849 gcc_assert (access->offset >= low
1850 && access->offset + access->size <= high);
1851
1852 j = i + 1;
1853 while (j < access_count)
1854 {
1855 struct access *ac2 = VEC_index (access_p, access_vec, j);
1856 if (ac2->offset != access->offset || ac2->size != access->size)
1857 break;
1858 if (ac2->write)
1859 {
1860 grp_write = true;
1861 grp_scalar_write = (grp_scalar_write
1862 || is_gimple_reg_type (ac2->type));
1863 }
1864 else
1865 {
1866 grp_read = true;
1867 if (is_gimple_reg_type (ac2->type))
1868 {
1869 if (grp_scalar_read)
1870 multiple_scalar_reads = true;
1871 else
1872 grp_scalar_read = true;
1873 }
1874 }
1875 grp_assignment_read |= ac2->grp_assignment_read;
1876 grp_assignment_write |= ac2->grp_assignment_write;
1877 grp_partial_lhs |= ac2->grp_partial_lhs;
1878 unscalarizable_region |= ac2->grp_unscalarizable_region;
1879 total_scalarization |= ac2->grp_total_scalarization;
1880 relink_to_new_repr (access, ac2);
1881
1882 /* If there are both aggregate-type and scalar-type accesses with
1883 this combination of size and offset, the comparison function
1884 should have put the scalars first. */
1885 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
1886 ac2->group_representative = access;
1887 j++;
1888 }
1889
1890 i = j;
1891
1892 access->group_representative = access;
1893 access->grp_write = grp_write;
1894 access->grp_read = grp_read;
1895 access->grp_scalar_read = grp_scalar_read;
1896 access->grp_scalar_write = grp_scalar_write;
1897 access->grp_assignment_read = grp_assignment_read;
1898 access->grp_assignment_write = grp_assignment_write;
1899 access->grp_hint = multiple_scalar_reads || total_scalarization;
1900 access->grp_total_scalarization = total_scalarization;
1901 access->grp_partial_lhs = grp_partial_lhs;
1902 access->grp_unscalarizable_region = unscalarizable_region;
1903 if (access->first_link)
1904 add_access_to_work_queue (access);
1905
1906 *prev_acc_ptr = access;
1907 prev_acc_ptr = &access->next_grp;
1908 }
1909
1910 gcc_assert (res == VEC_index (access_p, access_vec, 0));
1911 return res;
1912 }
1913
1914 /* Create a variable for the given ACCESS, which determines its type, name and
1915 a few other properties. Return the variable declaration; callers store it
1916 in ACCESS->replacement_decl. */
1917
1918 static tree
1919 create_access_replacement (struct access *access, bool rename)
1920 {
1921 tree repl;
1922
1923 repl = create_tmp_var (access->type, "SR");
1924 add_referenced_var (repl);
1925 if (rename)
1926 mark_sym_for_renaming (repl);
1927
1928 if (!access->grp_partial_lhs
1929 && (TREE_CODE (access->type) == COMPLEX_TYPE
1930 || TREE_CODE (access->type) == VECTOR_TYPE))
1931 DECL_GIMPLE_REG_P (repl) = 1;
1932
1933 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
1934 DECL_ARTIFICIAL (repl) = 1;
1935 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
1936
1937 if (DECL_NAME (access->base)
1938 && !DECL_IGNORED_P (access->base)
1939 && !DECL_ARTIFICIAL (access->base))
1940 {
1941 char *pretty_name = make_fancy_name (access->expr);
1942 tree debug_expr = unshare_expr (access->expr), d;
1943
1944 DECL_NAME (repl) = get_identifier (pretty_name);
1945 obstack_free (&name_obstack, pretty_name);
1946
1947 /* Get rid of any SSA_NAMEs embedded in debug_expr,
1948 as DECL_DEBUG_EXPR isn't considered when looking for still
1949 used SSA_NAMEs and thus they could be freed. All debug info
1950 generation cares is whether something is constant or variable
1951 and that get_ref_base_and_extent works properly on the
1952 expression. */
1953 for (d = debug_expr; handled_component_p (d); d = TREE_OPERAND (d, 0))
1954 switch (TREE_CODE (d))
1955 {
1956 case ARRAY_REF:
1957 case ARRAY_RANGE_REF:
1958 if (TREE_OPERAND (d, 1)
1959 && TREE_CODE (TREE_OPERAND (d, 1)) == SSA_NAME)
1960 TREE_OPERAND (d, 1) = SSA_NAME_VAR (TREE_OPERAND (d, 1));
1961 if (TREE_OPERAND (d, 3)
1962 && TREE_CODE (TREE_OPERAND (d, 3)) == SSA_NAME)
1963 TREE_OPERAND (d, 3) = SSA_NAME_VAR (TREE_OPERAND (d, 3));
1964 /* FALLTHRU */
1965 case COMPONENT_REF:
1966 if (TREE_OPERAND (d, 2)
1967 && TREE_CODE (TREE_OPERAND (d, 2)) == SSA_NAME)
1968 TREE_OPERAND (d, 2) = SSA_NAME_VAR (TREE_OPERAND (d, 2));
1969 break;
1970 default:
1971 break;
1972 }
1973 SET_DECL_DEBUG_EXPR (repl, debug_expr);
1974 DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
1975 if (access->grp_no_warning)
1976 TREE_NO_WARNING (repl) = 1;
1977 else
1978 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
1979 }
1980 else
1981 TREE_NO_WARNING (repl) = 1;
1982
1983 if (dump_file)
1984 {
1985 fprintf (dump_file, "Created a replacement for ");
1986 print_generic_expr (dump_file, access->base, 0);
1987 fprintf (dump_file, " offset: %u, size: %u: ",
1988 (unsigned) access->offset, (unsigned) access->size);
1989 print_generic_expr (dump_file, repl, 0);
1990 fprintf (dump_file, "\n");
1991 }
1992 sra_stats.replacements++;
1993
1994 return repl;
1995 }
1996
1997 /* Return the scalar replacement of ACCESS, creating it if it does not exist yet. */
1998
1999 static inline tree
2000 get_access_replacement (struct access *access)
2001 {
2002 gcc_assert (access->grp_to_be_replaced);
2003
2004 if (!access->replacement_decl)
2005 access->replacement_decl = create_access_replacement (access, true);
2006 return access->replacement_decl;
2007 }
2008
2009 /* Return the scalar replacement of ACCESS, creating it if it does not exist
2010 yet, but do not mark it for renaming. */
2011
2012 static inline tree
2013 get_unrenamed_access_replacement (struct access *access)
2014 {
2015 gcc_assert (!access->grp_to_be_replaced);
2016
2017 if (!access->replacement_decl)
2018 access->replacement_decl = create_access_replacement (access, false);
2019 return access->replacement_decl;
2020 }
2021
2022
2023 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2024 linked list along the way. Stop when *ACCESS is NULL or the access it
2025 points to is not "within" the root. Return false iff some accesses
2026 partially overlap. */
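/* E.g. (hypothetical) representatives <0,64>, <0,32> and <32,32>, already
   sorted, yield a tree in which <0,64> is the root and the two 32-bit
   accesses are its children.  */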
2027
2028 static bool
2029 build_access_subtree (struct access **access)
2030 {
2031 struct access *root = *access, *last_child = NULL;
2032 HOST_WIDE_INT limit = root->offset + root->size;
2033
2034 *access = (*access)->next_grp;
2035 while (*access && (*access)->offset + (*access)->size <= limit)
2036 {
2037 if (!last_child)
2038 root->first_child = *access;
2039 else
2040 last_child->next_sibling = *access;
2041 last_child = *access;
2042
2043 if (!build_access_subtree (access))
2044 return false;
2045 }
2046
2047 if (*access && (*access)->offset < limit)
2048 return false;
2049
2050 return true;
2051 }
2052
2053 /* Build a tree of access representatives, ACCESS is the pointer to the first
2054 one, others are linked in a list by the next_grp field. Return false iff
2055 some accesses partially overlap. */
2056
2057 static bool
2058 build_access_trees (struct access *access)
2059 {
2060 while (access)
2061 {
2062 struct access *root = access;
2063
2064 if (!build_access_subtree (&access))
2065 return false;
2066 root->next_grp = access;
2067 }
2068 return true;
2069 }
2070
2071 /* Return true if EXPR contains some ARRAY_REFs into a variable-bounded
2072 array. */
2073
2074 static bool
2075 expr_with_var_bounded_array_refs_p (tree expr)
2076 {
2077 while (handled_component_p (expr))
2078 {
2079 if (TREE_CODE (expr) == ARRAY_REF
2080 && !host_integerp (array_ref_low_bound (expr), 0))
2081 return true;
2082 expr = TREE_OPERAND (expr, 0);
2083 }
2084 return false;
2085 }
2086
2087 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2088 they seem beneficial and when ALLOW_REPLACEMENTS allows it. Also set all
2089 sorts of access flags appropriately along the way, notably propagate the
2090 grp_read, grp_assignment_read, grp_write and grp_assignment_write flags
2091 from PARENT to ROOT.
2092
2093 Creating a replacement for a scalar access is considered beneficial if its
2094 grp_hint is set (this means we are either attempting total scalarization or
2095 there is more than one direct read access) or according to the following
2096 table:
2097
2098 Access written to through a scalar type (once or more times)
2099 |
2100 | Written to in an assignment statement
2101 | |
2102 | | Access read as scalar _once_
2103 | | |
2104 | | | Read in an assignment statement
2105 | | | |
2106 | | | | Scalarize Comment
2107 -----------------------------------------------------------------------------
2108 0 0 0 0 -   No access for the scalar
2109 0 0 0 1 -   No access for the scalar
2110 0 0 1 0 No  Single read - won't help
2111 0 0 1 1 No  The same case
2112 0 1 0 0 -   No access for the scalar
2113 0 1 0 1 -   No access for the scalar
2114 0 1 1 0 Yes s = *g; return s.i;
2115 0 1 1 1 Yes The same case as above
2116 1 0 0 0 No  Won't help
2117 1 0 0 1 Yes s.i = 1; *g = s;
2118 1 0 1 0 Yes s.i = 5; g = s.i;
2119 1 0 1 1 Yes The same case as above
2120 1 1 0 0 No  Won't help.
2121 1 1 0 1 Yes s.i = 1; *g = s;
2122 1 1 1 0 Yes s = *g; return s.i;
2123 1 1 1 1 Yes Any of the above yeses */
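/* As a concrete reading of the row "1 0 0 1 Yes" above (a made-up snippet):

     s.i = 1;     written to through a scalar type
     *g = s;      read in an assignment statement

   a replacement s$i lets the scalar store become a register assignment while
   the aggregate copy is refreshed from s$i.  */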
2124
2125 static bool
2126 analyze_access_subtree (struct access *root, struct access *parent,
2127 bool allow_replacements)
2128 {
2129 struct access *child;
2130 HOST_WIDE_INT limit = root->offset + root->size;
2131 HOST_WIDE_INT covered_to = root->offset;
2132 bool scalar = is_gimple_reg_type (root->type);
2133 bool hole = false, sth_created = false;
2134
2135 if (parent)
2136 {
2137 if (parent->grp_read)
2138 root->grp_read = 1;
2139 if (parent->grp_assignment_read)
2140 root->grp_assignment_read = 1;
2141 if (parent->grp_write)
2142 root->grp_write = 1;
2143 if (parent->grp_assignment_write)
2144 root->grp_assignment_write = 1;
2145 if (parent->grp_total_scalarization)
2146 root->grp_total_scalarization = 1;
2147 }
2148
2149 if (root->grp_unscalarizable_region)
2150 allow_replacements = false;
2151
2152 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2153 allow_replacements = false;
2154
2155 for (child = root->first_child; child; child = child->next_sibling)
2156 {
2157 hole |= covered_to < child->offset;
2158 sth_created |= analyze_access_subtree (child, root,
2159 allow_replacements && !scalar);
2160
2161 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2162 root->grp_total_scalarization &= child->grp_total_scalarization;
2163 if (child->grp_covered)
2164 covered_to += child->size;
2165 else
2166 hole = true;
2167 }
2168
2169 if (allow_replacements && scalar && !root->first_child
2170 && (root->grp_hint
2171 || ((root->grp_scalar_read || root->grp_assignment_read)
2172 && (root->grp_scalar_write || root->grp_assignment_write))))
2173 {
2174 bool new_integer_type;
2175 /* Always create access replacements that cover the whole access.
2176 For integral types this means the precision has to match.
2177 Avoid assumptions based on the integral type kind, too. */
2178 if (INTEGRAL_TYPE_P (root->type)
2179 && (TREE_CODE (root->type) != INTEGER_TYPE
2180 || TYPE_PRECISION (root->type) != root->size)
2181 /* But leave bitfield accesses alone. */
2182 && (root->offset % BITS_PER_UNIT) == 0)
2183 {
2184 tree rt = root->type;
2185 root->type = build_nonstandard_integer_type (root->size,
2186 TYPE_UNSIGNED (rt));
2187 root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
2188 root->base, root->offset,
2189 root->type, NULL, false);
2190 new_integer_type = true;
2191 }
2192 else
2193 new_integer_type = false;
2194
2195 if (dump_file && (dump_flags & TDF_DETAILS))
2196 {
2197 fprintf (dump_file, "Marking ");
2198 print_generic_expr (dump_file, root->base, 0);
2199 fprintf (dump_file, " offset: %u, size: %u ",
2200 (unsigned) root->offset, (unsigned) root->size);
2201 fprintf (dump_file, " to be replaced%s.\n",
2202 new_integer_type ? " with an integer": "");
2203 }
2204
2205 root->grp_to_be_replaced = 1;
2206 sth_created = true;
2207 hole = false;
2208 }
2209 else
2210 {
2211 if (covered_to < limit)
2212 hole = true;
2213 if (scalar)
2214 root->grp_total_scalarization = 0;
2215 }
2216
2217 if (sth_created
2218 && (!hole || root->grp_total_scalarization))
2219 {
2220 root->grp_covered = 1;
2221 return true;
2222 }
2223 if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
2224 root->grp_unscalarized_data = 1; /* not covered and written to */
2225 if (sth_created)
2226 return true;
2227 return false;
2228 }
2229
2230 /* Analyze all access trees linked by next_grp by means of
2231 analyze_access_subtree. */
2232 static bool
2233 analyze_access_trees (struct access *access)
2234 {
2235 bool ret = false;
2236
2237 while (access)
2238 {
2239 if (analyze_access_subtree (access, NULL, true))
2240 ret = true;
2241 access = access->next_grp;
2242 }
2243
2244 return ret;
2245 }
2246
2247 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and with
2248 size SIZE would conflict with an already existing one. If exactly such a
2249 child already exists in LACC, store a pointer to it in EXACT_MATCH. */
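/* For example (illustrative offsets): with existing children <0,32> and
   <32,32>, a candidate <32,32> is an exact match, <16,32> conflicts, and
   <64,32> does not conflict.  */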
2250
2251 static bool
2252 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2253 HOST_WIDE_INT size, struct access **exact_match)
2254 {
2255 struct access *child;
2256
2257 for (child = lacc->first_child; child; child = child->next_sibling)
2258 {
2259 if (child->offset == norm_offset && child->size == size)
2260 {
2261 *exact_match = child;
2262 return true;
2263 }
2264
2265 if (child->offset < norm_offset + size
2266 && child->offset + child->size > norm_offset)
2267 return true;
2268 }
2269
2270 return false;
2271 }
2272
2273 /* Create a new child access of PARENT, with all properties just like MODEL
2274 except for its offset and with its grp_write true and grp_read false.
2275 Return the new access or NULL if it cannot be created. Note that this access
2276 is created long after all splicing and sorting, it's not located in any
2277 access vector and is automatically a representative of its group. */
2278
2279 static struct access *
2280 create_artificial_child_access (struct access *parent, struct access *model,
2281 HOST_WIDE_INT new_offset)
2282 {
2283 struct access *access;
2284 struct access **child;
2285 tree expr = parent->base;
2286
2287 gcc_assert (!model->grp_unscalarizable_region);
2288
2289 access = (struct access *) pool_alloc (access_pool);
2290 memset (access, 0, sizeof (struct access));
2291 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2292 model->type))
2293 {
2294 access->grp_no_warning = true;
2295 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2296 new_offset, model, NULL, false);
2297 }
2298
2299 access->base = parent->base;
2300 access->expr = expr;
2301 access->offset = new_offset;
2302 access->size = model->size;
2303 access->type = model->type;
2304 access->grp_write = true;
2305 access->grp_read = false;
2306
2307 child = &parent->first_child;
2308 while (*child && (*child)->offset < new_offset)
2309 child = &(*child)->next_sibling;
2310
2311 access->next_sibling = *child;
2312 *child = access;
2313
2314 return access;
2315 }
2316
2317
2318 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2319 true if any new subaccess was created. Additionally, if RACC is a scalar
2320 access but LACC is not, change the type of the latter, if possible. */
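/* E.g. for a hypothetical aggregate assignment a = b in which only b.x and
   b.y have scalar accesses, artificial children corresponding to a.x and a.y
   are created under the access of a so that the copy can later be performed
   replacement by replacement.  */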
2321
2322 static bool
2323 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2324 {
2325 struct access *rchild;
2326 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2327 bool ret = false;
2328
2329 if (is_gimple_reg_type (lacc->type)
2330 || lacc->grp_unscalarizable_region
2331 || racc->grp_unscalarizable_region)
2332 return false;
2333
2334 if (is_gimple_reg_type (racc->type))
2335 {
2336 if (!lacc->first_child && !racc->first_child)
2337 {
2338 tree t = lacc->base;
2339
2340 lacc->type = racc->type;
2341 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2342 lacc->offset, racc->type))
2343 lacc->expr = t;
2344 else
2345 {
2346 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2347 lacc->base, lacc->offset,
2348 racc, NULL, false);
2349 lacc->grp_no_warning = true;
2350 }
2351 }
2352 return false;
2353 }
2354
2355 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2356 {
2357 struct access *new_acc = NULL;
2358 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2359
2360 if (rchild->grp_unscalarizable_region)
2361 continue;
2362
2363 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2364 &new_acc))
2365 {
2366 if (new_acc)
2367 {
2368 rchild->grp_hint = 1;
2369 new_acc->grp_hint |= new_acc->grp_read;
2370 if (rchild->first_child)
2371 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2372 }
2373 continue;
2374 }
2375
2376 rchild->grp_hint = 1;
2377 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2378 if (new_acc)
2379 {
2380 ret = true;
2381 if (racc->first_child)
2382 propagate_subaccesses_across_link (new_acc, rchild);
2383 }
2384 }
2385
2386 return ret;
2387 }
2388
2389 /* Propagate all subaccesses across assignment links. */
2390
2391 static void
2392 propagate_all_subaccesses (void)
2393 {
2394 while (work_queue_head)
2395 {
2396 struct access *racc = pop_access_from_work_queue ();
2397 struct assign_link *link;
2398
2399 gcc_assert (racc->first_link);
2400
2401 for (link = racc->first_link; link; link = link->next)
2402 {
2403 struct access *lacc = link->lacc;
2404
2405 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2406 continue;
2407 lacc = lacc->group_representative;
2408 if (propagate_subaccesses_across_link (lacc, racc)
2409 && lacc->first_link)
2410 add_access_to_work_queue (lacc);
2411 }
2412 }
2413 }
2414
2415 /* Go through all accesses collected throughout the (intraprocedural) analysis
2416 stage, exclude overlapping ones, identify representatives and build trees
2417 out of them, making decisions about scalarization on the way. Return true
2418 iff there are any to-be-scalarized variables after this stage. */
2419
2420 static bool
2421 analyze_all_variable_accesses (void)
2422 {
2423 int res = 0;
2424 bitmap tmp = BITMAP_ALLOC (NULL);
2425 bitmap_iterator bi;
2426 unsigned i, max_total_scalarization_size;
2427
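/* Total scalarization is only worthwhile for small aggregates; e.g. on a
   hypothetical 64-bit target with a MOVE_RATIO of 8 the bound below comes to
   8 * 64 = 512 bits.  */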
2428 max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2429 * MOVE_RATIO (optimize_function_for_speed_p (cfun));
2430
2431 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2432 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2433 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2434 {
2435 tree var = referenced_var (i);
2436
2437 if (TREE_CODE (var) == VAR_DECL
2438 && type_consists_of_records_p (TREE_TYPE (var)))
2439 {
2440 if ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
2441 <= max_total_scalarization_size)
2442 {
2443 completely_scalarize_var (var);
2444 if (dump_file && (dump_flags & TDF_DETAILS))
2445 {
2446 fprintf (dump_file, "Will attempt to totally scalarize ");
2447 print_generic_expr (dump_file, var, 0);
2448 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2449 }
2450 }
2451 else if (dump_file && (dump_flags & TDF_DETAILS))
2452 {
2453 fprintf (dump_file, "Too big to totally scalarize: ");
2454 print_generic_expr (dump_file, var, 0);
2455 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2456 }
2457 }
2458 }
2459
2460 bitmap_copy (tmp, candidate_bitmap);
2461 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2462 {
2463 tree var = referenced_var (i);
2464 struct access *access;
2465
2466 access = sort_and_splice_var_accesses (var);
2467 if (!access || !build_access_trees (access))
2468 disqualify_candidate (var,
2469 "No or inhibitingly overlapping accesses.");
2470 }
2471
2472 propagate_all_subaccesses ();
2473
2474 bitmap_copy (tmp, candidate_bitmap);
2475 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2476 {
2477 tree var = referenced_var (i);
2478 struct access *access = get_first_repr_for_decl (var);
2479
2480 if (analyze_access_trees (access))
2481 {
2482 res++;
2483 if (dump_file && (dump_flags & TDF_DETAILS))
2484 {
2485 fprintf (dump_file, "\nAccess trees for ");
2486 print_generic_expr (dump_file, var, 0);
2487 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2488 dump_access_tree (dump_file, access);
2489 fprintf (dump_file, "\n");
2490 }
2491 }
2492 else
2493 disqualify_candidate (var, "No scalar replacements to be created.");
2494 }
2495
2496 BITMAP_FREE (tmp);
2497
2498 if (res)
2499 {
2500 statistics_counter_event (cfun, "Scalarized aggregates", res);
2501 return true;
2502 }
2503 else
2504 return false;
2505 }
2506
2507 /* Generate statements copying scalar replacements of accesses within a subtree
2508 into or out of AGG. ACCESS, all its children, siblings and their children
2509 are to be processed. AGG is an aggregate type expression (can be a
2510 declaration but does not have to be, it can for example also be a mem_ref or
2511 a series of handled components). TOP_OFFSET is the offset of the processed
2512 subtree which has to be subtracted from offsets of individual accesses to
2513 get corresponding offsets for AGG. If CHUNK_SIZE is non-zero, copy only
2514 replacements in the interval <START_OFFSET, START_OFFSET + CHUNK_SIZE>,
2515 otherwise copy all. GSI is a statement iterator used to place the new
2516 statements. WRITE should be true when the statements should write from AGG
2517 to the replacement and false vice versa. If INSERT_AFTER is true, new
2518 statements will be added after the current statement in GSI; otherwise they
2519 will be added before it. */
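/* A made-up illustration: for an aggregate s with replacements s$a and s$b,
   a statement reading s as a whole is preceded (WRITE and INSERT_AFTER
   false) by

     s.a = s$a;
     s.b = s$b;

   while a statement writing s as a whole is followed (both true) by the
   reverse copies s$a = s.a; s$b = s.b;  */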
2520
2521 static void
2522 generate_subtree_copies (struct access *access, tree agg,
2523 HOST_WIDE_INT top_offset,
2524 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2525 gimple_stmt_iterator *gsi, bool write,
2526 bool insert_after, location_t loc)
2527 {
2528 do
2529 {
2530 if (chunk_size && access->offset >= start_offset + chunk_size)
2531 return;
2532
2533 if (access->grp_to_be_replaced
2534 && (chunk_size == 0
2535 || access->offset + access->size > start_offset))
2536 {
2537 tree expr, repl = get_access_replacement (access);
2538 gimple stmt;
2539
2540 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2541 access, gsi, insert_after);
2542
2543 if (write)
2544 {
2545 if (access->grp_partial_lhs)
2546 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2547 !insert_after,
2548 insert_after ? GSI_NEW_STMT
2549 : GSI_SAME_STMT);
2550 stmt = gimple_build_assign (repl, expr);
2551 }
2552 else
2553 {
2554 TREE_NO_WARNING (repl) = 1;
2555 if (access->grp_partial_lhs)
2556 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2557 !insert_after,
2558 insert_after ? GSI_NEW_STMT
2559 : GSI_SAME_STMT);
2560 stmt = gimple_build_assign (expr, repl);
2561 }
2562 gimple_set_location (stmt, loc);
2563
2564 if (insert_after)
2565 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2566 else
2567 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2568 update_stmt (stmt);
2569 sra_stats.subtree_copies++;
2570 }
2571
2572 if (access->first_child)
2573 generate_subtree_copies (access->first_child, agg, top_offset,
2574 start_offset, chunk_size, gsi,
2575 write, insert_after, loc);
2576
2577 access = access->next_sibling;
2578 }
2579 while (access);
2580 }
2581
2582 /* Assign zero to all scalar replacements in an access subtree. ACCESS is the
2583 root of the subtree to be processed. GSI is the statement iterator used
2584 for inserting statements which are added after the current statement if
2585 INSERT_AFTER is true or before it otherwise. */
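/* E.g. (hypothetically) zeroing a fully scalarized s with replacements s$a
   and s$b emits s$a = 0; s$b = 0; and the original CONSTRUCTOR assignment
   can then be removed.  */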
2586
2587 static void
2588 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2589 bool insert_after, location_t loc)
2590
2591 {
2592 struct access *child;
2593
2594 if (access->grp_to_be_replaced)
2595 {
2596 gimple stmt;
2597
2598 stmt = gimple_build_assign (get_access_replacement (access),
2599 build_zero_cst (access->type));
2600 if (insert_after)
2601 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2602 else
2603 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2604 update_stmt (stmt);
2605 gimple_set_location (stmt, loc);
2606 }
2607
2608 for (child = access->first_child; child; child = child->next_sibling)
2609 init_subtree_with_zero (child, gsi, insert_after, loc);
2610 }
2611
2612 /* Search for an access representative for the given expression EXPR and
2613 return it or NULL if it cannot be found. */
2614
2615 static struct access *
2616 get_access_for_expr (tree expr)
2617 {
2618 HOST_WIDE_INT offset, size, max_size;
2619 tree base;
2620
2621 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2622 a different size than the size of its argument and we need the latter
2623 one. */
2624 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2625 expr = TREE_OPERAND (expr, 0);
2626
2627 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2628 if (max_size == -1 || !DECL_P (base))
2629 return NULL;
2630
2631 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2632 return NULL;
2633
2634 return get_var_base_offset_size_access (base, offset, max_size);
2635 }
2636
2637 /* Replace the expression EXPR with a scalar replacement if there is one and
2638 generate other statements to do type conversion or subtree copying if
2639 necessary. GSI is used to place newly created statements, WRITE is true if
2640 the expression is being written to (it is on a LHS of a statement or output
2641 in an assembly statement). */
2642
2643 static bool
2644 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2645 {
2646 location_t loc;
2647 struct access *access;
2648 tree type, bfr;
2649
2650 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2651 {
2652 bfr = *expr;
2653 expr = &TREE_OPERAND (*expr, 0);
2654 }
2655 else
2656 bfr = NULL_TREE;
2657
2658 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2659 expr = &TREE_OPERAND (*expr, 0);
2660 access = get_access_for_expr (*expr);
2661 if (!access)
2662 return false;
2663 type = TREE_TYPE (*expr);
2664
2665 loc = gimple_location (gsi_stmt (*gsi));
2666 if (access->grp_to_be_replaced)
2667 {
2668 tree repl = get_access_replacement (access);
2669 /* If we replace a non-register typed access simply use the original
2670 access expression to extract the scalar component afterwards.
2671 This happens if scalarizing a function return value or parameter
2672 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2673 gcc.c-torture/compile/20011217-1.c.
2674
2675 We also want to use this when accessing a complex or vector which can
2676 be accessed as a different type too, potentially creating a need for
2677 type conversion (see PR42196) and when scalarized unions are involved
2678 in assembler statements (see PR42398). */
2679 if (!useless_type_conversion_p (type, access->type))
2680 {
2681 tree ref;
2682
2683 ref = build_ref_for_model (loc, access->base, access->offset, access,
2684 NULL, false);
2685
2686 if (write)
2687 {
2688 gimple stmt;
2689
2690 if (access->grp_partial_lhs)
2691 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2692 false, GSI_NEW_STMT);
2693 stmt = gimple_build_assign (repl, ref);
2694 gimple_set_location (stmt, loc);
2695 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2696 }
2697 else
2698 {
2699 gimple stmt;
2700
2701 if (access->grp_partial_lhs)
2702 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2703 true, GSI_SAME_STMT);
2704 stmt = gimple_build_assign (ref, repl);
2705 gimple_set_location (stmt, loc);
2706 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2707 }
2708 }
2709 else
2710 *expr = repl;
2711 sra_stats.exprs++;
2712 }
2713
2714 if (access->first_child)
2715 {
2716 HOST_WIDE_INT start_offset, chunk_size;
2717 if (bfr
2718 && host_integerp (TREE_OPERAND (bfr, 1), 1)
2719 && host_integerp (TREE_OPERAND (bfr, 2), 1))
2720 {
2721 chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2722 start_offset = access->offset
2723 + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2724 }
2725 else
2726 start_offset = chunk_size = 0;
2727
2728 generate_subtree_copies (access->first_child, access->base, 0,
2729 start_offset, chunk_size, gsi, write, write,
2730 loc);
2731 }
2732 return true;
2733 }
2734
2735 /* Where scalar replacements of the RHS have been written to when a replacement
2736 of a LHS of an assignment cannot be directly loaded from a replacement of
2737 the RHS. */
2738 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2739 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2740 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2741
2742 /* Store all replacements in the access tree rooted in TOP_RACC either to their
2743 base aggregate if there are unscalarized data or directly to the LHS of the
2744 statement that is pointed to by GSI otherwise. */
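/* Illustration (made up): for a copy l = r where r has replacements r$a and
   r$b, SRA_UDH_RIGHT flushes r.a = r$a; r.b = r$b; before the copy because r
   also contains unscalarized data, whereas with none present the values can
   go straight to the destination, l.a = r$a; l.b = r$b;, giving
   SRA_UDH_LEFT.  */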
2745
2746 static enum unscalarized_data_handling
2747 handle_unscalarized_data_in_subtree (struct access *top_racc,
2748 gimple_stmt_iterator *gsi)
2749 {
2750 if (top_racc->grp_unscalarized_data)
2751 {
2752 generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2753 gsi, false, false,
2754 gimple_location (gsi_stmt (*gsi)));
2755 return SRA_UDH_RIGHT;
2756 }
2757 else
2758 {
2759 tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
2760 generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2761 0, 0, gsi, false, false,
2762 gimple_location (gsi_stmt (*gsi)));
2763 return SRA_UDH_LEFT;
2764 }
2765 }
2766
2767
2768 /* Try to generate statements to load all sub-replacements in an access subtree
2769 formed by children of LACC from scalar replacements in the TOP_RACC subtree.
2770 If that is not possible, refresh the TOP_RACC base aggregate and load the
2771 accesses from it. LEFT_OFFSET is the offset of the whole left subtree being
2772 copied. NEW_GSI is a statement iterator used for statement insertions after
2773 the original assignment, OLD_GSI is used to insert statements before the
2774 assignment. *REFRESHED keeps the information whether we have needed to
2775 refresh replacements of the LHS and from which side of the assignment this
2776 takes place. */
2777
2778 static void
2779 load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2780 HOST_WIDE_INT left_offset,
2781 gimple_stmt_iterator *old_gsi,
2782 gimple_stmt_iterator *new_gsi,
2783 enum unscalarized_data_handling *refreshed)
2784 {
2785 location_t loc = gimple_location (gsi_stmt (*old_gsi));
2786 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2787 {
2788 if (lacc->grp_to_be_replaced)
2789 {
2790 struct access *racc;
2791 HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;
2792 gimple stmt;
2793 tree rhs;
2794
2795 racc = find_access_in_subtree (top_racc, offset, lacc->size);
2796 if (racc && racc->grp_to_be_replaced)
2797 {
2798 rhs = get_access_replacement (racc);
2799 if (!useless_type_conversion_p (lacc->type, racc->type))
2800 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2801
2802 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
2803 rhs = force_gimple_operand_gsi (old_gsi, rhs, true, NULL_TREE,
2804 true, GSI_SAME_STMT);
2805 }
2806 else
2807 {
2808 /* No suitable access on the right hand side, need to load from
2809 the aggregate. See if we have to update it first... */
2810 if (*refreshed == SRA_UDH_NONE)
2811 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2812 old_gsi);
2813
2814 if (*refreshed == SRA_UDH_LEFT)
2815 rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
2816 new_gsi, true);
2817 else
2818 rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
2819 new_gsi, true);
2820 if (lacc->grp_partial_lhs)
2821 rhs = force_gimple_operand_gsi (new_gsi, rhs, true, NULL_TREE,
2822 false, GSI_NEW_STMT);
2823 }
2824
2825 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2826 gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2827 gimple_set_location (stmt, loc);
2828 update_stmt (stmt);
2829 sra_stats.subreplacements++;
2830 }
2831 else if (*refreshed == SRA_UDH_NONE
2832 && lacc->grp_read && !lacc->grp_covered)
2833 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2834 old_gsi);
2835
2836 if (lacc->first_child)
2837 load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
2838 old_gsi, new_gsi, refreshed);
2839 }
2840 }
2841
2842 /* Result code for SRA assignment modification. */
2843 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
2844 SRA_AM_MODIFIED, /* stmt changed but not
2845 removed */
2846 SRA_AM_REMOVED }; /* stmt eliminated */
2847
2848 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
2849 to the assignment and GSI is the statement iterator pointing at it. Returns
2850 the same values as sra_modify_assign. */
2851
2852 static enum assignment_mod_result
2853 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2854 {
2855 tree lhs = gimple_assign_lhs (*stmt);
2856 struct access *acc;
2857 location_t loc;
2858
2859 acc = get_access_for_expr (lhs);
2860 if (!acc)
2861 return SRA_AM_NONE;
2862
2863 if (gimple_clobber_p (*stmt))
2864 {
2865 /* Remove clobbers of fully scalarized variables, otherwise
2866 do nothing. */
2867 if (acc->grp_covered)
2868 {
2869 unlink_stmt_vdef (*stmt);
2870 gsi_remove (gsi, true);
2871 return SRA_AM_REMOVED;
2872 }
2873 else
2874 return SRA_AM_NONE;
2875 }
2876
2877 loc = gimple_location (*stmt);
2878 if (VEC_length (constructor_elt,
2879 CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
2880 {
2881 /* I have never seen this code path trigger but if it can happen the
2882 following should handle it gracefully. */
2883 if (access_has_children_p (acc))
2884 generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2885 true, true, loc);
2886 return SRA_AM_MODIFIED;
2887 }
2888
2889 if (acc->grp_covered)
2890 {
2891 init_subtree_with_zero (acc, gsi, false, loc);
2892 unlink_stmt_vdef (*stmt);
2893 gsi_remove (gsi, true);
2894 return SRA_AM_REMOVED;
2895 }
2896 else
2897 {
2898 init_subtree_with_zero (acc, gsi, true, loc);
2899 return SRA_AM_MODIFIED;
2900 }
2901 }
2902
2903 /* Create and return a new suitable default definition SSA_NAME for RACC which
2904 is an access describing an uninitialized part of an aggregate that is being
2905 loaded. */
2906
2907 static tree
2908 get_repl_default_def_ssa_name (struct access *racc)
2909 {
2910 tree repl, decl;
2911
2912 decl = get_unrenamed_access_replacement (racc);
2913
2914 repl = gimple_default_def (cfun, decl);
2915 if (!repl)
2916 {
2917 repl = make_ssa_name (decl, gimple_build_nop ());
2918 set_default_def (decl, repl);
2919 }
2920
2921 return repl;
2922 }
2923
2924 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
2925 somewhere in it. */
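/* E.g. (hypothetical) for struct s { int f : 3; } x; a reference to x.f
   contains such a COMPONENT_REF.  */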
2926
2927 static inline bool
2928 contains_bitfld_comp_ref_p (const_tree ref)
2929 {
2930 while (handled_component_p (ref))
2931 {
2932 if (TREE_CODE (ref) == COMPONENT_REF
2933 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
2934 return true;
2935 ref = TREE_OPERAND (ref, 0);
2936 }
2937
2938 return false;
2939 }
2940
2941 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
2942 bit-field field declaration somewhere in it. */
2943
2944 static inline bool
2945 contains_vce_or_bfcref_p (const_tree ref)
2946 {
2947 while (handled_component_p (ref))
2948 {
2949 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
2950 || (TREE_CODE (ref) == COMPONENT_REF
2951 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
2952 return true;
2953 ref = TREE_OPERAND (ref, 0);
2954 }
2955
2956 return false;
2957 }
2958
2959 /* Examine both sides of the assignment statement pointed to by STMT, replace
2960 them with a scalar replacement if there is one and generate copying of
2961 replacements if scalarized aggregates have been used in the assignment. GSI
2962 is used to hold generated statements for type conversions and subtree
2963 copying. */
2964
2965 static enum assignment_mod_result
2966 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2967 {
2968 struct access *lacc, *racc;
2969 tree lhs, rhs;
2970 bool modify_this_stmt = false;
2971 bool force_gimple_rhs = false;
2972 location_t loc;
2973 gimple_stmt_iterator orig_gsi = *gsi;
2974
2975 if (!gimple_assign_single_p (*stmt))
2976 return SRA_AM_NONE;
2977 lhs = gimple_assign_lhs (*stmt);
2978 rhs = gimple_assign_rhs1 (*stmt);
2979
2980 if (TREE_CODE (rhs) == CONSTRUCTOR)
2981 return sra_modify_constructor_assign (stmt, gsi);
2982
2983 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
2984 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
2985 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
2986 {
2987 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
2988 gsi, false);
2989 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
2990 gsi, true);
2991 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
2992 }
2993
2994 lacc = get_access_for_expr (lhs);
2995 racc = get_access_for_expr (rhs);
2996 if (!lacc && !racc)
2997 return SRA_AM_NONE;
2998
2999 loc = gimple_location (*stmt);
3000 if (lacc && lacc->grp_to_be_replaced)
3001 {
3002 lhs = get_access_replacement (lacc);
3003 gimple_assign_set_lhs (*stmt, lhs);
3004 modify_this_stmt = true;
3005 if (lacc->grp_partial_lhs)
3006 force_gimple_rhs = true;
3007 sra_stats.exprs++;
3008 }
3009
3010 if (racc && racc->grp_to_be_replaced)
3011 {
3012 rhs = get_access_replacement (racc);
3013 modify_this_stmt = true;
3014 if (racc->grp_partial_lhs)
3015 force_gimple_rhs = true;
3016 sra_stats.exprs++;
3017 }
3018 else if (racc
3019 && !racc->grp_unscalarized_data
3020 && TREE_CODE (lhs) == SSA_NAME
3021 && !access_has_replacements_p (racc))
3022 {
3023 rhs = get_repl_default_def_ssa_name (racc);
3024 modify_this_stmt = true;
3025 sra_stats.exprs++;
3026 }
3027
3028 if (modify_this_stmt)
3029 {
3030 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3031 {
3032 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3033 ??? This should move to fold_stmt which we simply should
3034 call after building a VIEW_CONVERT_EXPR here. */
3035 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3036 && !contains_bitfld_comp_ref_p (lhs)
3037 && !access_has_children_p (lacc))
3038 {
3039 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3040 gimple_assign_set_lhs (*stmt, lhs);
3041 }
3042 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3043 && !contains_vce_or_bfcref_p (rhs)
3044 && !access_has_children_p (racc))
3045 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3046
3047 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3048 {
3049 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3050 rhs);
3051 if (is_gimple_reg_type (TREE_TYPE (lhs))
3052 && TREE_CODE (lhs) != SSA_NAME)
3053 force_gimple_rhs = true;
3054 }
3055 }
3056 }
3057
3058 /* From this point on, the function deals with assignments in between
3059 aggregates when at least one has scalar reductions of some of its
3060 components. There are three possible scenarios: 1) both the LHS and RHS have
3061 to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
3062
3063 In the first case, we would like to load the LHS components from RHS
3064 components whenever possible. If that is not possible, we would like to
3065 read it directly from the RHS (after updating it by storing in it its own
3066 components). If there are some necessary unscalarized data in the LHS,
3067 those will be loaded by the original assignment too. If neither of these
3068 cases happen, the original statement can be removed. Most of this is done
3069 by load_assign_lhs_subreplacements.
3070
3071 In the second case, we would like to store all RHS scalarized components
3072 directly into LHS and if they cover the aggregate completely, remove the
3073 statement too. In the third case, we want the LHS components to be loaded
3074 directly from the RHS (DSE will remove the original statement if it
3075 becomes redundant).
3076
3077 This is a bit complex but manageable when types match and when unions do
3078 not cause confusion in a way that we cannot really load a component of LHS
3079 from the RHS or vice versa (the access representing this level can have
3080 subaccesses that are accessible only through a different union field at a
3081 higher level - different from the one used in the examined expression).
3082 Unions are fun.
3083
3084 Therefore, I specially handle a fourth case, happening when there is a
3085 specific type cast or it is impossible to locate a scalarized subaccess on
3086 the other side of the expression. If that happens, I simply "refresh" the
3087 RHS by storing its scalarized components in it, leave the original statement
3088 there to do the copying and then load the scalar replacements of the LHS.
3089 This is what the first branch does. */
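/* A small made-up example of the second scenario: for l = r where only r has
   replacements r$a and r$b covering it completely, the copy is replaced by
   l.a = r$a; l.b = r$b; and deleted.  In the third scenario the copy is kept
   and followed by l$a = r.a; l$b = r.b; so that DSE can remove the aggregate
   copy if it becomes redundant.  */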
3090
3091 if (modify_this_stmt
3092 || gimple_has_volatile_ops (*stmt)
3093 || contains_vce_or_bfcref_p (rhs)
3094 || contains_vce_or_bfcref_p (lhs))
3095 {
3096 if (access_has_children_p (racc))
3097 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
3098 gsi, false, false, loc);
3099 if (access_has_children_p (lacc))
3100 generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
3101 gsi, true, true, loc);
3102 sra_stats.separate_lhs_rhs_handling++;
3103
3104 /* This gimplification must be done after generate_subtree_copies,
3105 lest we insert the subtree copies in the middle of the gimplified
3106 sequence. */
3107 if (force_gimple_rhs)
3108 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3109 true, GSI_SAME_STMT);
3110 if (gimple_assign_rhs1 (*stmt) != rhs)
3111 {
3112 modify_this_stmt = true;
3113 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3114 gcc_assert (*stmt == gsi_stmt (orig_gsi));
3115 }
3116
3117 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3118 }
3119 else
3120 {
3121 if (access_has_children_p (lacc) && access_has_children_p (racc))
3122 {
3123 gimple_stmt_iterator orig_gsi = *gsi;
3124 enum unscalarized_data_handling refreshed;
3125
3126 if (lacc->grp_read && !lacc->grp_covered)
3127 refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
3128 else
3129 refreshed = SRA_UDH_NONE;
3130
3131 load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
3132 &orig_gsi, gsi, &refreshed);
3133 if (refreshed != SRA_UDH_RIGHT)
3134 {
3135 gsi_next (gsi);
3136 unlink_stmt_vdef (*stmt);
3137 gsi_remove (&orig_gsi, true);
3138 sra_stats.deleted++;
3139 return SRA_AM_REMOVED;
3140 }
3141 }
3142 else
3143 {
3144 if (access_has_children_p (racc)
3145 && !racc->grp_unscalarized_data)
3146 {
3147 if (dump_file)
3148 {
3149 fprintf (dump_file, "Removing load: ");
3150 print_gimple_stmt (dump_file, *stmt, 0, 0);
3151 }
3152 generate_subtree_copies (racc->first_child, lhs,
3153 racc->offset, 0, 0, gsi,
3154 false, false, loc);
3155 gcc_assert (*stmt == gsi_stmt (*gsi));
3156 unlink_stmt_vdef (*stmt);
3157 gsi_remove (gsi, true);
3158 sra_stats.deleted++;
3159 return SRA_AM_REMOVED;
3160 }
3161 /* Restore the aggregate RHS from its components so the
3162 prevailing aggregate copy does the right thing. */
3163 if (access_has_children_p (racc))
3164 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
3165 gsi, false, false, loc);
3166 /* Re-load the components of the aggregate copy destination.
3167 But use the RHS aggregate to load from to expose more
3168 optimization opportunities. */
3169 if (access_has_children_p (lacc))
3170 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3171 0, 0, gsi, true, true, loc);
3172 }
3173
3174 return SRA_AM_NONE;
3175 }
3176 }
3177
3178 /* Traverse the function body and perform all modifications as decided in
3179 analyze_all_variable_accesses. Return true iff the CFG has been
3180 changed. */
3181
3182 static bool
3183 sra_modify_function_body (void)
3184 {
3185 bool cfg_changed = false;
3186 basic_block bb;
3187
3188 FOR_EACH_BB (bb)
3189 {
3190 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3191 while (!gsi_end_p (gsi))
3192 {
3193 gimple stmt = gsi_stmt (gsi);
3194 enum assignment_mod_result assign_result;
3195 bool modified = false, deleted = false;
3196 tree *t;
3197 unsigned i;
3198
3199 switch (gimple_code (stmt))
3200 {
3201 case GIMPLE_RETURN:
3202 t = gimple_return_retval_ptr (stmt);
3203 if (*t != NULL_TREE)
3204 modified |= sra_modify_expr (t, &gsi, false);
3205 break;
3206
3207 case GIMPLE_ASSIGN:
3208 assign_result = sra_modify_assign (&stmt, &gsi);
3209 modified |= assign_result == SRA_AM_MODIFIED;
3210 deleted = assign_result == SRA_AM_REMOVED;
3211 break;
3212
3213 case GIMPLE_CALL:
3214 /* Operands must be processed before the lhs. */
3215 for (i = 0; i < gimple_call_num_args (stmt); i++)
3216 {
3217 t = gimple_call_arg_ptr (stmt, i);
3218 modified |= sra_modify_expr (t, &gsi, false);
3219 }
3220
3221 if (gimple_call_lhs (stmt))
3222 {
3223 t = gimple_call_lhs_ptr (stmt);
3224 modified |= sra_modify_expr (t, &gsi, true);
3225 }
3226 break;
3227
3228 case GIMPLE_ASM:
3229 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
3230 {
3231 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
3232 modified |= sra_modify_expr (t, &gsi, false);
3233 }
3234 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
3235 {
3236 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
3237 modified |= sra_modify_expr (t, &gsi, true);
3238 }
3239 break;
3240
3241 default:
3242 break;
3243 }
3244
3245 if (modified)
3246 {
3247 update_stmt (stmt);
3248 if (maybe_clean_eh_stmt (stmt)
3249 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3250 cfg_changed = true;
3251 }
3252 if (!deleted)
3253 gsi_next (&gsi);
3254 }
3255 }
3256
3257 return cfg_changed;
3258 }
3259
3260 /* Generate statements initializing scalar replacements of parts of function
3261 parameters. */
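/* E.g. (hypothetically) for a parameter struct s p with replacements p$a and
   p$b this emits p$a = p.a; p$b = p.b; on the single edge out of the entry
   block.  */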
3262
3263 static void
3264 initialize_parameter_reductions (void)
3265 {
3266 gimple_stmt_iterator gsi;
3267 gimple_seq seq = NULL;
3268 tree parm;
3269
3270 for (parm = DECL_ARGUMENTS (current_function_decl);
3271 parm;
3272 parm = DECL_CHAIN (parm))
3273 {
3274 VEC (access_p, heap) *access_vec;
3275 struct access *access;
3276
3277 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3278 continue;
3279 access_vec = get_base_access_vector (parm);
3280 if (!access_vec)
3281 continue;
3282
3283 if (!seq)
3284 {
3285 seq = gimple_seq_alloc ();
3286 gsi = gsi_start (seq);
3287 }
3288
3289 for (access = VEC_index (access_p, access_vec, 0);
3290 access;
3291 access = access->next_grp)
3292 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3293 EXPR_LOCATION (parm));
3294 }
3295
3296 if (seq)
3297 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
3298 }
3299
3300 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3301 it reveals there are components of some aggregates to be scalarized, it runs
3302 the required transformations. */
3303 static unsigned int
3304 perform_intra_sra (void)
3305 {
3306 int ret = 0;
3307 sra_initialize ();
3308
3309 if (!find_var_candidates ())
3310 goto out;
3311
3312 if (!scan_function ())
3313 goto out;
3314
3315 if (!analyze_all_variable_accesses ())
3316 goto out;
3317
3318 if (sra_modify_function_body ())
3319 ret = TODO_update_ssa | TODO_cleanup_cfg;
3320 else
3321 ret = TODO_update_ssa;
3322 initialize_parameter_reductions ();
3323
3324 statistics_counter_event (cfun, "Scalar replacements created",
3325 sra_stats.replacements);
3326 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3327 statistics_counter_event (cfun, "Subtree copy stmts",
3328 sra_stats.subtree_copies);
3329 statistics_counter_event (cfun, "Subreplacement stmts",
3330 sra_stats.subreplacements);
3331 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3332 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3333 sra_stats.separate_lhs_rhs_handling);
3334
3335 out:
3336 sra_deinitialize ();
3337 return ret;
3338 }
3339
3340 /* Perform early intraprocedural SRA. */
3341 static unsigned int
3342 early_intra_sra (void)
3343 {
3344 sra_mode = SRA_MODE_EARLY_INTRA;
3345 return perform_intra_sra ();
3346 }
3347
3348 /* Perform "late" intraprocedural SRA. */
3349 static unsigned int
3350 late_intra_sra (void)
3351 {
3352 sra_mode = SRA_MODE_INTRA;
3353 return perform_intra_sra ();
3354 }
3355
3356
3357 static bool
3358 gate_intra_sra (void)
3359 {
3360 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3361 }
3362
3363
3364 struct gimple_opt_pass pass_sra_early =
3365 {
3366 {
3367 GIMPLE_PASS,
3368 "esra", /* name */
3369 gate_intra_sra, /* gate */
3370 early_intra_sra, /* execute */
3371 NULL, /* sub */
3372 NULL, /* next */
3373 0, /* static_pass_number */
3374 TV_TREE_SRA, /* tv_id */
3375 PROP_cfg | PROP_ssa, /* properties_required */
3376 0, /* properties_provided */
3377 0, /* properties_destroyed */
3378 0, /* todo_flags_start */
3379 TODO_update_ssa
3380 | TODO_ggc_collect
3381 | TODO_verify_ssa /* todo_flags_finish */
3382 }
3383 };
3384
3385 struct gimple_opt_pass pass_sra =
3386 {
3387 {
3388 GIMPLE_PASS,
3389 "sra", /* name */
3390 gate_intra_sra, /* gate */
3391 late_intra_sra, /* execute */
3392 NULL, /* sub */
3393 NULL, /* next */
3394 0, /* static_pass_number */
3395 TV_TREE_SRA, /* tv_id */
3396 PROP_cfg | PROP_ssa, /* properties_required */
3397 0, /* properties_provided */
3398 0, /* properties_destroyed */
3399 TODO_update_address_taken, /* todo_flags_start */
3400 TODO_update_ssa
3401 | TODO_ggc_collect
3402 | TODO_verify_ssa /* todo_flags_finish */
3403 }
3404 };
3405
3406
3407 /* Return true iff PARM (which must be a PARM_DECL) is an unused scalar
3408 parameter. */
3409
3410 static bool
3411 is_unused_scalar_param (tree parm)
3412 {
3413 tree name;
3414 return (is_gimple_reg (parm)
3415 && (!(name = gimple_default_def (cfun, parm))
3416 || has_zero_uses (name)));
3417 }
3418
3419 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3420 examine whether there are any direct or otherwise infeasible ones. If so,
3421 return true, otherwise return false. PARM must be a gimple register with a
3422 non-NULL default definition. */
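/* For instance (illustrative): given void f (int *p), a use such as *p = 0
   is a valid dereference, whereas q = p or p + 1 uses the pointer itself
   directly and makes the parameter unsuitable for IPA-SRA.  */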
3423
3424 static bool
3425 ptr_parm_has_direct_uses (tree parm)
3426 {
3427 imm_use_iterator ui;
3428 gimple stmt;
3429 tree name = gimple_default_def (cfun, parm);
3430 bool ret = false;
3431
3432 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3433 {
3434 int uses_ok = 0;
3435 use_operand_p use_p;
3436
3437 if (is_gimple_debug (stmt))
3438 continue;
3439
3440 /* Valid uses include dereferences on the lhs and the rhs. */
3441 if (gimple_has_lhs (stmt))
3442 {
3443 tree lhs = gimple_get_lhs (stmt);
3444 while (handled_component_p (lhs))
3445 lhs = TREE_OPERAND (lhs, 0);
3446 if (TREE_CODE (lhs) == MEM_REF
3447 && TREE_OPERAND (lhs, 0) == name
3448 && integer_zerop (TREE_OPERAND (lhs, 1))
3449 && types_compatible_p (TREE_TYPE (lhs),
3450 TREE_TYPE (TREE_TYPE (name)))
3451 && !TREE_THIS_VOLATILE (lhs))
3452 uses_ok++;
3453 }
3454 if (gimple_assign_single_p (stmt))
3455 {
3456 tree rhs = gimple_assign_rhs1 (stmt);
3457 while (handled_component_p (rhs))
3458 rhs = TREE_OPERAND (rhs, 0);
3459 if (TREE_CODE (rhs) == MEM_REF
3460 && TREE_OPERAND (rhs, 0) == name
3461 && integer_zerop (TREE_OPERAND (rhs, 1))
3462 && types_compatible_p (TREE_TYPE (rhs),
3463 TREE_TYPE (TREE_TYPE (name)))
3464 && !TREE_THIS_VOLATILE (rhs))
3465 uses_ok++;
3466 }
3467 else if (is_gimple_call (stmt))
3468 {
3469 unsigned i;
3470 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3471 {
3472 tree arg = gimple_call_arg (stmt, i);
3473 while (handled_component_p (arg))
3474 arg = TREE_OPERAND (arg, 0);
3475 if (TREE_CODE (arg) == MEM_REF
3476 && TREE_OPERAND (arg, 0) == name
3477 && integer_zerop (TREE_OPERAND (arg, 1))
3478 && types_compatible_p (TREE_TYPE (arg),
3479 TREE_TYPE (TREE_TYPE (name)))
3480 && !TREE_THIS_VOLATILE (arg))
3481 uses_ok++;
3482 }
3483 }
3484
3485 /* If the number of valid uses does not match the number of
3486 uses in this stmt there is an unhandled use. */
3487 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3488 --uses_ok;
3489
3490 if (uses_ok != 0)
3491 ret = true;
3492
3493 if (ret)
3494 BREAK_FROM_IMM_USE_STMT (ui);
3495 }
3496
3497 return ret;
3498 }
3499
3500 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3501 them in candidate_bitmap. Note that these do not necessarily include
3502 parameters which are unused and thus can be removed. Return true iff any
3503 such candidate has been found. */
3504
3505 static bool
3506 find_param_candidates (void)
3507 {
3508 tree parm;
3509 int count = 0;
3510 bool ret = false;
3511 const char *msg;
3512
3513 for (parm = DECL_ARGUMENTS (current_function_decl);
3514 parm;
3515 parm = DECL_CHAIN (parm))
3516 {
3517 tree type = TREE_TYPE (parm);
3518
3519 count++;
3520
3521 if (TREE_THIS_VOLATILE (parm)
3522 || TREE_ADDRESSABLE (parm)
3523 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3524 continue;
3525
3526 if (is_unused_scalar_param (parm))
3527 {
3528 ret = true;
3529 continue;
3530 }
3531
3532 if (POINTER_TYPE_P (type))
3533 {
3534 type = TREE_TYPE (type);
3535
3536 if (TREE_CODE (type) == FUNCTION_TYPE
3537 || TYPE_VOLATILE (type)
3538 || (TREE_CODE (type) == ARRAY_TYPE
3539 && TYPE_NONALIASED_COMPONENT (type))
3540 || !is_gimple_reg (parm)
3541 || is_va_list_type (type)
3542 || ptr_parm_has_direct_uses (parm))
3543 continue;
3544 }
3545 else if (!AGGREGATE_TYPE_P (type))
3546 continue;
3547
3548 if (!COMPLETE_TYPE_P (type)
3549 || !host_integerp (TYPE_SIZE (type), 1)
3550 || tree_low_cst (TYPE_SIZE (type), 1) == 0
3551 || (AGGREGATE_TYPE_P (type)
3552 && type_internals_preclude_sra_p (type, &msg)))
3553 continue;
3554
3555 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3556 ret = true;
3557 if (dump_file && (dump_flags & TDF_DETAILS))
3558 {
3559 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3560 print_generic_expr (dump_file, parm, 0);
3561 fprintf (dump_file, "\n");
3562 }
3563 }
3564
3565 func_param_count = count;
3566 return ret;
3567 }
3568
3569 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3570 maybe_modified. */
3571
3572 static bool
3573 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3574 void *data)
3575 {
3576 struct access *repr = (struct access *) data;
3577
3578 repr->grp_maybe_modified = 1;
3579 return true;
3580 }
3581
3582 /* Analyze what representatives (in linked lists accessible from
3583 REPRESENTATIVES) can be modified by side effects of statements in the
3584 current function. */
3585
3586 static void
3587 analyze_modified_params (VEC (access_p, heap) *representatives)
3588 {
3589 int i;
3590
3591 for (i = 0; i < func_param_count; i++)
3592 {
3593 struct access *repr;
3594
3595 for (repr = VEC_index (access_p, representatives, i);
3596 repr;
3597 repr = repr->next_grp)
3598 {
3599 struct access *access;
3600 bitmap visited;
3601 ao_ref ar;
3602
3603 if (no_accesses_p (repr))
3604 continue;
3605 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3606 || repr->grp_maybe_modified)
3607 continue;
3608
3609 ao_ref_init (&ar, repr->expr);
3610 visited = BITMAP_ALLOC (NULL);
3611 for (access = repr; access; access = access->next_sibling)
3612 {
3613 /* All accesses are read ones, otherwise grp_maybe_modified would
3614 be trivially set. */
3615 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3616 mark_maybe_modified, repr, &visited);
3617 if (repr->grp_maybe_modified)
3618 break;
3619 }
3620 BITMAP_FREE (visited);
3621 }
3622 }
3623 }
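
/* A hedged example of what this analysis catches (invented code):

     int f (int *p, int *q)
     {
       *q = 0;
       return *p;
     }

   If P and Q may alias, the store through Q may modify *P, so the walk
   above marks the representative of P as grp_maybe_modified and P will
   not be converted into a parameter passed by value.  */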
3624
3625 /* Propagate distances in bb_dereferences in the opposite direction to the
3626 control flow edges, in each step storing the maximum of the current value
3627 and the minimum of all successors. These steps are repeated until the table
3628 stabilizes. Note that BBs which might terminate the function (according to
3629 the final_bbs bitmap) are never updated in this way. */
3630
3631 static void
3632 propagate_dereference_distances (void)
3633 {
3634 VEC (basic_block, heap) *queue;
3635 basic_block bb;
3636
3637 queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
3638 VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
3639 FOR_EACH_BB (bb)
3640 {
3641 VEC_quick_push (basic_block, queue, bb);
3642 bb->aux = bb;
3643 }
3644
3645 while (!VEC_empty (basic_block, queue))
3646 {
3647 edge_iterator ei;
3648 edge e;
3649 bool change = false;
3650 int i;
3651
3652 bb = VEC_pop (basic_block, queue);
3653 bb->aux = NULL;
3654
3655 if (bitmap_bit_p (final_bbs, bb->index))
3656 continue;
3657
3658 for (i = 0; i < func_param_count; i++)
3659 {
3660 int idx = bb->index * func_param_count + i;
3661 bool first = true;
3662 HOST_WIDE_INT inh = 0;
3663
3664 FOR_EACH_EDGE (e, ei, bb->succs)
3665 {
3666 int succ_idx = e->dest->index * func_param_count + i;
3667
3668 if (e->dest == EXIT_BLOCK_PTR)
3669 continue;
3670
3671 if (first)
3672 {
3673 first = false;
3674 inh = bb_dereferences [succ_idx];
3675 }
3676 else if (bb_dereferences [succ_idx] < inh)
3677 inh = bb_dereferences [succ_idx];
3678 }
3679
3680 if (!first && bb_dereferences[idx] < inh)
3681 {
3682 bb_dereferences[idx] = inh;
3683 change = true;
3684 }
3685 }
3686
3687 if (change && !bitmap_bit_p (final_bbs, bb->index))
3688 FOR_EACH_EDGE (e, ei, bb->preds)
3689 {
3690 if (e->src->aux)
3691 continue;
3692
3693 e->src->aux = e->src;
3694 VEC_quick_push (basic_block, queue, e->src);
3695 }
3696 }
3697
3698 VEC_free (basic_block, heap, queue);
3699 }
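
/* A small worked example (numbers invented): assume a single pointer
   parameter and a diamond where bb2 branches to bb3 and bb4, which both
   fall into bb5.  If the recorded distances are 32 in bb3, 64 in bb4
   and 0 elsewhere, processing bb2 stores max (0, min (32, 64)) = 32
   into its slot, meaning that at least the first 32 bits are known to
   be dereferenced on every path leaving bb2.  */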
3700
3701 /* Dump a dereferences TABLE with heading STR to file F. */
3702
3703 static void
3704 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3705 {
3706 basic_block bb;
3707
3708 fprintf (f, "%s", str);
3709 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3710 {
3711 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3712 if (bb != EXIT_BLOCK_PTR)
3713 {
3714 int i;
3715 for (i = 0; i < func_param_count; i++)
3716 {
3717 int idx = bb->index * func_param_count + i;
3718 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3719 }
3720 }
3721 fprintf (f, "\n");
3722 }
3723 fprintf (f, "\n");
3724 }
3725
3726 /* Determine which (parts of) parameters passed by reference that are not
3727 assigned to are not certainly dereferenced in this function and thus the
3728 dereferencing cannot be safely moved to the caller without potentially
3729 introducing a segfault. Mark such REPRESENTATIVES as
3730 grp_not_necessarilly_dereferenced.
3731
3732 The maximum dereferenced "distance," i.e. the offset + size of the accessed
3733 part, is calculated for each pointer parameter rather than a simple
3734 boolean, in order to handle cases when only a fraction of the whole
3735 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3736 an example).
3737
3738 The maximum dereference distances for each pointer parameter and BB are
3739 already stored in bb_dereferences. This routine simply propagates these
3740 values upwards by propagate_dereference_distances and then compares the
3741 distances of individual parameters in the ENTRY BB to the equivalent
3742 distances of each representative of a (fraction of a) parameter. */
3743
3744 static void
3745 analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
3746 {
3747 int i;
3748
3749 if (dump_file && (dump_flags & TDF_DETAILS))
3750 dump_dereferences_table (dump_file,
3751 "Dereference table before propagation:\n",
3752 bb_dereferences);
3753
3754 propagate_dereference_distances ();
3755
3756 if (dump_file && (dump_flags & TDF_DETAILS))
3757 dump_dereferences_table (dump_file,
3758 "Dereference table after propagation:\n",
3759 bb_dereferences);
3760
3761 for (i = 0; i < func_param_count; i++)
3762 {
3763 struct access *repr = VEC_index (access_p, representatives, i);
3764 int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
3765
3766 if (!repr || no_accesses_p (repr))
3767 continue;
3768
3769 do
3770 {
3771 if ((repr->offset + repr->size) > bb_dereferences[idx])
3772 repr->grp_not_necessarilly_dereferenced = 1;
3773 repr = repr->next_grp;
3774 }
3775 while (repr);
3776 }
3777 }
3778
3779 /* Return the representative access for the parameter declaration PARM if it is
3780 a scalar passed by reference which is not written to and the pointer value
3781 is not used directly. Thus, if it is legal to dereference it in the caller
3782 and we can rule out modifications through aliases, such a parameter should be
3783 turned into one passed by value. Return NULL otherwise. */
3784
3785 static struct access *
3786 unmodified_by_ref_scalar_representative (tree parm)
3787 {
3788 int i, access_count;
3789 struct access *repr;
3790 VEC (access_p, heap) *access_vec;
3791
3792 access_vec = get_base_access_vector (parm);
3793 gcc_assert (access_vec);
3794 repr = VEC_index (access_p, access_vec, 0);
3795 if (repr->write)
3796 return NULL;
3797 repr->group_representative = repr;
3798
3799 access_count = VEC_length (access_p, access_vec);
3800 for (i = 1; i < access_count; i++)
3801 {
3802 struct access *access = VEC_index (access_p, access_vec, i);
3803 if (access->write)
3804 return NULL;
3805 access->group_representative = repr;
3806 access->next_sibling = repr->next_sibling;
3807 repr->next_sibling = access;
3808 }
3809
3810 repr->grp_read = 1;
3811 repr->grp_scalar_ptr = 1;
3812 return repr;
3813 }
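
/* The transformation this enables, sketched on invented code: in

     int f (int *p)
     {
       return *p + *p;
     }

   all accesses to *P are reads, so provided the later checks in
   analyze_modified_params and analyze_caller_dereference_legality
   succeed, P can be rewritten into a parameter passed by value, roughly

     int f.isra (int p_val)
     {
       return p_val + p_val;
     }

   where the clone and parameter names are only illustrative.  */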
3814
3815 /* Return true iff this access precludes IPA-SRA of the parameter it is
3816 associated with. */
3817
3818 static bool
3819 access_precludes_ipa_sra_p (struct access *access)
3820 {
3821 /* Avoid issues such as the second simple testcase in PR 42025. The problem
3822 is an incompatible assignment in a call statement (and possibly even in asm
3823 statements). This can be relaxed by using a new temporary but only for
3824 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
3825 intraprocedural SRA we deal with this by keeping the old aggregate around,
3826 something we cannot do in IPA-SRA.) */
3827 if (access->write
3828 && (is_gimple_call (access->stmt)
3829 || gimple_code (access->stmt) == GIMPLE_ASM))
3830 return true;
3831
3832 if (STRICT_ALIGNMENT
3833 && tree_non_aligned_mem_p (access->expr, TYPE_ALIGN (access->type)))
3834 return true;
3835
3836 return false;
3837 }
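
/* For example (a sketch, not from the pass): in

     void f (struct S *p)
     {
       p->i = g ();
     }

   the write to p->i is the left hand side of a call statement, which is
   precisely the situation the first test above rejects.  */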
3838
3839
3840 /* Sort collected accesses for parameter PARM, identify representatives for
3841 each accessed region and link them together. Return NULL if there are
3842 different but overlapping accesses, the special pointer value meaning there
3843 are no accesses for this parameter if that is the case, and the first
3844 representative otherwise. Set *RO_GRP if there is a group of accesses with
3845 only read (i.e. no write) accesses. */
3846
3847 static struct access *
3848 splice_param_accesses (tree parm, bool *ro_grp)
3849 {
3850 int i, j, access_count, group_count;
3851 int agg_size, total_size = 0;
3852 struct access *access, *res, **prev_acc_ptr = &res;
3853 VEC (access_p, heap) *access_vec;
3854
3855 access_vec = get_base_access_vector (parm);
3856 if (!access_vec)
3857 return &no_accesses_representant;
3858 access_count = VEC_length (access_p, access_vec);
3859
3860 VEC_qsort (access_p, access_vec, compare_access_positions);
3861
3862 i = 0;
3863 total_size = 0;
3864 group_count = 0;
3865 while (i < access_count)
3866 {
3867 bool modification;
3868 tree a1_alias_type;
3869 access = VEC_index (access_p, access_vec, i);
3870 modification = access->write;
3871 if (access_precludes_ipa_sra_p (access))
3872 return NULL;
3873 a1_alias_type = reference_alias_ptr_type (access->expr);
3874
3875 /* Access is about to become group representative unless we find some
3876 nasty overlap which would preclude us from breaking this parameter
3877 apart. */
3878
3879 j = i + 1;
3880 while (j < access_count)
3881 {
3882 struct access *ac2 = VEC_index (access_p, access_vec, j);
3883 if (ac2->offset != access->offset)
3884 {
3885 /* All or nothing law for parameters. */
3886 if (access->offset + access->size > ac2->offset)
3887 return NULL;
3888 else
3889 break;
3890 }
3891 else if (ac2->size != access->size)
3892 return NULL;
3893
3894 if (access_precludes_ipa_sra_p (ac2)
3895 || (ac2->type != access->type
3896 && (TREE_ADDRESSABLE (ac2->type)
3897 || TREE_ADDRESSABLE (access->type)))
3898 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
3899 return NULL;
3900
3901 modification |= ac2->write;
3902 ac2->group_representative = access;
3903 ac2->next_sibling = access->next_sibling;
3904 access->next_sibling = ac2;
3905 j++;
3906 }
3907
3908 group_count++;
3909 access->grp_maybe_modified = modification;
3910 if (!modification)
3911 *ro_grp = true;
3912 *prev_acc_ptr = access;
3913 prev_acc_ptr = &access->next_grp;
3914 total_size += access->size;
3915 i = j;
3916 }
3917
3918 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3919 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3920 else
3921 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3922 if (total_size >= agg_size)
3923 return NULL;
3924
3925 gcc_assert (group_count > 0);
3926 return res;
3927 }
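
/* For instance (offsets and sizes invented): two accesses to the same
   parameter at offset 0 with size 64 and at offset 32 with size 32
   overlap partially, so the function returns NULL and the parameter is
   left intact; accesses at offsets 0 and 64, each of size 32, would
   instead form two disjoint groups with their own representatives.  */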
3928
3929 /* Decide whether parameters with representative accesses given by REPR should
3930 be reduced into components. */
3931
3932 static int
3933 decide_one_param_reduction (struct access *repr)
3934 {
3935 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
3936 bool by_ref;
3937 tree parm;
3938
3939 parm = repr->base;
3940 cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3941 gcc_assert (cur_parm_size > 0);
3942
3943 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3944 {
3945 by_ref = true;
3946 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3947 }
3948 else
3949 {
3950 by_ref = false;
3951 agg_size = cur_parm_size;
3952 }
3953
3954 if (dump_file)
3955 {
3956 struct access *acc;
3957 fprintf (dump_file, "Evaluating PARAM group sizes for ");
3958 print_generic_expr (dump_file, parm, 0);
3959 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
3960 for (acc = repr; acc; acc = acc->next_grp)
3961 dump_access (dump_file, acc, true);
3962 }
3963
3964 total_size = 0;
3965 new_param_count = 0;
3966
3967 for (; repr; repr = repr->next_grp)
3968 {
3969 gcc_assert (parm == repr->base);
3970
3971 /* Taking the address of a non-addressable field is verboten. */
3972 if (by_ref && repr->non_addressable)
3973 return 0;
3974
3975 /* Do not decompose a non-BLKmode param in a way that would
3976 create BLKmode params. Especially for by-reference passing
3977 (thus, pointer-type param) this is hardly worthwhile. */
3978 if (DECL_MODE (parm) != BLKmode
3979 && TYPE_MODE (repr->type) == BLKmode)
3980 return 0;
3981
3982 if (!by_ref || (!repr->grp_maybe_modified
3983 && !repr->grp_not_necessarilly_dereferenced))
3984 total_size += repr->size;
3985 else
3986 total_size += cur_parm_size;
3987
3988 new_param_count++;
3989 }
3990
3991 gcc_assert (new_param_count > 0);
3992
3993 if (optimize_function_for_size_p (cfun))
3994 parm_size_limit = cur_parm_size;
3995 else
3996 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
3997 * cur_parm_size);
3998
3999 if (total_size < agg_size
4000 && total_size <= parm_size_limit)
4001 {
4002 if (dump_file)
4003 fprintf (dump_file, " ....will be split into %i components\n",
4004 new_param_count);
4005 return new_param_count;
4006 }
4007 else
4008 return 0;
4009 }
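
/* An example of the final check (numbers invented): for a by-value
   aggregate of 128 bits accessed only through two 32-bit fields,
   total_size is 64, which is smaller than agg_size (128) and, assuming
   the default PARAM_IPA_SRA_PTR_GROWTH_FACTOR of 2, also smaller than
   parm_size_limit (256), so the parameter is split into two
   components.  */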
4010
4011 /* The order of the following enumerators is important; we need to do extra
4012 work for UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
4013 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4014 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
4015
4016 /* Identify representatives of all accesses to all candidate parameters for
4017 IPA-SRA. Return result based on what representatives have been found. */
4018
4019 static enum ipa_splicing_result
4020 splice_all_param_accesses (VEC (access_p, heap) **representatives)
4021 {
4022 enum ipa_splicing_result result = NO_GOOD_ACCESS;
4023 tree parm;
4024 struct access *repr;
4025
4026 *representatives = VEC_alloc (access_p, heap, func_param_count);
4027
4028 for (parm = DECL_ARGUMENTS (current_function_decl);
4029 parm;
4030 parm = DECL_CHAIN (parm))
4031 {
4032 if (is_unused_scalar_param (parm))
4033 {
4034 VEC_quick_push (access_p, *representatives,
4035 &no_accesses_representant);
4036 if (result == NO_GOOD_ACCESS)
4037 result = UNUSED_PARAMS;
4038 }
4039 else if (POINTER_TYPE_P (TREE_TYPE (parm))
4040 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4041 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4042 {
4043 repr = unmodified_by_ref_scalar_representative (parm);
4044 VEC_quick_push (access_p, *representatives, repr);
4045 if (repr)
4046 result = UNMODIF_BY_REF_ACCESSES;
4047 }
4048 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4049 {
4050 bool ro_grp = false;
4051 repr = splice_param_accesses (parm, &ro_grp);
4052 VEC_quick_push (access_p, *representatives, repr);
4053
4054 if (repr && !no_accesses_p (repr))
4055 {
4056 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4057 {
4058 if (ro_grp)
4059 result = UNMODIF_BY_REF_ACCESSES;
4060 else if (result < MODIF_BY_REF_ACCESSES)
4061 result = MODIF_BY_REF_ACCESSES;
4062 }
4063 else if (result < BY_VAL_ACCESSES)
4064 result = BY_VAL_ACCESSES;
4065 }
4066 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4067 result = UNUSED_PARAMS;
4068 }
4069 else
4070 VEC_quick_push (access_p, *representatives, NULL);
4071 }
4072
4073 if (result == NO_GOOD_ACCESS)
4074 {
4075 VEC_free (access_p, heap, *representatives);
4076 *representatives = NULL;
4077 return NO_GOOD_ACCESS;
4078 }
4079
4080 return result;
4081 }
4082
4083 /* Return the index of BASE in PARMS. Abort if it is not found. */
4084
4085 static inline int
4086 get_param_index (tree base, VEC(tree, heap) *parms)
4087 {
4088 int i, len;
4089
4090 len = VEC_length (tree, parms);
4091 for (i = 0; i < len; i++)
4092 if (VEC_index (tree, parms, i) == base)
4093 return i;
4094 gcc_unreachable ();
4095 }
4096
4097 /* Convert the decisions made at the representative level into compact
4098 parameter adjustments. REPRESENTATIVES are pointers to first
4099 representatives of each parameter's accesses, ADJUSTMENTS_COUNT is the expected
4100 final number of adjustments. */
4101
4102 static ipa_parm_adjustment_vec
4103 turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
4104 int adjustments_count)
4105 {
4106 VEC (tree, heap) *parms;
4107 ipa_parm_adjustment_vec adjustments;
4108 tree parm;
4109 int i;
4110
4111 gcc_assert (adjustments_count > 0);
4112 parms = ipa_get_vector_of_formal_parms (current_function_decl);
4113 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
4114 parm = DECL_ARGUMENTS (current_function_decl);
4115 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4116 {
4117 struct access *repr = VEC_index (access_p, representatives, i);
4118
4119 if (!repr || no_accesses_p (repr))
4120 {
4121 struct ipa_parm_adjustment *adj;
4122
4123 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
4124 memset (adj, 0, sizeof (*adj));
4125 adj->base_index = get_param_index (parm, parms);
4126 adj->base = parm;
4127 if (!repr)
4128 adj->copy_param = 1;
4129 else
4130 adj->remove_param = 1;
4131 }
4132 else
4133 {
4134 struct ipa_parm_adjustment *adj;
4135 int index = get_param_index (parm, parms);
4136
4137 for (; repr; repr = repr->next_grp)
4138 {
4139 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
4140 memset (adj, 0, sizeof (*adj));
4141 gcc_assert (repr->base == parm);
4142 adj->base_index = index;
4143 adj->base = repr->base;
4144 adj->type = repr->type;
4145 adj->alias_ptr_type = reference_alias_ptr_type (repr->expr);
4146 adj->offset = repr->offset;
4147 adj->by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4148 && (repr->grp_maybe_modified
4149 || repr->grp_not_necessarilly_dereferenced));
4150
4151 }
4152 }
4153 }
4154 VEC_free (tree, heap, parms);
4155 return adjustments;
4156 }
4157
4158 /* Analyze the collected accesses and produce a plan of what to do with the
4159 parameters in the form of adjustments; NULL means nothing is to be done. */
4160
4161 static ipa_parm_adjustment_vec
4162 analyze_all_param_acesses (void)
4163 {
4164 enum ipa_splicing_result repr_state;
4165 bool proceed = false;
4166 int i, adjustments_count = 0;
4167 VEC (access_p, heap) *representatives;
4168 ipa_parm_adjustment_vec adjustments;
4169
4170 repr_state = splice_all_param_accesses (&representatives);
4171 if (repr_state == NO_GOOD_ACCESS)
4172 return NULL;
4173
4174 /* If there are any parameters passed by reference which are not modified
4175 directly, we need to check whether they can be modified indirectly. */
4176 if (repr_state == UNMODIF_BY_REF_ACCESSES)
4177 {
4178 analyze_caller_dereference_legality (representatives);
4179 analyze_modified_params (representatives);
4180 }
4181
4182 for (i = 0; i < func_param_count; i++)
4183 {
4184 struct access *repr = VEC_index (access_p, representatives, i);
4185
4186 if (repr && !no_accesses_p (repr))
4187 {
4188 if (repr->grp_scalar_ptr)
4189 {
4190 adjustments_count++;
4191 if (repr->grp_not_necessarilly_dereferenced
4192 || repr->grp_maybe_modified)
4193 VEC_replace (access_p, representatives, i, NULL);
4194 else
4195 {
4196 proceed = true;
4197 sra_stats.scalar_by_ref_to_by_val++;
4198 }
4199 }
4200 else
4201 {
4202 int new_components = decide_one_param_reduction (repr);
4203
4204 if (new_components == 0)
4205 {
4206 VEC_replace (access_p, representatives, i, NULL);
4207 adjustments_count++;
4208 }
4209 else
4210 {
4211 adjustments_count += new_components;
4212 sra_stats.aggregate_params_reduced++;
4213 sra_stats.param_reductions_created += new_components;
4214 proceed = true;
4215 }
4216 }
4217 }
4218 else
4219 {
4220 if (no_accesses_p (repr))
4221 {
4222 proceed = true;
4223 sra_stats.deleted_unused_parameters++;
4224 }
4225 adjustments_count++;
4226 }
4227 }
4228
4229 if (!proceed && dump_file)
4230 fprintf (dump_file, "NOT proceeding to change params.\n");
4231
4232 if (proceed)
4233 adjustments = turn_representatives_into_adjustments (representatives,
4234 adjustments_count);
4235 else
4236 adjustments = NULL;
4237
4238 VEC_free (access_p, heap, representatives);
4239 return adjustments;
4240 }
4241
4242 /* If a parameter replacement identified by ADJ does not yet exist in the form
4243 of a declaration, create it and record it; otherwise return the previously
4244 created one. */
4245
4246 static tree
4247 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4248 {
4249 tree repl;
4250 if (!adj->new_ssa_base)
4251 {
4252 char *pretty_name = make_fancy_name (adj->base);
4253
4254 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4255 DECL_NAME (repl) = get_identifier (pretty_name);
4256 obstack_free (&name_obstack, pretty_name);
4257
4258 add_referenced_var (repl);
4259 adj->new_ssa_base = repl;
4260 }
4261 else
4262 repl = adj->new_ssa_base;
4263 return repl;
4264 }
4265
4266 /* Find the first adjustment for a particular parameter BASE in a vector of
4267 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
4268 adjustment. */
4269
4270 static struct ipa_parm_adjustment *
4271 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4272 {
4273 int i, len;
4274
4275 len = VEC_length (ipa_parm_adjustment_t, adjustments);
4276 for (i = 0; i < len; i++)
4277 {
4278 struct ipa_parm_adjustment *adj;
4279
4280 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4281 if (!adj->copy_param && adj->base == base)
4282 return adj;
4283 }
4284
4285 return NULL;
4286 }
4287
4288 /* If the statement STMT defines an SSA_NAME of a parameter which is to be
4289 removed because its value is not used, replace the SSA_NAME, together with
4290 all of its uses, with one based on a newly created VAR_DECL, and return true.
4291 ADJUSTMENTS is a pointer to an adjustments vector. */
4292
4293 static bool
4294 replace_removed_params_ssa_names (gimple stmt,
4295 ipa_parm_adjustment_vec adjustments)
4296 {
4297 struct ipa_parm_adjustment *adj;
4298 tree lhs, decl, repl, name;
4299
4300 if (gimple_code (stmt) == GIMPLE_PHI)
4301 lhs = gimple_phi_result (stmt);
4302 else if (is_gimple_assign (stmt))
4303 lhs = gimple_assign_lhs (stmt);
4304 else if (is_gimple_call (stmt))
4305 lhs = gimple_call_lhs (stmt);
4306 else
4307 gcc_unreachable ();
4308
4309 if (TREE_CODE (lhs) != SSA_NAME)
4310 return false;
4311 decl = SSA_NAME_VAR (lhs);
4312 if (TREE_CODE (decl) != PARM_DECL)
4313 return false;
4314
4315 adj = get_adjustment_for_base (adjustments, decl);
4316 if (!adj)
4317 return false;
4318
4319 repl = get_replaced_param_substitute (adj);
4320 name = make_ssa_name (repl, stmt);
4321
4322 if (dump_file)
4323 {
4324 fprintf (dump_file, "replacing an SSA name of a removed param ");
4325 print_generic_expr (dump_file, lhs, 0);
4326 fprintf (dump_file, " with ");
4327 print_generic_expr (dump_file, name, 0);
4328 fprintf (dump_file, "\n");
4329 }
4330
4331 if (is_gimple_assign (stmt))
4332 gimple_assign_set_lhs (stmt, name);
4333 else if (is_gimple_call (stmt))
4334 gimple_call_set_lhs (stmt, name);
4335 else
4336 gimple_phi_set_result (stmt, name);
4337
4338 replace_uses_by (lhs, name);
4339 release_ssa_name (lhs);
4340 return true;
4341 }
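
/* For instance (sketch): if parameter A is being removed because its
   value is unused, a definition such as a_1 = PHI <...> has its result
   replaced by an SSA name of the substitute variable created by
   get_replaced_param_substitute, and all uses of a_1 follow suit, so no
   SSA names of the doomed parameter remain.  */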
4342
4343 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4344 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4345 specifies whether the function should care about type incompatibility between
4346 the current and new expressions. If it is false, the function will leave
4347 incompatibility issues to the caller. Return true iff the expression
4348 was modified. */
4349
4350 static bool
4351 sra_ipa_modify_expr (tree *expr, bool convert,
4352 ipa_parm_adjustment_vec adjustments)
4353 {
4354 int i, len;
4355 struct ipa_parm_adjustment *adj, *cand = NULL;
4356 HOST_WIDE_INT offset, size, max_size;
4357 tree base, src;
4358
4359 len = VEC_length (ipa_parm_adjustment_t, adjustments);
4360
4361 if (TREE_CODE (*expr) == BIT_FIELD_REF
4362 || TREE_CODE (*expr) == IMAGPART_EXPR
4363 || TREE_CODE (*expr) == REALPART_EXPR)
4364 {
4365 expr = &TREE_OPERAND (*expr, 0);
4366 convert = true;
4367 }
4368
4369 base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
4370 if (!base || size == -1 || max_size == -1)
4371 return false;
4372
4373 if (TREE_CODE (base) == MEM_REF)
4374 {
4375 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
4376 base = TREE_OPERAND (base, 0);
4377 }
4378
4379 base = get_ssa_base_param (base);
4380 if (!base || TREE_CODE (base) != PARM_DECL)
4381 return false;
4382
4383 for (i = 0; i < len; i++)
4384 {
4385 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4386
4387 if (adj->base == base
4388 && (adj->offset == offset || adj->remove_param))
4389 {
4390 cand = adj;
4391 break;
4392 }
4393 }
4394 if (!cand || cand->copy_param || cand->remove_param)
4395 return false;
4396
4397 if (cand->by_ref)
4398 src = build_simple_mem_ref (cand->reduction);
4399 else
4400 src = cand->reduction;
4401
4402 if (dump_file && (dump_flags & TDF_DETAILS))
4403 {
4404 fprintf (dump_file, "About to replace expr ");
4405 print_generic_expr (dump_file, *expr, 0);
4406 fprintf (dump_file, " with ");
4407 print_generic_expr (dump_file, src, 0);
4408 fprintf (dump_file, "\n");
4409 }
4410
4411 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4412 {
4413 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4414 *expr = vce;
4415 }
4416 else
4417 *expr = src;
4418 return true;
4419 }
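
/* As an illustration (invented names): if field F of the aggregate
   pointed to by parameter P was turned into a separate parameter, an
   occurrence of p->f in the body matches the adjustment with the
   corresponding offset and is rewritten in terms of the replacement
   parameter -- a direct use of it, or a MEM_REF of it when the
   replacement is still passed by reference -- wrapped in a
   VIEW_CONVERT_EXPR if CONVERT requires type compatibility and the
   types differ.  */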
4420
4421 /* If the statement pointed to by STMT_PTR contains any expressions that need
4422 to be replaced with different ones as noted by ADJUSTMENTS, do so. Handle any
4423 potential type incompatibilities (GSI is used to accommodate conversion
4424 statements and must point to the statement). Return true iff the statement
4425 was modified. */
4426
4427 static bool
4428 sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
4429 ipa_parm_adjustment_vec adjustments)
4430 {
4431 gimple stmt = *stmt_ptr;
4432 tree *lhs_p, *rhs_p;
4433 bool any;
4434
4435 if (!gimple_assign_single_p (stmt))
4436 return false;
4437
4438 rhs_p = gimple_assign_rhs1_ptr (stmt);
4439 lhs_p = gimple_assign_lhs_ptr (stmt);
4440
4441 any = sra_ipa_modify_expr (rhs_p, false, adjustments);
4442 any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
4443 if (any)
4444 {
4445 tree new_rhs = NULL_TREE;
4446
4447 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4448 {
4449 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4450 {
4451 /* V_C_Es of constructors can cause trouble (PR 42714). */
4452 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4453 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4454 else
4455 *rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
4456 }
4457 else
4458 new_rhs = fold_build1_loc (gimple_location (stmt),
4459 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4460 *rhs_p);
4461 }
4462 else if (REFERENCE_CLASS_P (*rhs_p)
4463 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4464 && !is_gimple_reg (*lhs_p))
4465 /* This can happen when an assignment in between two single field
4466 structures is turned into an assignment in between two pointers to
4467 scalars (PR 42237). */
4468 new_rhs = *rhs_p;
4469
4470 if (new_rhs)
4471 {
4472 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4473 true, GSI_SAME_STMT);
4474
4475 gimple_assign_set_rhs_from_tree (gsi, tmp);
4476 }
4477
4478 return true;
4479 }
4480
4481 return false;
4482 }
4483
4484 /* Traverse the function body and perform all modifications as described in
4485 ADJUSTMENTS. Return true iff the CFG has been changed. */
4486
4487 static bool
4488 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4489 {
4490 bool cfg_changed = false;
4491 basic_block bb;
4492
4493 FOR_EACH_BB (bb)
4494 {
4495 gimple_stmt_iterator gsi;
4496
4497 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4498 replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);
4499
4500 gsi = gsi_start_bb (bb);
4501 while (!gsi_end_p (gsi))
4502 {
4503 gimple stmt = gsi_stmt (gsi);
4504 bool modified = false;
4505 tree *t;
4506 unsigned i;
4507
4508 switch (gimple_code (stmt))
4509 {
4510 case GIMPLE_RETURN:
4511 t = gimple_return_retval_ptr (stmt);
4512 if (*t != NULL_TREE)
4513 modified |= sra_ipa_modify_expr (t, true, adjustments);
4514 break;
4515
4516 case GIMPLE_ASSIGN:
4517 modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
4518 modified |= replace_removed_params_ssa_names (stmt, adjustments);
4519 break;
4520
4521 case GIMPLE_CALL:
4522 /* Operands must be processed before the lhs. */
4523 for (i = 0; i < gimple_call_num_args (stmt); i++)
4524 {
4525 t = gimple_call_arg_ptr (stmt, i);
4526 modified |= sra_ipa_modify_expr (t, true, adjustments);
4527 }
4528
4529 if (gimple_call_lhs (stmt))
4530 {
4531 t = gimple_call_lhs_ptr (stmt);
4532 modified |= sra_ipa_modify_expr (t, false, adjustments);
4533 modified |= replace_removed_params_ssa_names (stmt,
4534 adjustments);
4535 }
4536 break;
4537
4538 case GIMPLE_ASM:
4539 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
4540 {
4541 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
4542 modified |= sra_ipa_modify_expr (t, true, adjustments);
4543 }
4544 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
4545 {
4546 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
4547 modified |= sra_ipa_modify_expr (t, false, adjustments);
4548 }
4549 break;
4550
4551 default:
4552 break;
4553 }
4554
4555 if (modified)
4556 {
4557 update_stmt (stmt);
4558 if (maybe_clean_eh_stmt (stmt)
4559 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4560 cfg_changed = true;
4561 }
4562 gsi_next (&gsi);
4563 }
4564 }
4565
4566 return cfg_changed;
4567 }
4568
4569 /* Call gimple_debug_bind_reset_value on all debug statements describing
4570 gimple register parameters that are being removed or replaced. */
4571
4572 static void
4573 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4574 {
4575 int i, len;
4576 gimple_stmt_iterator *gsip = NULL, gsi;
4577
4578 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR))
4579 {
4580 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
4581 gsip = &gsi;
4582 }
4583 len = VEC_length (ipa_parm_adjustment_t, adjustments);
4584 for (i = 0; i < len; i++)
4585 {
4586 struct ipa_parm_adjustment *adj;
4587 imm_use_iterator ui;
4588 gimple stmt, def_temp;
4589 tree name, vexpr, copy = NULL_TREE;
4590 use_operand_p use_p;
4591
4592 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4593 if (adj->copy_param || !is_gimple_reg (adj->base))
4594 continue;
4595 name = gimple_default_def (cfun, adj->base);
4596 vexpr = NULL;
4597 if (name)
4598 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4599 {
4600 /* All other users must have been removed by
4601 ipa_sra_modify_function_body. */
4602 gcc_assert (is_gimple_debug (stmt));
4603 if (vexpr == NULL && gsip != NULL)
4604 {
4605 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4606 vexpr = make_node (DEBUG_EXPR_DECL);
4607 def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
4608 NULL);
4609 DECL_ARTIFICIAL (vexpr) = 1;
4610 TREE_TYPE (vexpr) = TREE_TYPE (name);
4611 DECL_MODE (vexpr) = DECL_MODE (adj->base);
4612 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4613 }
4614 if (vexpr)
4615 {
4616 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4617 SET_USE (use_p, vexpr);
4618 }
4619 else
4620 gimple_debug_bind_reset_value (stmt);
4621 update_stmt (stmt);
4622 }
4623 /* Create a VAR_DECL for debug info purposes. */
4624 if (!DECL_IGNORED_P (adj->base))
4625 {
4626 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
4627 VAR_DECL, DECL_NAME (adj->base),
4628 TREE_TYPE (adj->base));
4629 if (DECL_PT_UID_SET_P (adj->base))
4630 SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
4631 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
4632 TREE_READONLY (copy) = TREE_READONLY (adj->base);
4633 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
4634 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
4635 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
4636 DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
4637 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
4638 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
4639 SET_DECL_RTL (copy, 0);
4640 TREE_USED (copy) = 1;
4641 DECL_CONTEXT (copy) = current_function_decl;
4642 add_referenced_var (copy);
4643 add_local_decl (cfun, copy);
4644 DECL_CHAIN (copy) =
4645 BLOCK_VARS (DECL_INITIAL (current_function_decl));
4646 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
4647 }
4648 if (gsip != NULL && copy && target_for_debug_bind (adj->base))
4649 {
4650 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4651 if (vexpr)
4652 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
4653 else
4654 def_temp = gimple_build_debug_source_bind (copy, adj->base,
4655 NULL);
4656 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4657 }
4658 }
4659 }
4660
4661 /* Return true iff some caller of NODE passes fewer actual arguments than there
4662 are formal parameters in the current function. */
4663
4664 static bool
4665 not_all_callers_have_enough_arguments_p (struct cgraph_node *node,
4666 void *data ATTRIBUTE_UNUSED)
4667 {
4668 struct cgraph_edge *cs;
4669 for (cs = node->callers; cs; cs = cs->next_caller)
4670 if (!callsite_has_enough_arguments_p (cs->call_stmt))
4671 return true;
4672
4673 return false;
4674 }
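
/* A sketch of what this guards against (invented code): with an
   old-style declaration

     int foo ();
     int foo (int a, int b) { return a + b; }

   a caller may legally contain "foo (1);", i.e. fewer actual arguments
   than formal parameters, and such a call statement could not be
   adjusted consistently with a new signature.  */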
4675
4676 /* Convert all callers of NODE. */
4677
4678 static bool
4679 convert_callers_for_node (struct cgraph_node *node,
4680 void *data)
4681 {
4682 ipa_parm_adjustment_vec adjustments = (ipa_parm_adjustment_vec)data;
4683 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4684 struct cgraph_edge *cs;
4685
4686 for (cs = node->callers; cs; cs = cs->next_caller)
4687 {
4688 current_function_decl = cs->caller->decl;
4689 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
4690
4691 if (dump_file)
4692 fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
4693 cs->caller->uid, cs->callee->uid,
4694 cgraph_node_name (cs->caller),
4695 cgraph_node_name (cs->callee));
4696
4697 ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);
4698
4699 pop_cfun ();
4700 }
4701
4702 for (cs = node->callers; cs; cs = cs->next_caller)
4703 if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
4704 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
4705 compute_inline_parameters (cs->caller, true);
4706 BITMAP_FREE (recomputed_callers);
4707
4708 return true;
4709 }
4710
4711 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
4712
4713 static void
4714 convert_callers (struct cgraph_node *node, tree old_decl,
4715 ipa_parm_adjustment_vec adjustments)
4716 {
4717 tree old_cur_fndecl = current_function_decl;
4718 basic_block this_block;
4719
4720 cgraph_for_node_and_aliases (node, convert_callers_for_node,
4721 adjustments, false);
4722
4723 current_function_decl = old_cur_fndecl;
4724
4725 if (!encountered_recursive_call)
4726 return;
4727
4728 FOR_EACH_BB (this_block)
4729 {
4730 gimple_stmt_iterator gsi;
4731
4732 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4733 {
4734 gimple stmt = gsi_stmt (gsi);
4735 tree call_fndecl;
4736 if (gimple_code (stmt) != GIMPLE_CALL)
4737 continue;
4738 call_fndecl = gimple_call_fndecl (stmt);
4739 if (call_fndecl == old_decl)
4740 {
4741 if (dump_file)
4742 fprintf (dump_file, "Adjusting recursive call\n");
4743 gimple_call_set_fndecl (stmt, node->decl);
4744 ipa_modify_call_arguments (NULL, stmt, adjustments);
4745 }
4746 }
4747 }
4748
4749 return;
4750 }
4751
4752 /* Perform all the modifications required in IPA-SRA for NODE to have parameters
4753 as given in ADJUSTMENTS. Return true iff the CFG has been changed. */
4754
4755 static bool
4756 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4757 {
4758 struct cgraph_node *new_node;
4759 bool cfg_changed;
4760 VEC (cgraph_edge_p, heap) *redirect_callers = collect_callers_of_node (node);
4761
4762 rebuild_cgraph_edges ();
4763 free_dominance_info (CDI_DOMINATORS);
4764 pop_cfun ();
4765 current_function_decl = NULL_TREE;
4766
4767 new_node = cgraph_function_versioning (node, redirect_callers, NULL, NULL,
4768 false, NULL, NULL, "isra");
4769 current_function_decl = new_node->decl;
4770 push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
4771
4772 ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
4773 cfg_changed = ipa_sra_modify_function_body (adjustments);
4774 sra_ipa_reset_debug_stmts (adjustments);
4775 convert_callers (new_node, node->decl, adjustments);
4776 cgraph_make_node_local (new_node);
4777 return cfg_changed;
4778 }
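
/* The overall effect, sketched with invented names: a function

     int foo (struct S *p);

   of which only the int field p->a survives the analysis is rewritten
   into a local clone along the lines of

     int foo.isra.0 (int ISRA.1);

   with all call sites in this unit redirected to the clone ("isra" and
   "ISRA" are the suffixes actually passed above; the exact numbering is
   up to the cloning machinery).  */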
4779
4780 /* Return false if the function is apparently unsuitable for IPA-SRA based on
4781 its attributes; return true otherwise. NODE is the cgraph node of the
4782 current function. */
4783
4784 static bool
4785 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
4786 {
4787 if (!cgraph_node_can_be_local_p (node))
4788 {
4789 if (dump_file)
4790 fprintf (dump_file, "Function not local to this compilation unit.\n");
4791 return false;
4792 }
4793
4794 if (!node->local.can_change_signature)
4795 {
4796 if (dump_file)
4797 fprintf (dump_file, "Function can not change signature.\n");
4798 return false;
4799 }
4800
4801 if (!tree_versionable_function_p (node->decl))
4802 {
4803 if (dump_file)
4804 fprintf (dump_file, "Function is not versionable.\n");
4805 return false;
4806 }
4807
4808 if (DECL_VIRTUAL_P (current_function_decl))
4809 {
4810 if (dump_file)
4811 fprintf (dump_file, "Function is a virtual method.\n");
4812 return false;
4813 }
4814
4815 if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
4816 && inline_summary (node)->size >= MAX_INLINE_INSNS_AUTO)
4817 {
4818 if (dump_file)
4819 fprintf (dump_file, "Function too big to be made truly local.\n");
4820 return false;
4821 }
4822
4823 if (!node->callers)
4824 {
4825 if (dump_file)
4826 fprintf (dump_file,
4827 "Function has no callers in this compilation unit.\n");
4828 return false;
4829 }
4830
4831 if (cfun->stdarg)
4832 {
4833 if (dump_file)
4834 fprintf (dump_file, "Function uses stdarg.\n");
4835 return false;
4836 }
4837
4838 if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
4839 return false;
4840
4841 return true;
4842 }
4843
4844 /* Perform early interprocedural SRA. */
4845
4846 static unsigned int
4847 ipa_early_sra (void)
4848 {
4849 struct cgraph_node *node = cgraph_get_node (current_function_decl);
4850 ipa_parm_adjustment_vec adjustments;
4851 int ret = 0;
4852
4853 if (!ipa_sra_preliminary_function_checks (node))
4854 return 0;
4855
4856 sra_initialize ();
4857 sra_mode = SRA_MODE_EARLY_IPA;
4858
4859 if (!find_param_candidates ())
4860 {
4861 if (dump_file)
4862 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
4863 goto simple_out;
4864 }
4865
4866 if (cgraph_for_node_and_aliases (node, not_all_callers_have_enough_arguments_p,
4867 NULL, true))
4868 {
4869 if (dump_file)
4870 fprintf (dump_file, "There are callers with insufficient number of "
4871 "arguments.\n");
4872 goto simple_out;
4873 }
4874
4875 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
4876 func_param_count
4877 * last_basic_block_for_function (cfun));
4878 final_bbs = BITMAP_ALLOC (NULL);
4879
4880 scan_function ();
4881 if (encountered_apply_args)
4882 {
4883 if (dump_file)
4884 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
4885 goto out;
4886 }
4887
4888 if (encountered_unchangable_recursive_call)
4889 {
4890 if (dump_file)
4891 fprintf (dump_file, "Function calls itself with insufficient "
4892 "number of arguments.\n");
4893 goto out;
4894 }
4895
4896 adjustments = analyze_all_param_acesses ();
4897 if (!adjustments)
4898 goto out;
4899 if (dump_file)
4900 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
4901
4902 if (modify_function (node, adjustments))
4903 ret = TODO_update_ssa | TODO_cleanup_cfg;
4904 else
4905 ret = TODO_update_ssa;
4906 VEC_free (ipa_parm_adjustment_t, heap, adjustments);
4907
4908 statistics_counter_event (cfun, "Unused parameters deleted",
4909 sra_stats.deleted_unused_parameters);
4910 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
4911 sra_stats.scalar_by_ref_to_by_val);
4912 statistics_counter_event (cfun, "Aggregate parameters broken up",
4913 sra_stats.aggregate_params_reduced);
4914 statistics_counter_event (cfun, "Aggregate parameter components created",
4915 sra_stats.param_reductions_created);
4916
4917 out:
4918 BITMAP_FREE (final_bbs);
4919 free (bb_dereferences);
4920 simple_out:
4921 sra_deinitialize ();
4922 return ret;
4923 }
4924
4925 /* Return true iff early IPA-SRA shall be performed. */
4926 static bool
4927 ipa_early_sra_gate (void)
4928 {
4929 return flag_ipa_sra && dbg_cnt (eipa_sra);
4930 }
4931
4932 struct gimple_opt_pass pass_early_ipa_sra =
4933 {
4934 {
4935 GIMPLE_PASS,
4936 "eipa_sra", /* name */
4937 ipa_early_sra_gate, /* gate */
4938 ipa_early_sra, /* execute */
4939 NULL, /* sub */
4940 NULL, /* next */
4941 0, /* static_pass_number */
4942 TV_IPA_SRA, /* tv_id */
4943 0, /* properties_required */
4944 0, /* properties_provided */
4945 0, /* properties_destroyed */
4946 0, /* todo_flags_start */
4947 TODO_dump_cgraph /* todo_flags_finish */
4948 }
4949 };